/* gcc/tree-nested.c -- Nested function decomposition for GIMPLE.
   NOTE(review): the lines previously here were web-viewer extraction residue
   (an unrelated ChangeLog fragment for config/rx/rx.c and a git blob hash);
   they are not part of this translation unit.  */
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "tm_p.h"
26 #include "function.h"
27 #include "tree-dump.h"
28 #include "tree-inline.h"
29 #include "gimple.h"
30 #include "tree-iterator.h"
31 #include "bitmap.h"
32 #include "cgraph.h"
33 #include "tree-cfg.h"
34 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
35 #include "langhooks.h"
36 #include "pointer-set.h"
37 #include "gimple-low.h"
40 /* The object of this pass is to lower the representation of a set of nested
41 functions in order to expose all of the gory details of the various
42 nonlocal references. We want to do this sooner rather than later, in
43 order to give us more freedom in emitting all of the functions in question.
45 Back in olden times, when gcc was young, we developed an insanely
46 complicated scheme whereby variables which were referenced nonlocally
47 were forced to live in the stack of the declaring function, and then
48 the nested functions magically discovered where these variables were
49 placed. In order for this scheme to function properly, it required
50 that the outer function be partially expanded, then we switch to
51 compiling the inner function, and once done with those we switch back
52 to compiling the outer function. Such delicate ordering requirements
53 makes it difficult to do whole translation unit optimizations
54 involving such functions.
56 The implementation here is much more direct. Everything that can be
57 referenced by an inner function is a member of an explicitly created
58 structure herein called the "nonlocal frame struct". The incoming
59 static chain for a nested function is a pointer to this struct in
60 the parent. In this way, we settle on known offsets from a known
61 base, and so are decoupled from the logic that places objects in the
62 function's stack frame. More importantly, we don't have to wait for
63 that to happen -- since the compilation of the inner function is no
64 longer tied to a real stack frame, the nonlocal frame struct can be
65 allocated anywhere. Which means that the outer function is now
66 inlinable.
68 Theory of operation here is very simple. Iterate over all the
69 statements in all the functions (depth first) several times,
70 allocating structures and fields on demand. In general we want to
71 examine inner functions first, so that we can avoid making changes
72 to outer functions which are unnecessary.
74 The order of the passes matters a bit, in that later passes will be
75 skipped if it is discovered that the functions don't actually interact
76 at all. That is, they're nested in the lexical sense but could have
77 been written as independent functions without change. */
/* Per-function bookkeeping for the nesting-lowering pass.  One of these
   exists for every function in the nesting tree.  */

struct nesting_info
{
  /* Links forming the nesting tree: enclosing function, first nested
     function, and next sibling at the same nesting depth.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Map from nonlocally-referenced DECLs to their FIELD_DECLs in the
     frame struct (see lookup_field_for_decl).  */
  struct pointer_map_t *field_map;
  /* Map used both for trampoline fields (lookup_tramp_for_decl) and
     nonlocal debug decls (get_nonlocal_debug_decl).  */
  struct pointer_map_t *var_map;
  /* Set of memory references; exact use is outside this chunk --
     presumably tracked for later rewriting.  TODO confirm.  */
  struct pointer_set_t *mem_refs;
  /* DECL_UIDs for which frame expansion is suppressed (OpenMP clauses).  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this record describes.  */
  tree context;
  /* New temporaries created for this context, to be registered later.  */
  tree new_local_var_chain;
  /* Debug-only VAR_DECLs with DECL_VALUE_EXPR set (get_nonlocal_debug_decl).  */
  tree debug_var_chain;
  /* The "FRAME." RECORD_TYPE and the local variable of that type.  */
  tree frame_type;
  tree frame_decl;
  /* Field in the frame struct holding the parent's static chain, and the
     PARM_DECL-like variable holding the incoming static chain.  */
  tree chain_field;
  tree chain_decl;
  /* Field holding the nonlocal-goto jmp_buf (get_nl_goto_field).  */
  tree nl_goto_field;

  /* True if any PARM_DECL was moved into the frame struct.  */
  bool any_parm_remapped;
  /* True if a trampoline field was created for a nested function.  */
  bool any_tramp_created;
  /* Set elsewhere in the pass -- records static-chain additions.
     TODO confirm exact encoding against the rest of the file.  */
  char static_chain_added;
};
106 /* Iterate over the nesting tree, starting with ROOT, depth first. */
108 static inline struct nesting_info *
109 iter_nestinfo_start (struct nesting_info *root)
111 while (root->inner)
112 root = root->inner;
113 return root;
116 static inline struct nesting_info *
117 iter_nestinfo_next (struct nesting_info *node)
119 if (node->next)
120 return iter_nestinfo_start (node->next);
121 return node->outer;
124 #define FOR_EACH_NEST_INFO(I, ROOT) \
125 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
127 /* Obstack used for the bitmaps in the struct above. */
128 static struct bitmap_obstack nesting_info_bitmap_obstack;
131 /* We're working in so many different function contexts simultaneously,
132 that create_tmp_var is dangerous. Prevent mishap. */
133 #define create_tmp_var cant_use_create_tmp_var_here_dummy
/* Like create_tmp_var, except record the variable for registration at
   the given nesting level.  The variable is threaded onto
   INFO->new_local_var_chain and declared in INFO->context later.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  /* Chain onto the pending list for INFO; do not enter any BIND_EXPR yet.  */
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}
/* Take the address of EXP to be used within function CONTEXT.
   Mark it for addressability as necessary.  */

tree
build_addr (tree exp, tree context)
{
  tree base = exp;
  tree save_context;
  tree retval;

  /* Strip component references to find the underlying DECL, if any,
     so that it can be marked addressable.  */
  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  if (DECL_P (base))
    TREE_ADDRESSABLE (base) = 1;

  /* Building the ADDR_EXPR will compute a set of properties for
     that ADDR_EXPR.  Those properties are unfortunately context
     specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.

     Temporarily set CURRENT_FUNCTION_DECL to the desired context,
     build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
     way the properties are for the ADDR_EXPR are computed properly.  */
  save_context = current_function_decl;
  current_function_decl = context;
  retval = build_fold_addr_expr (exp);
  current_function_decl = save_context;
  return retval;
}
/* Insert FIELD into TYPE, sorted by alignment requirements (largest
   alignment first, which keeps the frame struct densely packed).  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  /* Find the first existing field whose alignment FIELD meets or
     exceeds; FIELD is linked in just before it.  */
  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  DECL_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    TYPE_ALIGN (type) = DECL_ALIGN (field);
}
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the type "FRAME.<function name>" for the benefit of dumps
	 and debug info.  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}
252 /* Return true if DECL should be referenced by pointer in the non-local
253 frame structure. */
255 static bool
256 use_pointer_in_frame (tree decl)
258 if (TREE_CODE (decl) == PARM_DECL)
260 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
261 sized decls, and inefficient to copy large aggregates. Don't bother
262 moving anything but scalar variables. */
263 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
265 else
267 /* Variable sized types make things "interesting" in the frame. */
268 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With NO_INSERT, return NULL_TREE if no field exists yet.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  void **slot;

  if (insert == NO_INSERT)
    {
      slot = pointer_map_contains (info->field_map, decl);
      return slot ? (tree) *slot : NULL_TREE;
    }

  slot = pointer_map_insert (info->field_map, decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* Store only a pointer to DECL in the frame.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* Copy DECL into the frame; mirror its relevant attributes.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  DECL_ALIGN (field) = DECL_ALIGN (decl);
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return (tree) *slot;
}
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      /* The chain points at the parent's frame struct.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
/* Initialize a new temporary with the GIMPLE_CALL STMT.  The call is
   given the new temporary as its LHS and inserted before GSI; the
   temporary is returned.  */

static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
			gimple call)
{
  tree t;

  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  /* Borrow the location of the statement at GSI, if there is one.  */
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}
/* Copy EXP into a temporary.  Allocate the temporary in the context of
   INFO and insert the initialization statement before GSI.  */

static tree
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (t, exp);
  /* Borrow the location of the statement at GSI, if there is one.  */
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
437 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
439 static tree
440 gsi_gimplify_val (struct nesting_info *info, tree exp,
441 gimple_stmt_iterator *gsi)
443 if (is_gimple_val (exp))
444 return exp;
445 else
446 return init_tmp_var (info, exp, gsi);
/* Similarly, but copy from the temporary and insert the statement
   after the iterator.  Used for left-hand sides: the caller stores into
   the returned temporary, and the inserted assignment copies it back
   into EXP afterwards.  */

static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (exp, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
/* Build or return the type used to represent a nested function trampoline.
   The type is cached in TRAMPOLINE_TYPE since it is target-wide, not
   per-function.  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* The trampoline is a char array wrapped in a struct with a single
     "__data" field.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  With NO_INSERT,
   return NULL_TREE if no field exists yet.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  void **slot;

  if (insert == NO_INSERT)
    {
      slot = pointer_map_contains (info->var_map, decl);
      return slot ? (tree) *slot : NULL_TREE;
    }

  slot = pointer_map_insert (info->var_map, decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);
      TREE_TYPE (field) = get_trampoline_type (info);
      /* The trampoline's address is taken when it is initialized.  */
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      info->any_tramp_created = true;
    }

  return (tree) *slot;
}
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of GIMPLE sequence
   *PSEQ, passing a fresh walk_stmt_info whose INFO field is our
   nesting_info.  */

static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	   struct nesting_info *info, gimple_seq *pseq)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  /* Start assuming a context where only gimple values are accepted.  */
  wi.val_only = true;
  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
}
/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.
   The body is re-set afterwards since the walk may replace statements.  */

static inline void
walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	       struct nesting_info *info)
{
  gimple_seq body = gimple_body (info->context);
  walk_body (callback_stmt, callback_op, info, &body);
  gimple_set_body (info->context, body);
}
/* Invoke CALLBACK_STMT/CALLBACK_OP on a GIMPLE_OMP_FOR's init, cond, incr
   and pre-body.  Statements generated while rewriting the index/bound
   expressions are collected via WI.GSI and appended to the pre-body.  */

static void
walk_gimple_omp_for (gimple for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Point WI.GSI at an empty sequence so callbacks have somewhere to
     insert any temporaries they create.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is an lvalue context; val_only stays false for it.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary expression; walk both operands.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Anything the callbacks emitted belongs in the pre-body.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
/* Similarly for ROOT and all functions nested underneath, depth first.  */

static void
walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		    struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    walk_function (callback_stmt, callback_op, n);
}
/* We have to check for a fairly pathological case.  The operands of function
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here return indicating whether
   FNDECL has such a nested function.  ORIG_FN is the function we were
   trying to inline to use for checking whether any argument is variably
   modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
  tree arg;

  /* Walk the direct nested functions, recursing into each one's own
     nested functions in turn.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN, recursing into nested functions.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = pointer_map_create ();
  info->var_map = pointer_map_create ();
  info->mem_refs = pointer_set_create ();
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  /* Build subtrees for each nested function and link them as our
     children (prepending reverses the sibling order).  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See discussion at check_for_nested_with_variably_modified for a
     discussion of why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The chain for our own context is just the address of our frame.  */
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      x = get_chain_decl (info);

      /* Walk up one nesting level at a time, dereferencing each level's
	 __chain field, until we reach TARGET_CONTEXT.  */
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
    }
  else
    {
      x = get_chain_decl (info);

      /* Chase the static chain up to TARGET_CONTEXT's frame pointer.  */
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build_simple_mem_ref (x);
    }

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
814 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OpenMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;
  void **slot;

  slot = pointer_map_insert (info->var_map, decl);

  /* Reuse a previously-created debug decl for DECL.  */
  if (*slot)
    return (tree) *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The debug decl stands for the frame-field expression X.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      /* Find the nesting level that owns T, then build the
		 chain-walking frame-field access to it.  */
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      if (use_pointer_in_frame (t))
		{
		  /* The frame holds only a pointer; dereference it.  */
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build_simple_mem_ref (x);
		}
	    }

	  /* In a value-only context, spill through a temporary: write
	     back after the statement for an LHS, load before it for
	     an RHS.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1043 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1044 struct walk_stmt_info *);
1046 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1047 and PARM_DECLs that belong to outer functions.
 
   Returns true if any clause decl was rewritten in a way that requires the
   static chain (i.e. anything other than OMP_CLAUSE_PRIVATE).  */
1049 static bool
1050 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1052 struct nesting_info *const info = (struct nesting_info *) wi->info;
1053 bool need_chain = false, need_stmts = false;
1054 tree clause, decl;
1055 int dummy;
1056 bitmap new_suppress;
 
/* Work on a copy of the suppression bitmap; it is installed into INFO only
   after all clauses have been scanned.  */
1058 new_suppress = BITMAP_GGC_ALLOC ();
1059 bitmap_copy (new_suppress, info->suppress_expansion);
1061 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1063 switch (OMP_CLAUSE_CODE (clause))
1065 case OMP_CLAUSE_REDUCTION:
1066 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1067 need_stmts = true;
1068 goto do_decl_clause;
1070 case OMP_CLAUSE_LASTPRIVATE:
1071 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1072 need_stmts = true;
1073 goto do_decl_clause;
1075 case OMP_CLAUSE_PRIVATE:
1076 case OMP_CLAUSE_FIRSTPRIVATE:
1077 case OMP_CLAUSE_COPYPRIVATE:
1078 case OMP_CLAUSE_SHARED:
1079 do_decl_clause:
1080 decl = OMP_CLAUSE_DECL (clause);
/* Static and external variables are never rewritten.  */
1081 if (TREE_CODE (decl) == VAR_DECL
1082 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1083 break;
/* A decl from an outer function: suppress its normal expansion and point
   the clause at a nonlocal debug decl instead.  */
1084 if (decl_function_context (decl) != info->context)
1086 bitmap_set_bit (new_suppress, DECL_UID (decl));
1087 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1088 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1089 need_chain = true;
1091 break;
1093 case OMP_CLAUSE_SCHEDULE:
1094 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1095 break;
1096 /* FALLTHRU */
1097 case OMP_CLAUSE_FINAL:
1098 case OMP_CLAUSE_IF:
1099 case OMP_CLAUSE_NUM_THREADS:
/* These clauses carry an expression operand; walk it as an rvalue.  */
1100 wi->val_only = true;
1101 wi->is_lhs = false;
1102 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1103 &dummy, wi);
1104 break;
1106 case OMP_CLAUSE_NOWAIT:
1107 case OMP_CLAUSE_ORDERED:
1108 case OMP_CLAUSE_DEFAULT:
1109 case OMP_CLAUSE_COPYIN:
1110 case OMP_CLAUSE_COLLAPSE:
1111 case OMP_CLAUSE_UNTIED:
1112 case OMP_CLAUSE_MERGEABLE:
/* Nothing to rewrite for these clause kinds.  */
1113 break;
1115 default:
1116 gcc_unreachable ();
1120 info->suppress_expansion = new_suppress;
 
/* Second pass: walk the GIMPLE sequences attached to REDUCTION and
   LASTPRIVATE clauses.  The reduction placeholder's DECL_CONTEXT is
   temporarily retargeted at this function so references resolve here.  */
1122 if (need_stmts)
1123 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1124 switch (OMP_CLAUSE_CODE (clause))
1126 case OMP_CLAUSE_REDUCTION:
1127 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1129 tree old_context
1130 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1131 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1132 = info->context;
1133 walk_body (convert_nonlocal_reference_stmt,
1134 convert_nonlocal_reference_op, info,
1135 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1136 walk_body (convert_nonlocal_reference_stmt,
1137 convert_nonlocal_reference_op, info,
1138 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1139 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1140 = old_context;
1142 break;
1144 case OMP_CLAUSE_LASTPRIVATE:
1145 walk_body (convert_nonlocal_reference_stmt,
1146 convert_nonlocal_reference_op, info,
1147 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1148 break;
1150 default:
1151 break;
1154 return need_chain;
1157 /* Create nonlocal debug decls for nonlocal VLA array bounds.
 
   Walks TYPE down to any ARRAY_TYPE it wraps and, for each domain bound
   that is a VAR_DECL or PARM_DECL belonging to an outer function, creates
   a nonlocal debug decl so debug info for the bound remains available.  */
1159 static void
1160 note_nonlocal_vla_type (struct nesting_info *info, tree type)
/* Strip anonymous pointer types.  */
1162 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1163 type = TREE_TYPE (type);
/* Look through a typedef to the type it was created from.  */
1165 if (TYPE_NAME (type)
1166 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1167 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1168 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
/* Strip any remaining wrapper types that cannot themselves be VLAs.  */
1170 while (POINTER_TYPE_P (type)
1171 || TREE_CODE (type) == VECTOR_TYPE
1172 || TREE_CODE (type) == FUNCTION_TYPE
1173 || TREE_CODE (type) == METHOD_TYPE)
1174 type = TREE_TYPE (type);
1176 if (TREE_CODE (type) == ARRAY_TYPE)
1178 tree domain, t;
 
/* Recurse first on the element type (multidimensional VLAs).  */
1180 note_nonlocal_vla_type (info, TREE_TYPE (type));
1181 domain = TYPE_DOMAIN (type);
1182 if (domain)
1184 t = TYPE_MIN_VALUE (domain);
1185 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1186 && decl_function_context (t) != info->context)
1187 get_nonlocal_debug_decl (info, t);
1188 t = TYPE_MAX_VALUE (domain);
1189 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1190 && decl_function_context (t) != info->context)
1191 get_nonlocal_debug_decl (info, t);
1196 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1197 in BLOCK.
 
   Only variably-modified variables that already carry a DECL_VALUE_EXPR
   and belong to an outer function are considered.  */
1199 static void
1200 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1202 tree var;
1204 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1205 if (TREE_CODE (var) == VAR_DECL
1206 && variably_modified_type_p (TREE_TYPE (var), NULL)
1207 && DECL_HAS_VALUE_EXPR_P (var)
1208 && decl_function_context (var) != info->context)
1209 note_nonlocal_vla_type (info, TREE_TYPE (var));
1212 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1213 PARM_DECLs that belong to outer functions. This handles statements
1214 that are not handled via the standard recursion done in
1215 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1216 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1217 operands of STMT have been handled by this function. */
1219 static tree
1220 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1221 struct walk_stmt_info *wi)
1223 struct nesting_info *info = (struct nesting_info *) wi->info;
1224 tree save_local_var_chain;
1225 bitmap save_suppress;
1226 gimple stmt = gsi_stmt (*gsi);
1228 switch (gimple_code (stmt))
1230 case GIMPLE_GOTO:
1231 /* Don't walk non-local gotos for now. */
1232 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1234 wi->val_only = true;
1235 wi->is_lhs = false;
1236 *handled_ops_p = true;
1237 return NULL_TREE;
1239 break;
1241 case GIMPLE_OMP_PARALLEL:
1242 case GIMPLE_OMP_TASK:
/* Clauses are processed first; if any of them needs the static chain,
   pass the chain decl into the parallel/task region as FIRSTPRIVATE.  */
1243 save_suppress = info->suppress_expansion;
1244 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1245 wi))
1247 tree c, decl;
1248 decl = get_chain_decl (info);
1249 c = build_omp_clause (gimple_location (stmt),
1250 OMP_CLAUSE_FIRSTPRIVATE);
1251 OMP_CLAUSE_DECL (c) = decl;
1252 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1253 gimple_omp_taskreg_set_clauses (stmt, c);
 
/* Collect new temporaries created while walking the region body and
   declare them inside the region rather than in the outer function.  */
1256 save_local_var_chain = info->new_local_var_chain;
1257 info->new_local_var_chain = NULL;
1259 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1260 info, gimple_omp_body_ptr (stmt));
1262 if (info->new_local_var_chain)
1263 declare_vars (info->new_local_var_chain,
1264 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1265 false);
1266 info->new_local_var_chain = save_local_var_chain;
1267 info->suppress_expansion = save_suppress;
1268 break;
1270 case GIMPLE_OMP_FOR:
1271 save_suppress = info->suppress_expansion;
1272 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1273 walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
1274 convert_nonlocal_reference_op, info);
1275 walk_body (convert_nonlocal_reference_stmt,
1276 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1277 info->suppress_expansion = save_suppress;
1278 break;
1280 case GIMPLE_OMP_SECTIONS:
1281 save_suppress = info->suppress_expansion;
1282 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1283 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1284 info, gimple_omp_body_ptr (stmt));
1285 info->suppress_expansion = save_suppress;
1286 break;
1288 case GIMPLE_OMP_SINGLE:
1289 save_suppress = info->suppress_expansion;
1290 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1291 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1292 info, gimple_omp_body_ptr (stmt));
1293 info->suppress_expansion = save_suppress;
1294 break;
1296 case GIMPLE_OMP_TARGET:
1297 save_suppress = info->suppress_expansion;
1298 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1299 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1300 info, gimple_omp_body_ptr (stmt));
1301 info->suppress_expansion = save_suppress;
1302 break;
1304 case GIMPLE_OMP_TEAMS:
1305 save_suppress = info->suppress_expansion;
1306 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1307 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1308 info, gimple_omp_body_ptr (stmt));
1309 info->suppress_expansion = save_suppress;
1310 break;
1312 case GIMPLE_OMP_SECTION:
1313 case GIMPLE_OMP_MASTER:
1314 case GIMPLE_OMP_TASKGROUP:
1315 case GIMPLE_OMP_ORDERED:
/* Clauseless OMP constructs: just walk the body.  */
1316 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1317 info, gimple_omp_body_ptr (stmt))；
1318 break;
1320 case GIMPLE_BIND:
/* When not optimizing, make sure VLA bounds referenced from outer
   functions get debug decls for this bind's block.  */
1321 if (!optimize && gimple_bind_block (stmt))
1322 note_nonlocal_block_vlas (info, gimple_bind_block (stmt));
1324 *handled_ops_p = false;
1325 return NULL_TREE;
1327 case GIMPLE_COND:
1328 wi->val_only = true;
1329 wi->is_lhs = false;
1330 *handled_ops_p = false;
1331 return NULL_TREE;
1333 default:
1334 /* For every other statement that we are not interested in
1335 handling here, let the walker traverse the operands. */
1336 *handled_ops_p = false;
1337 return NULL_TREE;
1340 /* We have handled all of STMT operands, no need to traverse the operands. */
1341 *handled_ops_p = true;
1342 return NULL_TREE;
1346 /* A subroutine of convert_local_reference. Create a local variable
1347 in the parent function with DECL_VALUE_EXPR set to reference the
1348 field in FRAME. This is used both for debug info and in OpenMP
1349 lowering.
 
   Results are cached in INFO->var_map, so repeated calls for the same
   DECL return the same replacement decl.  */
1351 static tree
1352 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1354 tree x, new_decl;
1355 void **slot;
 
/* Return the cached replacement if one already exists.  */
1357 slot = pointer_map_insert (info->var_map, decl);
1358 if (*slot)
1359 return (tree) *slot;
1361 /* Make sure frame_decl gets created. */
1362 (void) get_frame_type (info);
1363 x = info->frame_decl;
/* Build FRAME.FIELD, the value expression for the new decl.  */
1364 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1366 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1367 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1368 DECL_CONTEXT (new_decl) = info->context;
/* Mirror the qualities of the original decl onto the replacement.  */
1369 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1370 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1371 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1372 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1373 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1374 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1375 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1376 if ((TREE_CODE (decl) == PARM_DECL
1377 || TREE_CODE (decl) == RESULT_DECL
1378 || TREE_CODE (decl) == VAR_DECL)
1379 && DECL_BY_REFERENCE (decl))
1380 DECL_BY_REFERENCE (new_decl) = 1;
1382 SET_DECL_VALUE_EXPR (new_decl, x);
1383 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1384 *slot = new_decl;
1386 DECL_CHAIN (new_decl) = info->debug_var_chain;
1387 info->debug_var_chain = new_decl;
1389 /* Do not emit debug info twice. */
1390 DECL_IGNORED_P (decl) = 1;
1392 return new_decl;
1396 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1397 and PARM_DECLs that were referenced by inner nested functions.
1398 The rewrite will be a structure reference to the local frame variable. */
1400 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1402 static tree
1403 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1405 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1406 struct nesting_info *const info = (struct nesting_info *) wi->info;
1407 tree t = *tp, field, x;
1408 bool save_val_only;
1410 *walk_subtrees = 0;
1411 switch (TREE_CODE (t))
1413 case VAR_DECL:
1414 /* Non-automatic variables are never processed. */
1415 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1416 break;
1417 /* FALLTHRU */
1419 case PARM_DECL:
1421 if (decl_function_context (t) == info->context)
1422 /* If we copied a pointer to the frame, then the original decl
1423 is used unchanged in the parent function. */
1424 if (use_pointer_in_frame (t))
1425 break;
1427 /* No need to transform anything if no child references the
1428 variable. */
1429 field = lookup_field_for_decl (info, t, NO_INSERT);
1430 if (!field)
1431 break;
1432 wi->changed = true;
/* Use the debug decl when expansion is suppressed for this decl
   (e.g. by an OMP clause); otherwise read through the frame field.  */
1434 x = get_local_debug_decl (info, t, field);
1435 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1436 x = get_frame_field (info, info->context, field, &wi->gsi);
1438 if (wi->val_only)
1440 if (wi->is_lhs)
1441 x = save_tmp_var (info, x, &wi->gsi);
1442 else
1443 x = init_tmp_var (info, x, &wi->gsi);
1446 *tp = x;
1448 break;
1450 case ADDR_EXPR:
/* The operand is walked as an address (val_only = false); afterwards,
   if anything changed, invariant flags must be recomputed.  */
1451 save_val_only = wi->val_only;
1452 wi->val_only = false;
1453 wi->is_lhs = false;
1454 wi->changed = false;
1455 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1456 wi->val_only = save_val_only;
1458 /* If we converted anything ... */
1459 if (wi->changed)
1461 tree save_context;
1463 /* Then the frame decl is now addressable. */
1464 TREE_ADDRESSABLE (info->frame_decl) = 1;
1466 save_context = current_function_decl;
1467 current_function_decl = info->context;
1468 recompute_tree_invariant_for_addr_expr (t);
1469 current_function_decl = save_context;
1471 /* If we are in a context where we only accept values, then
1472 compute the address into a temporary. */
1473 if (save_val_only)
1474 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1475 t, &wi->gsi);
1477 break;
1479 case REALPART_EXPR:
1480 case IMAGPART_EXPR:
1481 case COMPONENT_REF:
1482 case ARRAY_REF:
1483 case ARRAY_RANGE_REF:
1484 case BIT_FIELD_REF:
1485 /* Go down this entire nest and just look at the final prefix and
1486 anything that describes the references. Otherwise, we lose track
1487 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1488 save_val_only = wi->val_only;
1489 wi->val_only = true;
1490 wi->is_lhs = false;
1491 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1493 if (TREE_CODE (t) == COMPONENT_REF)
1494 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1495 NULL);
1496 else if (TREE_CODE (t) == ARRAY_REF
1497 || TREE_CODE (t) == ARRAY_RANGE_REF)
1499 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1500 NULL);
1501 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1502 NULL);
1503 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1504 NULL);
/* Finally walk the base object of the component chain itself.  */
1507 wi->val_only = false;
1508 walk_tree (tp, convert_local_reference_op, wi, NULL);
1509 wi->val_only = save_val_only;
1510 break;
1512 case MEM_REF:
1513 save_val_only = wi->val_only;
1514 wi->val_only = true;
1515 wi->is_lhs = false;
1516 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1517 wi, NULL);
1518 /* We need to re-fold the MEM_REF as component references as
1519 part of a ADDR_EXPR address are not allowed. But we cannot
1520 fold here, as the chain record type is not yet finalized. */
1521 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1522 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1523 pointer_set_insert (info->mem_refs, tp);
1524 wi->val_only = save_val_only;
1525 break;
1527 case VIEW_CONVERT_EXPR:
1528 /* Just request to look at the subtrees, leaving val_only and lhs
1529 untouched. This might actually be for !val_only + lhs, in which
1530 case we don't want to force a replacement by a temporary. */
1531 *walk_subtrees = 1;
1532 break;
1534 default:
1535 if (!IS_TYPE_OR_DECL_P (t))
1537 *walk_subtrees = 1;
1538 wi->val_only = true;
1539 wi->is_lhs = false;
1541 break;
1544 return NULL_TREE;
1547 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1548 struct walk_stmt_info *);
1550 /* Helper for convert_local_reference. Convert all the references in
1551 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference.
 
   Returns true if any clause decl was redirected to the frame, i.e. the
   caller must make the frame available inside the construct.  */
1553 static bool
1554 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1556 struct nesting_info *const info = (struct nesting_info *) wi->info;
1557 bool need_frame = false, need_stmts = false;
1558 tree clause, decl;
1559 int dummy;
1560 bitmap new_suppress;
 
/* Work on a copy of the suppression bitmap; installed only after the
   full clause scan.  */
1562 new_suppress = BITMAP_GGC_ALLOC ();
1563 bitmap_copy (new_suppress, info->suppress_expansion);
1565 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1567 switch (OMP_CLAUSE_CODE (clause))
1569 case OMP_CLAUSE_REDUCTION:
1570 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1571 need_stmts = true;
1572 goto do_decl_clause;
1574 case OMP_CLAUSE_LASTPRIVATE:
1575 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1576 need_stmts = true;
1577 goto do_decl_clause;
1579 case OMP_CLAUSE_PRIVATE:
1580 case OMP_CLAUSE_FIRSTPRIVATE:
1581 case OMP_CLAUSE_COPYPRIVATE:
1582 case OMP_CLAUSE_SHARED:
1583 do_decl_clause:
1584 decl = OMP_CLAUSE_DECL (clause);
/* Static and external variables are never rewritten.  */
1585 if (TREE_CODE (decl) == VAR_DECL
1586 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1587 break;
/* A decl of this function that lives in the frame: point the clause
   at the local debug decl and note that the frame is needed.  */
1588 if (decl_function_context (decl) == info->context
1589 && !use_pointer_in_frame (decl))
1591 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1592 if (field)
1594 bitmap_set_bit (new_suppress, DECL_UID (decl));
1595 OMP_CLAUSE_DECL (clause)
1596 = get_local_debug_decl (info, decl, field);
1597 need_frame = true;
1600 break;
1602 case OMP_CLAUSE_SCHEDULE:
1603 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1604 break;
1605 /* FALLTHRU */
1606 case OMP_CLAUSE_FINAL:
1607 case OMP_CLAUSE_IF:
1608 case OMP_CLAUSE_NUM_THREADS:
/* These clauses carry an expression operand; walk it as an rvalue.  */
1609 wi->val_only = true;
1610 wi->is_lhs = false;
1611 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1612 wi);
1613 break;
1615 case OMP_CLAUSE_NOWAIT:
1616 case OMP_CLAUSE_ORDERED:
1617 case OMP_CLAUSE_DEFAULT:
1618 case OMP_CLAUSE_COPYIN:
1619 case OMP_CLAUSE_COLLAPSE:
1620 case OMP_CLAUSE_UNTIED:
1621 case OMP_CLAUSE_MERGEABLE:
/* Nothing to rewrite for these clause kinds.  */
1622 break;
1624 default:
1625 gcc_unreachable ();
1629 info->suppress_expansion = new_suppress;
 
/* Second pass: walk the GIMPLE sequences attached to REDUCTION and
   LASTPRIVATE clauses, retargeting the reduction placeholder's context
   at this function for the duration of the walk.  */
1631 if (need_stmts)
1632 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1633 switch (OMP_CLAUSE_CODE (clause))
1635 case OMP_CLAUSE_REDUCTION:
1636 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1638 tree old_context
1639 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1640 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1641 = info->context;
1642 walk_body (convert_local_reference_stmt,
1643 convert_local_reference_op, info,
1644 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1645 walk_body (convert_local_reference_stmt,
1646 convert_local_reference_op, info,
1647 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1648 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1649 = old_context;
1651 break;
1653 case OMP_CLAUSE_LASTPRIVATE:
1654 walk_body (convert_local_reference_stmt,
1655 convert_local_reference_op, info,
1656 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1657 break;
1659 default:
1660 break;
1663 return need_frame;
1667 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1668 and PARM_DECLs that were referenced by inner nested functions.
1669 The rewrite will be a structure reference to the local frame variable. */
1671 static tree
1672 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1673 struct walk_stmt_info *wi)
1675 struct nesting_info *info = (struct nesting_info *) wi->info;
1676 tree save_local_var_chain;
1677 bitmap save_suppress;
1678 gimple stmt = gsi_stmt (*gsi);
1680 switch (gimple_code (stmt))
1682 case GIMPLE_OMP_PARALLEL:
1683 case GIMPLE_OMP_TASK:
/* If any clause was redirected to the frame, share the frame decl
   itself with the parallel/task region via an OMP_CLAUSE_SHARED.  */
1684 save_suppress = info->suppress_expansion;
1685 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1686 wi))
1688 tree c;
1689 (void) get_frame_type (info);
1690 c = build_omp_clause (gimple_location (stmt),
1691 OMP_CLAUSE_SHARED);
1692 OMP_CLAUSE_DECL (c) = info->frame_decl;
1693 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1694 gimple_omp_taskreg_set_clauses (stmt, c);
 
/* New temporaries created in the region body must be declared inside
   the region, not in the enclosing function.  */
1697 save_local_var_chain = info->new_local_var_chain;
1698 info->new_local_var_chain = NULL;
1700 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1701 gimple_omp_body_ptr (stmt));
1703 if (info->new_local_var_chain)
1704 declare_vars (info->new_local_var_chain,
1705 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1706 info->new_local_var_chain = save_local_var_chain;
1707 info->suppress_expansion = save_suppress;
1708 break;
1710 case GIMPLE_OMP_FOR:
1711 save_suppress = info->suppress_expansion;
1712 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1713 walk_gimple_omp_for (stmt, convert_local_reference_stmt,
1714 convert_local_reference_op, info);
1715 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1716 info, gimple_omp_body_ptr (stmt));
1717 info->suppress_expansion = save_suppress;
1718 break;
1720 case GIMPLE_OMP_SECTIONS:
1721 save_suppress = info->suppress_expansion;
1722 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1723 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1724 info, gimple_omp_body_ptr (stmt));
1725 info->suppress_expansion = save_suppress;
1726 break;
1728 case GIMPLE_OMP_SINGLE:
1729 save_suppress = info->suppress_expansion;
1730 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1731 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1732 info, gimple_omp_body_ptr (stmt));
1733 info->suppress_expansion = save_suppress;
1734 break;
1736 case GIMPLE_OMP_TARGET:
1737 save_suppress = info->suppress_expansion;
1738 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1739 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1740 info, gimple_omp_body_ptr (stmt));
1741 info->suppress_expansion = save_suppress;
1742 break;
1744 case GIMPLE_OMP_TEAMS:
1745 save_suppress = info->suppress_expansion;
1746 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1747 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1748 info, gimple_omp_body_ptr (stmt));
1749 info->suppress_expansion = save_suppress;
1750 break;
1752 case GIMPLE_OMP_SECTION:
1753 case GIMPLE_OMP_MASTER:
1754 case GIMPLE_OMP_TASKGROUP:
1755 case GIMPLE_OMP_ORDERED:
/* Clauseless OMP constructs: just walk the body.  */
1756 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1757 info, gimple_omp_body_ptr (stmt));
1758 break;
1760 case GIMPLE_COND:
1761 wi->val_only = true;
1762 wi->is_lhs = false;
1763 *handled_ops_p = false;
1764 return NULL_TREE;
1766 case GIMPLE_ASSIGN:
/* A clobber of a decl that now lives in the frame is dropped
   (replaced with a nop); the frame field stays live.  */
1767 if (gimple_clobber_p (stmt))
1769 tree lhs = gimple_assign_lhs (stmt);
1770 if (!use_pointer_in_frame (lhs)
1771 && lookup_field_for_decl (info, lhs, NO_INSERT))
1773 gsi_replace (gsi, gimple_build_nop (), true);
1774 break;
1777 *handled_ops_p = false;
1778 return NULL_TREE;
1780 default:
1781 /* For every other statement that we are not interested in
1782 handling here, let the walker traverse the operands. */
1783 *handled_ops_p = false;
1784 return NULL_TREE;
1787 /* Indicate that we have handled all the operands ourselves. */
1788 *handled_ops_p = true;
1789 return NULL_TREE;
1793 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
1794 that reference labels from outer functions. The rewrite will be a
1795 call to __builtin_nonlocal_goto. */
1797 static tree
1798 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1799 struct walk_stmt_info *wi)
1801 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1802 tree label, new_label, target_context, x, field;
1803 void **slot;
1804 gimple call;
1805 gimple stmt = gsi_stmt (*gsi);
 
/* Only GIMPLE_GOTOs with a LABEL_DECL destination belonging to an outer
   function are rewritten; everything else falls through to the walker.  */
1807 if (gimple_code (stmt) != GIMPLE_GOTO)
1809 *handled_ops_p = false;
1810 return NULL_TREE;
1813 label = gimple_goto_dest (stmt);
1814 if (TREE_CODE (label) != LABEL_DECL)
1816 *handled_ops_p = false;
1817 return NULL_TREE;
1820 target_context = decl_function_context (label);
1821 if (target_context == info->context)
1823 *handled_ops_p = false;
1824 return NULL_TREE;
 
/* Find the nesting level that owns the label.  */
1827 for (i = info->outer; target_context != i->context; i = i->outer)
1828 continue;
1830 /* The original user label may also be use for a normal goto, therefore
1831 we must create a new label that will actually receive the abnormal
1832 control transfer. This new label will be marked LABEL_NONLOCAL; this
1833 mark will trigger proper behavior in the cfg, as well as cause the
1834 (hairy target-specific) non-local goto receiver code to be generated
1835 when we expand rtl. Enter this association into var_map so that we
1836 can insert the new label into the IL during a second pass. */
1837 slot = pointer_map_insert (i->var_map, label);
1838 if (*slot == NULL)
1840 new_label = create_artificial_label (UNKNOWN_LOCATION);
1841 DECL_NONLOCAL (new_label) = 1;
1842 *slot = new_label;
1844 else
1845 new_label = (tree) *slot;
1847 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
1848 field = get_nl_goto_field (i);
1849 x = get_frame_field (info, target_context, field, gsi);
1850 x = build_addr (x, target_context);
1851 x = gsi_gimplify_val (info, x, gsi);
1852 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
1853 2, build_addr (new_label, target_context), x);
1854 gsi_replace (gsi, call, false);
1856 /* We have handled all of STMT's operands, no need to keep going. */
1857 *handled_ops_p = true;
1858 return NULL_TREE;
1862 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
1863 are referenced via nonlocal goto from a nested function. The rewrite
1864 will involve installing a newly generated DECL_NONLOCAL label, and
1865 (potentially) a branch around the rtl gunk that is assumed to be
1866 attached to such a label. */
1868 static tree
1869 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1870 struct walk_stmt_info *wi)
1872 struct nesting_info *const info = (struct nesting_info *) wi->info;
1873 tree label, new_label;
1874 gimple_stmt_iterator tmp_gsi;
1875 void **slot;
1876 gimple stmt = gsi_stmt (*gsi);
1878 if (gimple_code (stmt) != GIMPLE_LABEL)
1880 *handled_ops_p = false;
1881 return NULL_TREE;
1884 label = gimple_label_label (stmt);
 
/* Only labels recorded by convert_nl_goto_reference need a receiver.  */
1886 slot = pointer_map_contains (info->var_map, label);
1887 if (!slot)
1889 *handled_ops_p = false;
1890 return NULL_TREE;
1893 /* If there's any possibility that the previous statement falls through,
1894 then we must branch around the new non-local label. */
1895 tmp_gsi = wi->gsi;
1896 gsi_prev (&tmp_gsi);
1897 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
1899 gimple stmt = gimple_build_goto (label);
1900 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
 
/* Insert the DECL_NONLOCAL label created during the first pass just
   before the original user label.  */
1903 new_label = (tree) *slot;
1904 stmt = gimple_build_label (new_label);
1905 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1907 *handled_ops_p = true;
1908 return NULL_TREE;
1912 /* Called via walk_function+walk_stmt, rewrite all references to addresses
1913 of nested functions that require the use of trampolines. The rewrite
1914 will involve a reference a trampoline generated for the occasion. */
1916 static tree
1917 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
1919 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1920 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1921 tree t = *tp, decl, target_context, x, builtin;
1922 gimple call;
1924 *walk_subtrees = 0;
1925 switch (TREE_CODE (t))
1927 case ADDR_EXPR:
1928 /* Build
1929 T.1 = &CHAIN->tramp;
1930 T.2 = __builtin_adjust_trampoline (T.1);
1931 T.3 = (func_type)T.2;
1932 */
1934 decl = TREE_OPERAND (t, 0);
1935 if (TREE_CODE (decl) != FUNCTION_DECL)
1936 break;
1938 /* Only need to process nested functions. */
1939 target_context = decl_function_context (decl);
1940 if (!target_context)
1941 break;
1943 /* If the nested function doesn't use a static chain, then
1944 it doesn't need a trampoline. */
1945 if (!DECL_STATIC_CHAIN (decl))
1946 break;
1948 /* If we don't want a trampoline, then don't build one. */
1949 if (TREE_NO_TRAMPOLINE (t))
1950 break;
1952 /* Lookup the immediate parent of the callee, as that's where
1953 we need to insert the trampoline. */
1954 for (i = info; i->context != target_context; i = i->outer)
1955 continue;
1956 x = lookup_tramp_for_decl (i, decl, INSERT);
1958 /* Compute the address of the field holding the trampoline. */
1959 x = get_frame_field (info, target_context, x, &wi->gsi);
1960 x = build_addr (x, target_context);
1961 x = gsi_gimplify_val (info, x, &wi->gsi);
1963 /* Do machine-specific ugliness. Normally this will involve
1964 computing extra alignment, but it can really be anything. */
1965 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
1966 call = gimple_build_call (builtin, 1, x);
1967 x = init_tmp_var_with_call (info, &wi->gsi, call);
1969 /* Cast back to the proper function type. */
1970 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
1971 x = init_tmp_var (info, x, &wi->gsi);
1973 *tp = x;
1974 break;
1976 default:
/* Keep walking into subexpressions, but do not descend into types
   or declarations.  */
1977 if (!IS_TYPE_OR_DECL_P (t))
1978 *walk_subtrees = 1;
1979 break;
1982 return NULL_TREE;
1986 /* Called via walk_function+walk_gimple_stmt, rewrite all references
1987 to addresses of nested functions that require the use of
1988 trampolines. The rewrite will involve a reference a trampoline
1989 generated for the occasion. */
1991 static tree
1992 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1993 struct walk_stmt_info *wi)
1995 struct nesting_info *info = (struct nesting_info *) wi->info;
1996 gimple stmt = gsi_stmt (*gsi);
1998 switch (gimple_code (stmt))
2000 case GIMPLE_CALL:
2002 /* Only walk call arguments, lest we generate trampolines for
2003 direct calls. */
2004 unsigned long i, nargs = gimple_call_num_args (stmt);
2005 for (i = 0; i < nargs; i++)
2006 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2007 wi, NULL);
2008 break;
2011 case GIMPLE_OMP_PARALLEL:
2012 case GIMPLE_OMP_TASK:
2014 tree save_local_var_chain;
/* Walk the statement's own operands, then its region body; any new
   temporaries created inside the body are declared in the region.  */
2015 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2016 save_local_var_chain = info->new_local_var_chain;
2017 info->new_local_var_chain = NULL;
2018 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2019 info, gimple_omp_body_ptr (stmt));
2020 if (info->new_local_var_chain)
2021 declare_vars (info->new_local_var_chain,
2022 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2023 false);
2024 info->new_local_var_chain = save_local_var_chain;
2026 break;
2028 default:
/* NOTE(review): the `break` below is unreachable after `return`;
   harmless, but could be dropped in a cleanup.  */
2029 *handled_ops_p = false;
2030 return NULL_TREE;
2031 break;
2034 *handled_ops_p = true;
2035 return NULL_TREE;
2040 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2041 that reference nested functions to make sure that the static chain
2042 is set up properly for the call. */
2044 static tree
2045 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2046 struct walk_stmt_info *wi)
2048 struct nesting_info *const info = (struct nesting_info *) wi->info;
2049 tree decl, target_context;
2050 char save_static_chain_added;
2051 int i;
2052 gimple stmt = gsi_stmt (*gsi);
2054 switch (gimple_code (stmt))
2056 case GIMPLE_CALL:
/* Leave calls that already carry a static chain alone, as well as
   indirect calls (no fndecl to inspect).  */
2057 if (gimple_call_chain (stmt))
2058 break;
2059 decl = gimple_call_fndecl (stmt);
2060 if (!decl)
2061 break;
2062 target_context = decl_function_context (decl);
2063 if (target_context && DECL_STATIC_CHAIN (decl))
2065 gimple_call_set_chain (stmt, get_static_chain (info, target_context,
2066 &wi->gsi));
/* Bit 0 records a chain to our own frame, bit 1 a chain to an
   outer function's frame.  */
2067 info->static_chain_added |= (1 << (info->context != target_context));
2069 break;
2071 case GIMPLE_OMP_PARALLEL:
2072 case GIMPLE_OMP_TASK:
/* Walk the region body with a fresh static_chain_added, then make
   sure any chain/frame decls used inside are passed into the region
   via FIRSTPRIVATE/SHARED clauses.  */
2073 save_static_chain_added = info->static_chain_added;
2074 info->static_chain_added = 0;
2075 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2076 for (i = 0; i < 2; i++)
2078 tree c, decl;
2079 if ((info->static_chain_added & (1 << i)) == 0)
2080 continue;
2081 decl = i ? get_chain_decl (info) : info->frame_decl;
2082 /* Don't add CHAIN.* or FRAME.* twice. */
2083 for (c = gimple_omp_taskreg_clauses (stmt);
2085 c = OMP_CLAUSE_CHAIN (c))
2086 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2087 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2088 && OMP_CLAUSE_DECL (c) == decl)
2089 break;
2090 if (c == NULL)
2092 c = build_omp_clause (gimple_location (stmt),
2093 i ? OMP_CLAUSE_FIRSTPRIVATE
2094 : OMP_CLAUSE_SHARED);
2095 OMP_CLAUSE_DECL (c) = decl;
2096 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2097 gimple_omp_taskreg_set_clauses (stmt, c);
/* Merge the region's chain usage back into the enclosing state.  */
2100 info->static_chain_added |= save_static_chain_added;
2101 break;
2103 case GIMPLE_OMP_FOR:
2104 walk_body (convert_gimple_call, NULL, info,
2105 gimple_omp_for_pre_body_ptr (stmt));
2106 /* FALLTHRU */
2107 case GIMPLE_OMP_SECTIONS:
2108 case GIMPLE_OMP_SECTION:
2109 case GIMPLE_OMP_SINGLE:
2110 case GIMPLE_OMP_TARGET:
2111 case GIMPLE_OMP_TEAMS:
2112 case GIMPLE_OMP_MASTER:
2113 case GIMPLE_OMP_TASKGROUP:
2114 case GIMPLE_OMP_ORDERED:
2115 case GIMPLE_OMP_CRITICAL:
2116 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2117 break;
2119 default:
2120 /* Keep looking for other operands. */
2121 *handled_ops_p = false;
2122 return NULL_TREE;
2125 *handled_ops_p = true;
2126 return NULL_TREE;
2129 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2130 call expressions. At the same time, determine if a nested function
2131 actually uses its static chain; if not, remember that. */
2133 static void
2134 convert_all_function_calls (struct nesting_info *root)
2136 unsigned int chain_count = 0, old_chain_count, iter_count;
2137 struct nesting_info *n;
2139 /* First, optimistically clear static_chain for all decls that haven't
2140 used the static chain already for variable access. */
2141 FOR_EACH_NEST_INFO (n, root)
2143 tree decl = n->context;
2144 if (!n->outer || (!n->chain_decl && !n->chain_field))
2146 DECL_STATIC_CHAIN (decl) = 0;
2147 if (dump_file && (dump_flags & TDF_DETAILS))
2148 fprintf (dump_file, "Guessing no static-chain for %s\n",
2149 lang_hooks.decl_printable_name (decl, 2));
2151 else
2152 DECL_STATIC_CHAIN (decl) = 1;
2153 chain_count += DECL_STATIC_CHAIN (decl);
2156 /* Walk the functions and perform transformations. Note that these
2157 transformations can induce new uses of the static chain, which in turn
2158 require re-examining all users of the decl. */
2159 /* ??? It would make sense to try to use the call graph to speed this up,
2160 but the call graph hasn't really been built yet. Even if it did, we
2161 would still need to iterate in this loop since address-of references
2162 wouldn't show up in the callgraph anyway. */
2163 iter_count = 0;
2166 old_chain_count = chain_count;
2167 chain_count = 0;
2168 iter_count++;
2170 if (dump_file && (dump_flags & TDF_DETAILS))
2171 fputc ('\n', dump_file);
2173 FOR_EACH_NEST_INFO (n, root)
2175 tree decl = n->context;
2176 walk_function (convert_tramp_reference_stmt,
2177 convert_tramp_reference_op, n);
2178 walk_function (convert_gimple_call, NULL, n);
2179 chain_count += DECL_STATIC_CHAIN (decl);
2182 while (chain_count != old_chain_count);
2184 if (dump_file && (dump_flags & TDF_DETAILS))
2185 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2186 iter_count);
2189 struct nesting_copy_body_data
2191 copy_body_data cb;
2192 struct nesting_info *root;
2195 /* A helper subroutine for debug_var_chain type remapping. */
2197 static tree
2198 nesting_copy_decl (tree decl, copy_body_data *id)
2200 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2201 void **slot = pointer_map_contains (nid->root->var_map, decl);
2203 if (slot)
2204 return (tree) *slot;
2206 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2208 tree new_decl = copy_decl_no_change (decl, id);
2209 DECL_ORIGINAL_TYPE (new_decl)
2210 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2211 return new_decl;
2214 if (TREE_CODE (decl) == VAR_DECL
2215 || TREE_CODE (decl) == PARM_DECL
2216 || TREE_CODE (decl) == RESULT_DECL)
2217 return decl;
2219 return copy_decl_no_change (decl, id);
2222 /* A helper function for remap_vla_decls. See if *TP contains
2223 some remapped variables. */
2225 static tree
2226 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2228 struct nesting_info *root = (struct nesting_info *) data;
2229 tree t = *tp;
2230 void **slot;
2232 if (DECL_P (t))
2234 *walk_subtrees = 0;
2235 slot = pointer_map_contains (root->var_map, t);
2237 if (slot)
2238 return (tree) *slot;
2240 return NULL;
2243 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2244 involved. */
2246 static void
2247 remap_vla_decls (tree block, struct nesting_info *root)
2249 tree var, subblock, val, type;
2250 struct nesting_copy_body_data id;
2252 for (subblock = BLOCK_SUBBLOCKS (block);
2253 subblock;
2254 subblock = BLOCK_CHAIN (subblock))
2255 remap_vla_decls (subblock, root);
2257 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2258 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2260 val = DECL_VALUE_EXPR (var);
2261 type = TREE_TYPE (var);
2263 if (!(TREE_CODE (val) == INDIRECT_REF
2264 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2265 && variably_modified_type_p (type, NULL)))
2266 continue;
2268 if (pointer_map_contains (root->var_map, TREE_OPERAND (val, 0))
2269 || walk_tree (&type, contains_remapped_vars, root, NULL))
2270 break;
2273 if (var == NULL_TREE)
2274 return;
2276 memset (&id, 0, sizeof (id));
2277 id.cb.copy_decl = nesting_copy_decl;
2278 id.cb.decl_map = pointer_map_create ();
2279 id.root = root;
2281 for (; var; var = DECL_CHAIN (var))
2282 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2284 struct nesting_info *i;
2285 tree newt, context;
2286 void **slot;
2288 val = DECL_VALUE_EXPR (var);
2289 type = TREE_TYPE (var);
2291 if (!(TREE_CODE (val) == INDIRECT_REF
2292 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2293 && variably_modified_type_p (type, NULL)))
2294 continue;
2296 slot = pointer_map_contains (root->var_map, TREE_OPERAND (val, 0));
2297 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2298 continue;
2300 context = decl_function_context (var);
2301 for (i = root; i; i = i->outer)
2302 if (i->context == context)
2303 break;
2305 if (i == NULL)
2306 continue;
2308 /* Fully expand value expressions. This avoids having debug variables
2309 only referenced from them and that can be swept during GC. */
2310 if (slot)
2312 tree t = (tree) *slot;
2313 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2314 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2317 id.cb.src_fn = i->context;
2318 id.cb.dst_fn = i->context;
2319 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2321 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2322 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2324 newt = TREE_TYPE (newt);
2325 type = TREE_TYPE (type);
2327 if (TYPE_NAME (newt)
2328 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2329 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2330 && newt != type
2331 && TYPE_NAME (newt) == TYPE_NAME (type))
2332 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2334 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2335 if (val != DECL_VALUE_EXPR (var))
2336 SET_DECL_VALUE_EXPR (var, val);
2339 pointer_map_destroy (id.cb.decl_map);
2342 /* Fold the MEM_REF *E. */
2343 static bool
2344 fold_mem_refs (const void *e, void *data ATTRIBUTE_UNUSED)
2346 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2347 *ref_p = fold (*ref_p);
2348 return true;
2351 /* Do "everything else" to clean up or complete state collected by the
2352 various walking passes -- lay out the types and decls, generate code
2353 to initialize the frame decl, store critical expressions in the
2354 struct function for rtl to find. */
2356 static void
2357 finalize_nesting_tree_1 (struct nesting_info *root)
2359 gimple_seq stmt_list;
2360 gimple stmt;
2361 tree context = root->context;
2362 struct function *sf;
2364 stmt_list = NULL;
2366 /* If we created a non-local frame type or decl, we need to lay them
2367 out at this time. */
2368 if (root->frame_type)
2370 /* In some cases the frame type will trigger the -Wpadded warning.
2371 This is not helpful; suppress it. */
2372 int save_warn_padded = warn_padded;
2373 tree *adjust;
2375 warn_padded = 0;
2376 layout_type (root->frame_type);
2377 warn_padded = save_warn_padded;
2378 layout_decl (root->frame_decl, 0);
2380 /* Remove root->frame_decl from root->new_local_var_chain, so
2381 that we can declare it also in the lexical blocks, which
2382 helps ensure virtual regs that end up appearing in its RTL
2383 expression get substituted in instantiate_virtual_regs(). */
2384 for (adjust = &root->new_local_var_chain;
2385 *adjust != root->frame_decl;
2386 adjust = &DECL_CHAIN (*adjust))
2387 gcc_assert (DECL_CHAIN (*adjust));
2388 *adjust = DECL_CHAIN (*adjust);
2390 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2391 declare_vars (root->frame_decl,
2392 gimple_seq_first_stmt (gimple_body (context)), true);
2395 /* If any parameters were referenced non-locally, then we need to
2396 insert a copy. Likewise, if any variables were referenced by
2397 pointer, we need to initialize the address. */
2398 if (root->any_parm_remapped)
2400 tree p;
2401 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2403 tree field, x, y;
2405 field = lookup_field_for_decl (root, p, NO_INSERT);
2406 if (!field)
2407 continue;
2409 if (use_pointer_in_frame (p))
2410 x = build_addr (p, context);
2411 else
2412 x = p;
2414 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2415 root->frame_decl, field, NULL_TREE);
2416 stmt = gimple_build_assign (y, x);
2417 gimple_seq_add_stmt (&stmt_list, stmt);
2418 /* If the assignment is from a non-register the stmt is
2419 not valid gimple. Make it so by using a temporary instead. */
2420 if (!is_gimple_reg (x)
2421 && is_gimple_reg_type (TREE_TYPE (x)))
2423 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2424 x = init_tmp_var (root, x, &gsi);
2425 gimple_assign_set_rhs1 (stmt, x);
2430 /* If a chain_field was created, then it needs to be initialized
2431 from chain_decl. */
2432 if (root->chain_field)
2434 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2435 root->frame_decl, root->chain_field, NULL_TREE);
2436 stmt = gimple_build_assign (x, get_chain_decl (root));
2437 gimple_seq_add_stmt (&stmt_list, stmt);
2440 /* If trampolines were created, then we need to initialize them. */
2441 if (root->any_tramp_created)
2443 struct nesting_info *i;
2444 for (i = root->inner; i ; i = i->next)
2446 tree arg1, arg2, arg3, x, field;
2448 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2449 if (!field)
2450 continue;
2452 gcc_assert (DECL_STATIC_CHAIN (i->context));
2453 arg3 = build_addr (root->frame_decl, context);
2455 arg2 = build_addr (i->context, context);
2457 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2458 root->frame_decl, field, NULL_TREE);
2459 arg1 = build_addr (x, context);
2461 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2462 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2463 gimple_seq_add_stmt (&stmt_list, stmt);
2467 /* If we created initialization statements, insert them. */
2468 if (stmt_list)
2470 gimple bind;
2471 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2472 bind = gimple_seq_first_stmt (gimple_body (context));
2473 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2474 gimple_bind_set_body (bind, stmt_list);
2477 /* If a chain_decl was created, then it needs to be registered with
2478 struct function so that it gets initialized from the static chain
2479 register at the beginning of the function. */
2480 sf = DECL_STRUCT_FUNCTION (root->context);
2481 sf->static_chain_decl = root->chain_decl;
2483 /* Similarly for the non-local goto save area. */
2484 if (root->nl_goto_field)
2486 sf->nonlocal_goto_save_area
2487 = get_frame_field (root, context, root->nl_goto_field, NULL);
2488 sf->has_nonlocal_label = 1;
2491 /* Make sure all new local variables get inserted into the
2492 proper BIND_EXPR. */
2493 if (root->new_local_var_chain)
2494 declare_vars (root->new_local_var_chain,
2495 gimple_seq_first_stmt (gimple_body (root->context)),
2496 false);
2498 if (root->debug_var_chain)
2500 tree debug_var;
2501 gimple scope;
2503 remap_vla_decls (DECL_INITIAL (root->context), root);
2505 for (debug_var = root->debug_var_chain; debug_var;
2506 debug_var = DECL_CHAIN (debug_var))
2507 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2508 break;
2510 /* If there are any debug decls with variable length types,
2511 remap those types using other debug_var_chain variables. */
2512 if (debug_var)
2514 struct nesting_copy_body_data id;
2516 memset (&id, 0, sizeof (id));
2517 id.cb.copy_decl = nesting_copy_decl;
2518 id.cb.decl_map = pointer_map_create ();
2519 id.root = root;
2521 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2522 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2524 tree type = TREE_TYPE (debug_var);
2525 tree newt, t = type;
2526 struct nesting_info *i;
2528 for (i = root; i; i = i->outer)
2529 if (variably_modified_type_p (type, i->context))
2530 break;
2532 if (i == NULL)
2533 continue;
2535 id.cb.src_fn = i->context;
2536 id.cb.dst_fn = i->context;
2537 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2539 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2540 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2542 newt = TREE_TYPE (newt);
2543 t = TREE_TYPE (t);
2545 if (TYPE_NAME (newt)
2546 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2547 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2548 && newt != t
2549 && TYPE_NAME (newt) == TYPE_NAME (t))
2550 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2553 pointer_map_destroy (id.cb.decl_map);
2556 scope = gimple_seq_first_stmt (gimple_body (root->context));
2557 if (gimple_bind_block (scope))
2558 declare_vars (root->debug_var_chain, scope, true);
2559 else
2560 BLOCK_VARS (DECL_INITIAL (root->context))
2561 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2562 root->debug_var_chain);
2565 /* Fold the rewritten MEM_REF trees. */
2566 pointer_set_traverse (root->mem_refs, fold_mem_refs, NULL);
2568 /* Dump the translated tree function. */
2569 if (dump_file)
2571 fputs ("\n\n", dump_file);
2572 dump_function_to_file (root->context, dump_file, dump_flags);
2576 static void
2577 finalize_nesting_tree (struct nesting_info *root)
2579 struct nesting_info *n;
2580 FOR_EACH_NEST_INFO (n, root)
2581 finalize_nesting_tree_1 (n);
2584 /* Unnest the nodes and pass them to cgraph. */
2586 static void
2587 unnest_nesting_tree_1 (struct nesting_info *root)
2589 struct cgraph_node *node = cgraph_get_node (root->context);
2591 /* For nested functions update the cgraph to reflect unnesting.
2592 We also delay finalizing of these functions up to this point. */
2593 if (node->origin)
2595 cgraph_unnest_node (node);
2596 cgraph_finalize_function (root->context, true);
2600 static void
2601 unnest_nesting_tree (struct nesting_info *root)
2603 struct nesting_info *n;
2604 FOR_EACH_NEST_INFO (n, root)
2605 unnest_nesting_tree_1 (n);
2608 /* Free the data structures allocated during this pass. */
2610 static void
2611 free_nesting_tree (struct nesting_info *root)
2613 struct nesting_info *node, *next;
2615 node = iter_nestinfo_start (root);
2618 next = iter_nestinfo_next (node);
2619 pointer_map_destroy (node->var_map);
2620 pointer_map_destroy (node->field_map);
2621 pointer_set_destroy (node->mem_refs);
2622 free (node);
2623 node = next;
2625 while (node);
2628 /* Gimplify a function and all its nested functions. */
2629 static void
2630 gimplify_all_functions (struct cgraph_node *root)
2632 struct cgraph_node *iter;
2633 if (!gimple_body (root->decl))
2634 gimplify_function_tree (root->decl);
2635 for (iter = root->nested; iter; iter = iter->next_nested)
2636 gimplify_all_functions (iter);
2639 /* Main entry point for this pass. Process FNDECL and all of its nested
2640 subroutines and turn them into something less tightly bound. */
2642 void
2643 lower_nested_functions (tree fndecl)
2645 struct cgraph_node *cgn;
2646 struct nesting_info *root;
2648 /* If there are no nested functions, there's nothing to do. */
2649 cgn = cgraph_get_node (fndecl);
2650 if (!cgn->nested)
2651 return;
2653 gimplify_all_functions (cgn);
2655 dump_file = dump_begin (TDI_nested, &dump_flags);
2656 if (dump_file)
2657 fprintf (dump_file, "\n;; Function %s\n\n",
2658 lang_hooks.decl_printable_name (fndecl, 2));
2660 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2661 root = create_nesting_tree (cgn);
2663 walk_all_functions (convert_nonlocal_reference_stmt,
2664 convert_nonlocal_reference_op,
2665 root);
2666 walk_all_functions (convert_local_reference_stmt,
2667 convert_local_reference_op,
2668 root);
2669 walk_all_functions (convert_nl_goto_reference, NULL, root);
2670 walk_all_functions (convert_nl_goto_receiver, NULL, root);
2672 convert_all_function_calls (root);
2673 finalize_nesting_tree (root);
2674 unnest_nesting_tree (root);
2676 free_nesting_tree (root);
2677 bitmap_obstack_release (&nesting_info_bitmap_obstack);
2679 if (dump_file)
2681 dump_end (TDI_nested, dump_file);
2682 dump_file = NULL;
2686 #include "gt-tree-nested.h"