gcc/tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stringpool.h"
26 #include "stor-layout.h"
27 #include "tm_p.h"
28 #include "function.h"
29 #include "tree-dump.h"
30 #include "tree-inline.h"
31 #include "pointer-set.h"
32 #include "basic-block.h"
33 #include "tree-ssa-alias.h"
34 #include "internal-fn.h"
35 #include "gimple-expr.h"
36 #include "is-a.h"
37 #include "gimple.h"
38 #include "gimplify.h"
39 #include "gimple-iterator.h"
40 #include "gimple-walk.h"
41 #include "tree-iterator.h"
42 #include "bitmap.h"
43 #include "cgraph.h"
44 #include "tree-cfg.h"
45 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
46 #include "langhooks.h"
47 #include "gimple-low.h"
50 /* The object of this pass is to lower the representation of a set of nested
51 functions in order to expose all of the gory details of the various
52 nonlocal references. We want to do this sooner rather than later, in
53 order to give us more freedom in emitting all of the functions in question.
55 Back in olden times, when gcc was young, we developed an insanely
56 complicated scheme whereby variables which were referenced nonlocally
57 were forced to live in the stack of the declaring function, and then
58 the nested functions magically discovered where these variables were
59 placed. In order for this scheme to function properly, it required
60 that the outer function be partially expanded, then we switch to
61 compiling the inner function, and once done with those we switch back
62 to compiling the outer function. Such delicate ordering requirements
63 make it difficult to do whole translation unit optimizations
64 involving such functions.
66 The implementation here is much more direct. Everything that can be
67 referenced by an inner function is a member of an explicitly created
68 structure herein called the "nonlocal frame struct". The incoming
69 static chain for a nested function is a pointer to this struct in
70 the parent. In this way, we settle on known offsets from a known
71 base, and so are decoupled from the logic that places objects in the
72 function's stack frame. More importantly, we don't have to wait for
73 that to happen -- since the compilation of the inner function is no
74 longer tied to a real stack frame, the nonlocal frame struct can be
75 allocated anywhere. Which means that the outer function is now
76 inlinable.
78 Theory of operation here is very simple. Iterate over all the
79 statements in all the functions (depth first) several times,
80 allocating structures and fields on demand. In general we want to
81 examine inner functions first, so that we can avoid making
82 unnecessary changes to outer functions.
84 The order of the passes matters a bit, in that later passes will be
85 skipped if it is discovered that the functions don't actually interact
86 at all. That is, they're nested in the lexical sense but could have
87 been written as independent functions without change. */
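/* As an illustrative sketch only (hypothetical user code with made-up struct
   and parameter names, not literal output of this pass), consider a GNU C
   nested function that reads a local of its parent:

     int foo (void)
     {
       int i = 42;
       int bar (void) { return i; }   // nonlocal reference to I
       return bar ();
     }

   Conceptually, the lowering described above produces something like:

     struct FRAME_foo { int i; };               // the nonlocal frame struct

     static int bar (struct FRAME_foo *chain)   // incoming static chain
     {
       return chain->i;                         // I becomes CHAIN->I
     }

     int foo (void)
     {
       struct FRAME_foo frame;                  // FRAME decl in the parent
       frame.i = 42;
       return bar (&frame);                     // chain = address of the frame
     }

   Because FRAME_foo can be allocated anywhere, FOO itself remains
   inlinable.  */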
90 struct nesting_info
92 struct nesting_info *outer;
93 struct nesting_info *inner;
94 struct nesting_info *next;
96 hash_map<tree, tree> *field_map;
97 hash_map<tree, tree> *var_map;
98 hash_set<tree *> *mem_refs;
99 bitmap suppress_expansion;
101 tree context;
102 tree new_local_var_chain;
103 tree debug_var_chain;
104 tree frame_type;
105 tree frame_decl;
106 tree chain_field;
107 tree chain_decl;
108 tree nl_goto_field;
110 bool any_parm_remapped;
111 bool any_tramp_created;
112 char static_chain_added;
116 /* Iterate over the nesting tree, starting with ROOT, depth first. */
118 static inline struct nesting_info *
119 iter_nestinfo_start (struct nesting_info *root)
121 while (root->inner)
122 root = root->inner;
123 return root;
126 static inline struct nesting_info *
127 iter_nestinfo_next (struct nesting_info *node)
129 if (node->next)
130 return iter_nestinfo_start (node->next);
131 return node->outer;
134 #define FOR_EACH_NEST_INFO(I, ROOT) \
135 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
137 /* Obstack used for the bitmaps in the struct above. */
138 static struct bitmap_obstack nesting_info_bitmap_obstack;
141 /* We're working in so many different function contexts simultaneously
142 that create_tmp_var is dangerous. Prevent mishap. */
143 #define create_tmp_var cant_use_create_tmp_var_here_dummy
145 /* Like create_tmp_var, except record the variable for registration at
146 the given nesting level. */
148 static tree
149 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
151 tree tmp_var;
153 /* If the type is of variable size or a type which must be created by the
154 frontend, something is wrong. Note that we explicitly allow
155 incomplete types here, since we create them ourselves. */
156 gcc_assert (!TREE_ADDRESSABLE (type));
157 gcc_assert (!TYPE_SIZE_UNIT (type)
158 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
160 tmp_var = create_tmp_var_raw (type, prefix);
161 DECL_CONTEXT (tmp_var) = info->context;
162 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
163 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
164 if (TREE_CODE (type) == COMPLEX_TYPE
165 || TREE_CODE (type) == VECTOR_TYPE)
166 DECL_GIMPLE_REG_P (tmp_var) = 1;
168 info->new_local_var_chain = tmp_var;
170 return tmp_var;
173 /* Take the address of EXP to be used within function CONTEXT.
174 Mark it for addressability as necessary. */
176 tree
177 build_addr (tree exp, tree context)
179 tree base = exp;
180 tree save_context;
181 tree retval;
183 while (handled_component_p (base))
184 base = TREE_OPERAND (base, 0);
186 if (DECL_P (base))
187 TREE_ADDRESSABLE (base) = 1;
189 /* Building the ADDR_EXPR will compute a set of properties for
190 that ADDR_EXPR. Those properties are unfortunately context
191 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
193 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
194 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
195 way the properties of the ADDR_EXPR are computed properly. */
196 save_context = current_function_decl;
197 current_function_decl = context;
198 retval = build_fold_addr_expr (exp);
199 current_function_decl = save_context;
200 return retval;
203 /* Insert FIELD into TYPE, sorted by alignment requirements. */
205 void
206 insert_field_into_struct (tree type, tree field)
208 tree *p;
210 DECL_CONTEXT (field) = type;
212 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
213 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
214 break;
216 DECL_CHAIN (field) = *p;
217 *p = field;
219 /* Set correct alignment for frame struct type. */
220 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
221 TYPE_ALIGN (type) = DECL_ALIGN (field);
224 /* Build or return the RECORD_TYPE that describes the frame state that is
225 shared between INFO->CONTEXT and its nested functions. This record will
226 not be complete until finalize_nesting_tree; up until that point we'll
227 be adding fields as necessary.
229 We also build the DECL that represents this frame in the function. */
231 static tree
232 get_frame_type (struct nesting_info *info)
234 tree type = info->frame_type;
235 if (!type)
237 char *name;
239 type = make_node (RECORD_TYPE);
241 name = concat ("FRAME.",
242 IDENTIFIER_POINTER (DECL_NAME (info->context)),
243 NULL);
244 TYPE_NAME (type) = get_identifier (name);
245 free (name);
247 info->frame_type = type;
248 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
249 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
251 /* ??? Always make it addressable for now, since it is meant to
252 be pointed to by the static chain pointer. This pessimizes
253 when it turns out that no static chains are needed because
254 the nested functions referencing non-local variables are not
255 reachable, but the true pessimization is to create the non-
256 local frame structure in the first place. */
257 TREE_ADDRESSABLE (info->frame_decl) = 1;
259 return type;
262 /* Return true if DECL should be referenced by pointer in the non-local
263 frame structure. */
265 static bool
266 use_pointer_in_frame (tree decl)
268 if (TREE_CODE (decl) == PARM_DECL)
270 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
271 sized decls, and inefficient to copy large aggregates. Don't bother
272 moving anything but scalar variables. */
273 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
275 else
277 /* Variable sized types make things "interesting" in the frame. */
278 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
282 /* Given DECL, a non-locally accessed variable, find or create a field
283 in the non-local frame structure for the given nesting context. */
285 static tree
286 lookup_field_for_decl (struct nesting_info *info, tree decl,
287 enum insert_option insert)
289 if (insert == NO_INSERT)
291 tree *slot = info->field_map->get (decl);
292 return slot ? *slot : NULL_TREE;
295 tree *slot = &info->field_map->get_or_insert (decl);
296 if (!*slot)
298 tree field = make_node (FIELD_DECL);
299 DECL_NAME (field) = DECL_NAME (decl);
301 if (use_pointer_in_frame (decl))
303 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
304 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
305 DECL_NONADDRESSABLE_P (field) = 1;
307 else
309 TREE_TYPE (field) = TREE_TYPE (decl);
310 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
311 DECL_ALIGN (field) = DECL_ALIGN (decl);
312 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
313 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
314 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
315 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
318 insert_field_into_struct (get_frame_type (info), field);
319 *slot = field;
321 if (TREE_CODE (decl) == PARM_DECL)
322 info->any_parm_remapped = true;
325 return *slot;
328 /* Build or return the variable that holds the static chain within
329 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
331 static tree
332 get_chain_decl (struct nesting_info *info)
334 tree decl = info->chain_decl;
336 if (!decl)
338 tree type;
340 type = get_frame_type (info->outer);
341 type = build_pointer_type (type);
343 /* Note that this variable is *not* entered into any BIND_EXPR;
344 the construction of this variable is handled specially in
345 expand_function_start and initialize_inlined_parameters.
346 Note also that it's represented as a parameter. This is closer
347 to the truth, since the initial value does come from
348 the caller. */
349 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
350 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
351 DECL_ARTIFICIAL (decl) = 1;
352 DECL_IGNORED_P (decl) = 1;
353 TREE_USED (decl) = 1;
354 DECL_CONTEXT (decl) = info->context;
355 DECL_ARG_TYPE (decl) = type;
357 /* Tell tree-inline.c that we never write to this variable, so
358 it can copy-prop the replacement value immediately. */
359 TREE_READONLY (decl) = 1;
361 info->chain_decl = decl;
363 if (dump_file
364 && (dump_flags & TDF_DETAILS)
365 && !DECL_STATIC_CHAIN (info->context))
366 fprintf (dump_file, "Setting static-chain for %s\n",
367 lang_hooks.decl_printable_name (info->context, 2));
369 DECL_STATIC_CHAIN (info->context) = 1;
371 return decl;
374 /* Build or return the field within the non-local frame state that holds
375 the static chain for INFO->CONTEXT. This is the way to walk back up
376 multiple nesting levels. */
378 static tree
379 get_chain_field (struct nesting_info *info)
381 tree field = info->chain_field;
383 if (!field)
385 tree type = build_pointer_type (get_frame_type (info->outer));
387 field = make_node (FIELD_DECL);
388 DECL_NAME (field) = get_identifier ("__chain");
389 TREE_TYPE (field) = type;
390 DECL_ALIGN (field) = TYPE_ALIGN (type);
391 DECL_NONADDRESSABLE_P (field) = 1;
393 insert_field_into_struct (get_frame_type (info), field);
395 info->chain_field = field;
397 if (dump_file
398 && (dump_flags & TDF_DETAILS)
399 && !DECL_STATIC_CHAIN (info->context))
400 fprintf (dump_file, "Setting static-chain for %s\n",
401 lang_hooks.decl_printable_name (info->context, 2));
403 DECL_STATIC_CHAIN (info->context) = 1;
405 return field;
408 /* Initialize a new temporary with the GIMPLE_CALL CALL. */
410 static tree
411 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
412 gimple call)
414 tree t;
416 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
417 gimple_call_set_lhs (call, t);
418 if (! gsi_end_p (*gsi))
419 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
420 gsi_insert_before (gsi, call, GSI_SAME_STMT);
422 return t;
426 /* Copy EXP into a temporary. Allocate the temporary in the context of
427 INFO and insert the initialization statement before GSI. */
429 static tree
430 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
432 tree t;
433 gimple stmt;
435 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
436 stmt = gimple_build_assign (t, exp);
437 if (! gsi_end_p (*gsi))
438 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
439 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
441 return t;
445 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
447 static tree
448 gsi_gimplify_val (struct nesting_info *info, tree exp,
449 gimple_stmt_iterator *gsi)
451 if (is_gimple_val (exp))
452 return exp;
453 else
454 return init_tmp_var (info, exp, gsi);
457 /* Similarly, but copy from the temporary and insert the statement
458 after the iterator. */
460 static tree
461 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
463 tree t;
464 gimple stmt;
466 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
467 stmt = gimple_build_assign (exp, t);
468 if (! gsi_end_p (*gsi))
469 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
470 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
472 return t;
475 /* Build or return the type used to represent a nested function trampoline. */
477 static GTY(()) tree trampoline_type;
479 static tree
480 get_trampoline_type (struct nesting_info *info)
482 unsigned align, size;
483 tree t;
485 if (trampoline_type)
486 return trampoline_type;
488 align = TRAMPOLINE_ALIGNMENT;
489 size = TRAMPOLINE_SIZE;
491 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
492 then allocate extra space so that we can do dynamic alignment. */
493 if (align > STACK_BOUNDARY)
495 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
496 align = STACK_BOUNDARY;
499 t = build_index_type (size_int (size - 1));
500 t = build_array_type (char_type_node, t);
501 t = build_decl (DECL_SOURCE_LOCATION (info->context),
502 FIELD_DECL, get_identifier ("__data"), t);
503 DECL_ALIGN (t) = align;
504 DECL_USER_ALIGN (t) = 1;
506 trampoline_type = make_node (RECORD_TYPE);
507 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
508 TYPE_FIELDS (trampoline_type) = t;
509 layout_type (trampoline_type);
510 DECL_CONTEXT (t) = trampoline_type;
512 return trampoline_type;
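/* Illustrative sketch of when the trampoline machinery above is needed
   (hypothetical user code, not part of this file): taking the address of a
   nested function forces a trampoline, because a plain function pointer has
   nowhere to carry the static chain.

     void each (int *p, int n, void (*fn) (int))
     {
       for (int i = 0; i < n; i++)
         fn (p[i]);
     }

     void foo (int *p, int n)
     {
       int sum = 0;
       void add (int x) { sum += x; }   // reads the parent's SUM
       each (p, n, add);                // &ADD points at a trampoline
     }

   The trampoline is a small block of code kept in FOO's frame struct (in the
   __data field laid out above) that loads the static chain and jumps to the
   real ADD.  */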
515 /* Given DECL, a nested function, find or create a field in the non-local
516 frame structure for a trampoline for this function. */
518 static tree
519 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
520 enum insert_option insert)
522 if (insert == NO_INSERT)
524 tree *slot = info->var_map->get (decl);
525 return slot ? *slot : NULL_TREE;
528 tree *slot = &info->var_map->get_or_insert (decl);
529 if (!*slot)
531 tree field = make_node (FIELD_DECL);
532 DECL_NAME (field) = DECL_NAME (decl);
533 TREE_TYPE (field) = get_trampoline_type (info);
534 TREE_ADDRESSABLE (field) = 1;
536 insert_field_into_struct (get_frame_type (info), field);
537 *slot = field;
539 info->any_tramp_created = true;
542 return *slot;
545 /* Build or return the field within the non-local frame state that holds
546 the non-local goto "jmp_buf". The buffer itself is maintained by the
547 rtl middle-end as dynamic stack space is allocated. */
549 static tree
550 get_nl_goto_field (struct nesting_info *info)
552 tree field = info->nl_goto_field;
553 if (!field)
555 unsigned size;
556 tree type;
558 /* For __builtin_nonlocal_goto, we need N words. The first is the
559 frame pointer, the rest is for the target's stack pointer save
560 area. The number of words is controlled by STACK_SAVEAREA_MODE;
561 not the best interface, but it'll do for now. */
562 if (Pmode == ptr_mode)
563 type = ptr_type_node;
564 else
565 type = lang_hooks.types.type_for_mode (Pmode, 1);
567 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
568 size = size / GET_MODE_SIZE (Pmode);
569 size = size + 1;
571 type = build_array_type
572 (type, build_index_type (size_int (size)));
574 field = make_node (FIELD_DECL);
575 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
576 TREE_TYPE (field) = type;
577 DECL_ALIGN (field) = TYPE_ALIGN (type);
578 TREE_ADDRESSABLE (field) = 1;
580 insert_field_into_struct (get_frame_type (info), field);
582 info->nl_goto_field = field;
585 return field;
588 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
590 static void
591 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
592 struct nesting_info *info, gimple_seq *pseq)
594 struct walk_stmt_info wi;
596 memset (&wi, 0, sizeof (wi));
597 wi.info = info;
598 wi.val_only = true;
599 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
603 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
605 static inline void
606 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
607 struct nesting_info *info)
609 gimple_seq body = gimple_body (info->context);
610 walk_body (callback_stmt, callback_op, info, &body);
611 gimple_set_body (info->context, body);
614 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
616 static void
617 walk_gimple_omp_for (gimple for_stmt,
618 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
619 struct nesting_info *info)
621 struct walk_stmt_info wi;
622 gimple_seq seq;
623 tree t;
624 size_t i;
626 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
628 seq = NULL;
629 memset (&wi, 0, sizeof (wi));
630 wi.info = info;
631 wi.gsi = gsi_last (seq);
633 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
635 wi.val_only = false;
636 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
637 &wi, NULL);
638 wi.val_only = true;
639 wi.is_lhs = false;
640 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
641 &wi, NULL);
643 wi.val_only = true;
644 wi.is_lhs = false;
645 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
646 &wi, NULL);
648 t = gimple_omp_for_incr (for_stmt, i);
649 gcc_assert (BINARY_CLASS_P (t));
650 wi.val_only = false;
651 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
652 wi.val_only = true;
653 wi.is_lhs = false;
654 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
657 seq = gsi_seq (wi.gsi);
658 if (!gimple_seq_empty_p (seq))
660 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
661 annotate_all_with_location (seq, gimple_location (for_stmt));
662 gimple_seq_add_seq (&pre_body, seq);
663 gimple_omp_for_set_pre_body (for_stmt, pre_body);
667 /* Similarly for ROOT and all functions nested underneath, depth first. */
669 static void
670 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
671 struct nesting_info *root)
673 struct nesting_info *n;
674 FOR_EACH_NEST_INFO (n, root)
675 walk_function (callback_stmt, callback_op, n);
679 /* We have to check for a fairly pathological case. The operands of a
680 nested function are to be interpreted in the context of the enclosing
681 function. So if any are variably-sized, they will get remapped when the
682 enclosing function is inlined. But that remapping would also have to be
683 done in the types of the PARM_DECLs of the nested function, meaning the
684 argument types of that function will disagree with the arguments in the
685 calls to that function. So we'd either have to make a copy of the nested
686 function corresponding to each time the enclosing function was inlined or
687 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
688 function. The former is not practical. The latter would still require
689 detecting this case to know when to add the conversions. So, for now at
690 least, we don't inline such an enclosing function.
692 We have to do that check recursively, so this function returns whether
693 FNDECL has such a nested function. ORIG_FNDECL is the function we were
694 trying to inline; it is used to check whether any argument type is
695 variably modified by anything in it.
697 It would be better to do this in tree-inline.c so that we could give
698 the appropriate warning for why a function can't be inlined, but that's
699 too late since the nesting structure has already been flattened and
700 adding a flag just to record this fact seems a waste of a flag. */
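/* Hypothetical example of the situation described above (illustration only):

     void outer (int n)
     {
       void inner (int a[n]) { a[0] = n; }   // parameter type depends on N
       int buf[n];
       inner (buf);
     }

   If OUTER were inlined, N would be remapped in the inlined body, but the
   type of INNER's PARM_DECL would still mention the original N, so INNER's
   declared argument types and the arguments at its call sites would
   disagree.  Hence create_nesting_tree below marks OUTER uninlinable.  */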
702 static bool
703 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
705 struct cgraph_node *cgn = cgraph_node::get (fndecl);
706 tree arg;
708 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
710 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
711 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
712 return true;
714 if (check_for_nested_with_variably_modified (cgn->decl,
715 orig_fndecl))
716 return true;
719 return false;
722 /* Construct our local data structure describing the function nesting
723 tree rooted by CGN. */
725 static struct nesting_info *
726 create_nesting_tree (struct cgraph_node *cgn)
728 struct nesting_info *info = XCNEW (struct nesting_info);
729 info->field_map = new hash_map<tree, tree>;
730 info->var_map = new hash_map<tree, tree>;
731 info->mem_refs = new hash_set<tree *>;
732 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
733 info->context = cgn->decl;
735 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
737 struct nesting_info *sub = create_nesting_tree (cgn);
738 sub->outer = info;
739 sub->next = info->inner;
740 info->inner = sub;
743 /* See the comment at check_for_nested_with_variably_modified for a
744 discussion of why this check has to be here. */
745 if (check_for_nested_with_variably_modified (info->context, info->context))
746 DECL_UNINLINABLE (info->context) = true;
748 return info;
751 /* Return an expression computing the static chain for TARGET_CONTEXT
752 from INFO->CONTEXT. Insert any necessary computations before GSI. */
754 static tree
755 get_static_chain (struct nesting_info *info, tree target_context,
756 gimple_stmt_iterator *gsi)
758 struct nesting_info *i;
759 tree x;
761 if (info->context == target_context)
763 x = build_addr (info->frame_decl, target_context);
765 else
767 x = get_chain_decl (info);
769 for (i = info->outer; i->context != target_context; i = i->outer)
771 tree field = get_chain_field (i);
773 x = build_simple_mem_ref (x);
774 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
775 x = init_tmp_var (info, x, gsi);
779 return x;
783 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
784 frame as seen from INFO->CONTEXT. Insert any necessary computations
785 before GSI. */
787 static tree
788 get_frame_field (struct nesting_info *info, tree target_context,
789 tree field, gimple_stmt_iterator *gsi)
791 struct nesting_info *i;
792 tree x;
794 if (info->context == target_context)
796 /* Make sure frame_decl gets created. */
797 (void) get_frame_type (info);
798 x = info->frame_decl;
800 else
802 x = get_chain_decl (info);
804 for (i = info->outer; i->context != target_context; i = i->outer)
806 tree field = get_chain_field (i);
808 x = build_simple_mem_ref (x);
809 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
810 x = init_tmp_var (info, x, gsi);
813 x = build_simple_mem_ref (x);
816 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
817 return x;
820 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
822 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
823 in the nested function with DECL_VALUE_EXPR set to reference the true
824 variable in the parent function. This is used both for debug info
825 and in OpenMP lowering. */
827 static tree
828 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
830 tree target_context;
831 struct nesting_info *i;
832 tree x, field, new_decl;
834 tree *slot = &info->var_map->get_or_insert (decl);
836 if (*slot)
837 return *slot;
839 target_context = decl_function_context (decl);
841 /* A copy of the code in get_frame_field, but without the temporaries. */
842 if (info->context == target_context)
844 /* Make sure frame_decl gets created. */
845 (void) get_frame_type (info);
846 x = info->frame_decl;
847 i = info;
849 else
851 x = get_chain_decl (info);
852 for (i = info->outer; i->context != target_context; i = i->outer)
854 field = get_chain_field (i);
855 x = build_simple_mem_ref (x);
856 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
858 x = build_simple_mem_ref (x);
861 field = lookup_field_for_decl (i, decl, INSERT);
862 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
863 if (use_pointer_in_frame (decl))
864 x = build_simple_mem_ref (x);
866 /* ??? We should be remapping types as well, surely. */
867 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
868 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
869 DECL_CONTEXT (new_decl) = info->context;
870 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
871 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
872 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
873 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
874 TREE_READONLY (new_decl) = TREE_READONLY (decl);
875 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
876 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
877 if ((TREE_CODE (decl) == PARM_DECL
878 || TREE_CODE (decl) == RESULT_DECL
879 || TREE_CODE (decl) == VAR_DECL)
880 && DECL_BY_REFERENCE (decl))
881 DECL_BY_REFERENCE (new_decl) = 1;
883 SET_DECL_VALUE_EXPR (new_decl, x);
884 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
886 *slot = new_decl;
887 DECL_CHAIN (new_decl) = info->debug_var_chain;
888 info->debug_var_chain = new_decl;
890 if (!optimize
891 && info->context != target_context
892 && variably_modified_type_p (TREE_TYPE (decl), NULL))
893 note_nonlocal_vla_type (info, TREE_TYPE (decl));
895 return new_decl;
899 /* Callback for walk_gimple_stmt, rewrite all references to VAR
900 and PARM_DECLs that belong to outer functions.
902 The rewrite will involve some number of structure accesses back up
903 the static chain. E.g. for a variable FOO up one nesting level it'll
904 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
905 indirections apply to decls for which use_pointer_in_frame is true. */
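/* A sketch of the two-level case mentioned above (made-up names, shown only
   to illustrate the shape of the rewrite):

     void a (void)
     {
       int x = 0;
       void b (void)
       {
         void c (void) { x++; }   // X lives two levels up, in A
         c ();
       }
       b ();
     }

   Inside C, the use of X becomes, conceptually, CHAIN->__chain->x: one load
   through C's static chain to reach B's frame, then through B's __chain
   field (see get_chain_field) to reach A's frame, where the field for X
   lives.  */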
907 static tree
908 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
910 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
911 struct nesting_info *const info = (struct nesting_info *) wi->info;
912 tree t = *tp;
914 *walk_subtrees = 0;
915 switch (TREE_CODE (t))
917 case VAR_DECL:
918 /* Non-automatic variables are never processed. */
919 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
920 break;
921 /* FALLTHRU */
923 case PARM_DECL:
924 if (decl_function_context (t) != info->context)
926 tree x;
927 wi->changed = true;
929 x = get_nonlocal_debug_decl (info, t);
930 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
932 tree target_context = decl_function_context (t);
933 struct nesting_info *i;
934 for (i = info->outer; i->context != target_context; i = i->outer)
935 continue;
936 x = lookup_field_for_decl (i, t, INSERT);
937 x = get_frame_field (info, target_context, x, &wi->gsi);
938 if (use_pointer_in_frame (t))
940 x = init_tmp_var (info, x, &wi->gsi);
941 x = build_simple_mem_ref (x);
945 if (wi->val_only)
947 if (wi->is_lhs)
948 x = save_tmp_var (info, x, &wi->gsi);
949 else
950 x = init_tmp_var (info, x, &wi->gsi);
953 *tp = x;
955 break;
957 case LABEL_DECL:
958 /* We're taking the address of a label from a parent function, but
959 this is not itself a non-local goto. Mark the label such that it
960 will not be deleted, much as we would with a label address in
961 static storage. */
962 if (decl_function_context (t) != info->context)
963 FORCED_LABEL (t) = 1;
964 break;
966 case ADDR_EXPR:
968 bool save_val_only = wi->val_only;
970 wi->val_only = false;
971 wi->is_lhs = false;
972 wi->changed = false;
973 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
974 wi->val_only = true;
976 if (wi->changed)
978 tree save_context;
980 /* If we changed anything, we might no longer be directly
981 referencing a decl. */
982 save_context = current_function_decl;
983 current_function_decl = info->context;
984 recompute_tree_invariant_for_addr_expr (t);
985 current_function_decl = save_context;
987 /* If the callback converted the address argument in a context
988 where we only accept variables (and min_invariant, presumably),
989 then compute the address into a temporary. */
990 if (save_val_only)
991 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
992 t, &wi->gsi);
995 break;
997 case REALPART_EXPR:
998 case IMAGPART_EXPR:
999 case COMPONENT_REF:
1000 case ARRAY_REF:
1001 case ARRAY_RANGE_REF:
1002 case BIT_FIELD_REF:
1003 /* Go down this entire nest and just look at the final prefix and
1004 anything that describes the references. Otherwise, we lose track
1005 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1006 wi->val_only = true;
1007 wi->is_lhs = false;
1008 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1010 if (TREE_CODE (t) == COMPONENT_REF)
1011 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1012 NULL);
1013 else if (TREE_CODE (t) == ARRAY_REF
1014 || TREE_CODE (t) == ARRAY_RANGE_REF)
1016 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1017 wi, NULL);
1018 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1019 wi, NULL);
1020 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1021 wi, NULL);
1024 wi->val_only = false;
1025 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1026 break;
1028 case VIEW_CONVERT_EXPR:
1029 /* Just request to look at the subtrees, leaving val_only and lhs
1030 untouched. This might actually be for !val_only + lhs, in which
1031 case we don't want to force a replacement by a temporary. */
1032 *walk_subtrees = 1;
1033 break;
1035 default:
1036 if (!IS_TYPE_OR_DECL_P (t))
1038 *walk_subtrees = 1;
1039 wi->val_only = true;
1040 wi->is_lhs = false;
1042 break;
1045 return NULL_TREE;
1048 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1049 struct walk_stmt_info *);
1051 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1052 and PARM_DECLs that belong to outer functions. */
1054 static bool
1055 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1057 struct nesting_info *const info = (struct nesting_info *) wi->info;
1058 bool need_chain = false, need_stmts = false;
1059 tree clause, decl;
1060 int dummy;
1061 bitmap new_suppress;
1063 new_suppress = BITMAP_GGC_ALLOC ();
1064 bitmap_copy (new_suppress, info->suppress_expansion);
1066 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1068 switch (OMP_CLAUSE_CODE (clause))
1070 case OMP_CLAUSE_REDUCTION:
1071 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1072 need_stmts = true;
1073 goto do_decl_clause;
1075 case OMP_CLAUSE_LASTPRIVATE:
1076 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1077 need_stmts = true;
1078 goto do_decl_clause;
1080 case OMP_CLAUSE_LINEAR:
1081 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1082 need_stmts = true;
1083 wi->val_only = true;
1084 wi->is_lhs = false;
1085 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1086 &dummy, wi);
1087 goto do_decl_clause;
1089 case OMP_CLAUSE_PRIVATE:
1090 case OMP_CLAUSE_FIRSTPRIVATE:
1091 case OMP_CLAUSE_COPYPRIVATE:
1092 case OMP_CLAUSE_SHARED:
1093 do_decl_clause:
1094 decl = OMP_CLAUSE_DECL (clause);
1095 if (TREE_CODE (decl) == VAR_DECL
1096 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1097 break;
1098 if (decl_function_context (decl) != info->context)
1100 bitmap_set_bit (new_suppress, DECL_UID (decl));
1101 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1102 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1103 need_chain = true;
1105 break;
1107 case OMP_CLAUSE_SCHEDULE:
1108 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1109 break;
1110 /* FALLTHRU */
1111 case OMP_CLAUSE_FINAL:
1112 case OMP_CLAUSE_IF:
1113 case OMP_CLAUSE_NUM_THREADS:
1114 case OMP_CLAUSE_DEPEND:
1115 case OMP_CLAUSE_DEVICE:
1116 case OMP_CLAUSE_NUM_TEAMS:
1117 case OMP_CLAUSE_THREAD_LIMIT:
1118 case OMP_CLAUSE_SAFELEN:
1119 wi->val_only = true;
1120 wi->is_lhs = false;
1121 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1122 &dummy, wi);
1123 break;
1125 case OMP_CLAUSE_DIST_SCHEDULE:
1126 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1128 wi->val_only = true;
1129 wi->is_lhs = false;
1130 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1131 &dummy, wi);
1133 break;
1135 case OMP_CLAUSE_MAP:
1136 case OMP_CLAUSE_TO:
1137 case OMP_CLAUSE_FROM:
1138 if (OMP_CLAUSE_SIZE (clause))
1140 wi->val_only = true;
1141 wi->is_lhs = false;
1142 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1143 &dummy, wi);
1145 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1146 goto do_decl_clause;
1147 wi->val_only = true;
1148 wi->is_lhs = false;
1149 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1150 wi, NULL);
1151 break;
1153 case OMP_CLAUSE_ALIGNED:
1154 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1156 wi->val_only = true;
1157 wi->is_lhs = false;
1158 convert_nonlocal_reference_op
1159 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1161 /* Like do_decl_clause, but don't add any suppression. */
1162 decl = OMP_CLAUSE_DECL (clause);
1163 if (TREE_CODE (decl) == VAR_DECL
1164 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1165 break;
1166 if (decl_function_context (decl) != info->context)
1168 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1169 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1170 need_chain = true;
1172 break;
1174 case OMP_CLAUSE_NOWAIT:
1175 case OMP_CLAUSE_ORDERED:
1176 case OMP_CLAUSE_DEFAULT:
1177 case OMP_CLAUSE_COPYIN:
1178 case OMP_CLAUSE_COLLAPSE:
1179 case OMP_CLAUSE_UNTIED:
1180 case OMP_CLAUSE_MERGEABLE:
1181 case OMP_CLAUSE_PROC_BIND:
1182 break;
1184 default:
1185 gcc_unreachable ();
1189 info->suppress_expansion = new_suppress;
1191 if (need_stmts)
1192 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1193 switch (OMP_CLAUSE_CODE (clause))
1195 case OMP_CLAUSE_REDUCTION:
1196 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1198 tree old_context
1199 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1200 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1201 = info->context;
1202 walk_body (convert_nonlocal_reference_stmt,
1203 convert_nonlocal_reference_op, info,
1204 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1205 walk_body (convert_nonlocal_reference_stmt,
1206 convert_nonlocal_reference_op, info,
1207 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1208 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1209 = old_context;
1211 break;
1213 case OMP_CLAUSE_LASTPRIVATE:
1214 walk_body (convert_nonlocal_reference_stmt,
1215 convert_nonlocal_reference_op, info,
1216 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1217 break;
1219 case OMP_CLAUSE_LINEAR:
1220 walk_body (convert_nonlocal_reference_stmt,
1221 convert_nonlocal_reference_op, info,
1222 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1223 break;
1225 default:
1226 break;
1229 return need_chain;
1232 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1234 static void
1235 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1237 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1238 type = TREE_TYPE (type);
1240 if (TYPE_NAME (type)
1241 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1242 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1243 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1245 while (POINTER_TYPE_P (type)
1246 || TREE_CODE (type) == VECTOR_TYPE
1247 || TREE_CODE (type) == FUNCTION_TYPE
1248 || TREE_CODE (type) == METHOD_TYPE)
1249 type = TREE_TYPE (type);
1251 if (TREE_CODE (type) == ARRAY_TYPE)
1253 tree domain, t;
1255 note_nonlocal_vla_type (info, TREE_TYPE (type));
1256 domain = TYPE_DOMAIN (type);
1257 if (domain)
1259 t = TYPE_MIN_VALUE (domain);
1260 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1261 && decl_function_context (t) != info->context)
1262 get_nonlocal_debug_decl (info, t);
1263 t = TYPE_MAX_VALUE (domain);
1264 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1265 && decl_function_context (t) != info->context)
1266 get_nonlocal_debug_decl (info, t);
1271 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1272 in BLOCK. */
1274 static void
1275 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1277 tree var;
1279 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1280 if (TREE_CODE (var) == VAR_DECL
1281 && variably_modified_type_p (TREE_TYPE (var), NULL)
1282 && DECL_HAS_VALUE_EXPR_P (var)
1283 && decl_function_context (var) != info->context)
1284 note_nonlocal_vla_type (info, TREE_TYPE (var));
1287 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1288 PARM_DECLs that belong to outer functions. This handles statements
1289 that are not handled via the standard recursion done in
1290 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1291 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1292 operands of STMT have been handled by this function. */
1294 static tree
1295 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1296 struct walk_stmt_info *wi)
1298 struct nesting_info *info = (struct nesting_info *) wi->info;
1299 tree save_local_var_chain;
1300 bitmap save_suppress;
1301 gimple stmt = gsi_stmt (*gsi);
1303 switch (gimple_code (stmt))
1305 case GIMPLE_GOTO:
1306 /* Don't walk non-local gotos for now. */
1307 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1309 wi->val_only = true;
1310 wi->is_lhs = false;
1311 *handled_ops_p = true;
1312 return NULL_TREE;
1314 break;
1316 case GIMPLE_OMP_PARALLEL:
1317 case GIMPLE_OMP_TASK:
1318 save_suppress = info->suppress_expansion;
1319 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1320 wi))
1322 tree c, decl;
1323 decl = get_chain_decl (info);
1324 c = build_omp_clause (gimple_location (stmt),
1325 OMP_CLAUSE_FIRSTPRIVATE);
1326 OMP_CLAUSE_DECL (c) = decl;
1327 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1328 gimple_omp_taskreg_set_clauses (stmt, c);
1331 save_local_var_chain = info->new_local_var_chain;
1332 info->new_local_var_chain = NULL;
1334 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1335 info, gimple_omp_body_ptr (stmt));
1337 if (info->new_local_var_chain)
1338 declare_vars (info->new_local_var_chain,
1339 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1340 false);
1341 info->new_local_var_chain = save_local_var_chain;
1342 info->suppress_expansion = save_suppress;
1343 break;
1345 case GIMPLE_OMP_FOR:
1346 save_suppress = info->suppress_expansion;
1347 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1348 walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
1349 convert_nonlocal_reference_op, info);
1350 walk_body (convert_nonlocal_reference_stmt,
1351 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1352 info->suppress_expansion = save_suppress;
1353 break;
1355 case GIMPLE_OMP_SECTIONS:
1356 save_suppress = info->suppress_expansion;
1357 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1358 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1359 info, gimple_omp_body_ptr (stmt));
1360 info->suppress_expansion = save_suppress;
1361 break;
1363 case GIMPLE_OMP_SINGLE:
1364 save_suppress = info->suppress_expansion;
1365 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1366 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1367 info, gimple_omp_body_ptr (stmt));
1368 info->suppress_expansion = save_suppress;
1369 break;
1371 case GIMPLE_OMP_TARGET:
1372 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
1374 save_suppress = info->suppress_expansion;
1375 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1376 wi);
1377 info->suppress_expansion = save_suppress;
1378 walk_body (convert_nonlocal_reference_stmt,
1379 convert_nonlocal_reference_op, info,
1380 gimple_omp_body_ptr (stmt));
1381 break;
1383 save_suppress = info->suppress_expansion;
1384 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1385 wi))
1387 tree c, decl;
1388 decl = get_chain_decl (info);
1389 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1390 OMP_CLAUSE_DECL (c) = decl;
1391 OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TO;
1392 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1393 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1394 gimple_omp_target_set_clauses (stmt, c);
1397 save_local_var_chain = info->new_local_var_chain;
1398 info->new_local_var_chain = NULL;
1400 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1401 info, gimple_omp_body_ptr (stmt));
1403 if (info->new_local_var_chain)
1404 declare_vars (info->new_local_var_chain,
1405 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1406 false);
1407 info->new_local_var_chain = save_local_var_chain;
1408 info->suppress_expansion = save_suppress;
1409 break;
1411 case GIMPLE_OMP_TEAMS:
1412 save_suppress = info->suppress_expansion;
1413 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1414 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1415 info, gimple_omp_body_ptr (stmt));
1416 info->suppress_expansion = save_suppress;
1417 break;
1419 case GIMPLE_OMP_SECTION:
1420 case GIMPLE_OMP_MASTER:
1421 case GIMPLE_OMP_TASKGROUP:
1422 case GIMPLE_OMP_ORDERED:
1423 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1424 info, gimple_omp_body_ptr (stmt));
1425 break;
1427 case GIMPLE_BIND:
1428 if (!optimize && gimple_bind_block (stmt))
1429 note_nonlocal_block_vlas (info, gimple_bind_block (stmt));
1431 for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
1432 if (TREE_CODE (var) == NAMELIST_DECL)
1434 /* Adjust decls mentioned in NAMELIST_DECL. */
1435 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1436 tree decl;
1437 unsigned int i;
1439 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1441 if (TREE_CODE (decl) == VAR_DECL
1442 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1443 continue;
1444 if (decl_function_context (decl) != info->context)
1445 CONSTRUCTOR_ELT (decls, i)->value
1446 = get_nonlocal_debug_decl (info, decl);
1450 *handled_ops_p = false;
1451 return NULL_TREE;
1453 case GIMPLE_COND:
1454 wi->val_only = true;
1455 wi->is_lhs = false;
1456 *handled_ops_p = false;
1457 return NULL_TREE;
1459 default:
1460 /* For every other statement that we are not interested in
1461 handling here, let the walker traverse the operands. */
1462 *handled_ops_p = false;
1463 return NULL_TREE;
1466 /* We have handled all of STMT's operands, no need to traverse them. */
1467 *handled_ops_p = true;
1468 return NULL_TREE;
1472 /* A subroutine of convert_local_reference. Create a local variable
1473 in the parent function with DECL_VALUE_EXPR set to reference the
1474 field in FRAME. This is used both for debug info and in OpenMP
1475 lowering. */
1477 static tree
1478 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1480 tree x, new_decl;
1482 tree *slot = &info->var_map->get_or_insert (decl);
1483 if (*slot)
1484 return *slot;
1486 /* Make sure frame_decl gets created. */
1487 (void) get_frame_type (info);
1488 x = info->frame_decl;
1489 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1491 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1492 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1493 DECL_CONTEXT (new_decl) = info->context;
1494 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1495 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1496 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1497 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1498 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1499 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1500 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1501 if ((TREE_CODE (decl) == PARM_DECL
1502 || TREE_CODE (decl) == RESULT_DECL
1503 || TREE_CODE (decl) == VAR_DECL)
1504 && DECL_BY_REFERENCE (decl))
1505 DECL_BY_REFERENCE (new_decl) = 1;
1507 SET_DECL_VALUE_EXPR (new_decl, x);
1508 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1509 *slot = new_decl;
1511 DECL_CHAIN (new_decl) = info->debug_var_chain;
1512 info->debug_var_chain = new_decl;
1514 /* Do not emit debug info twice. */
1515 DECL_IGNORED_P (decl) = 1;
1517 return new_decl;
1521 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1522 and PARM_DECLs that were referenced by inner nested functions.
1523 The rewrite will be a structure reference to the local frame variable. */
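/* For illustration (made-up names, continuing the FOO/BAR sketch near the
   top of the file): once the nested BAR has caused a field for I to be
   created in FOO's frame, a plain use inside FOO itself such as

     i = i + 1;

   is rewritten by this pass into a reference through the frame variable,
   conceptually

     FRAME.i = FRAME.i + 1;

   while a separate debug decl whose DECL_VALUE_EXPR points at the frame
   field stands in for I in debug info (see get_local_debug_decl above).  */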
1525 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1527 static tree
1528 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1530 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1531 struct nesting_info *const info = (struct nesting_info *) wi->info;
1532 tree t = *tp, field, x;
1533 bool save_val_only;
1535 *walk_subtrees = 0;
1536 switch (TREE_CODE (t))
1538 case VAR_DECL:
1539 /* Non-automatic variables are never processed. */
1540 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1541 break;
1542 /* FALLTHRU */
1544 case PARM_DECL:
1545 if (decl_function_context (t) == info->context)
1547 /* If we copied a pointer to the frame, then the original decl
1548 is used unchanged in the parent function. */
1549 if (use_pointer_in_frame (t))
1550 break;
1552 /* No need to transform anything if no child references the
1553 variable. */
1554 field = lookup_field_for_decl (info, t, NO_INSERT);
1555 if (!field)
1556 break;
1557 wi->changed = true;
1559 x = get_local_debug_decl (info, t, field);
1560 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1561 x = get_frame_field (info, info->context, field, &wi->gsi);
1563 if (wi->val_only)
1565 if (wi->is_lhs)
1566 x = save_tmp_var (info, x, &wi->gsi);
1567 else
1568 x = init_tmp_var (info, x, &wi->gsi);
1571 *tp = x;
1573 break;
1575 case ADDR_EXPR:
1576 save_val_only = wi->val_only;
1577 wi->val_only = false;
1578 wi->is_lhs = false;
1579 wi->changed = false;
1580 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1581 wi->val_only = save_val_only;
1583 /* If we converted anything ... */
1584 if (wi->changed)
1586 tree save_context;
1588 /* Then the frame decl is now addressable. */
1589 TREE_ADDRESSABLE (info->frame_decl) = 1;
1591 save_context = current_function_decl;
1592 current_function_decl = info->context;
1593 recompute_tree_invariant_for_addr_expr (t);
1594 current_function_decl = save_context;
1596 /* If we are in a context where we only accept values, then
1597 compute the address into a temporary. */
1598 if (save_val_only)
1599 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1600 t, &wi->gsi);
1602 break;
1604 case REALPART_EXPR:
1605 case IMAGPART_EXPR:
1606 case COMPONENT_REF:
1607 case ARRAY_REF:
1608 case ARRAY_RANGE_REF:
1609 case BIT_FIELD_REF:
1610 /* Go down this entire nest and just look at the final prefix and
1611 anything that describes the references. Otherwise, we lose track
1612 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1613 save_val_only = wi->val_only;
1614 wi->val_only = true;
1615 wi->is_lhs = false;
1616 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1618 if (TREE_CODE (t) == COMPONENT_REF)
1619 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1620 NULL);
1621 else if (TREE_CODE (t) == ARRAY_REF
1622 || TREE_CODE (t) == ARRAY_RANGE_REF)
1624 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1625 NULL);
1626 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1627 NULL);
1628 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1629 NULL);
1632 wi->val_only = false;
1633 walk_tree (tp, convert_local_reference_op, wi, NULL);
1634 wi->val_only = save_val_only;
1635 break;
1637 case MEM_REF:
1638 save_val_only = wi->val_only;
1639 wi->val_only = true;
1640 wi->is_lhs = false;
1641 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1642 wi, NULL);
1643 /* We need to re-fold the MEM_REF as component references as
1644 part of an ADDR_EXPR address are not allowed. But we cannot
1645 fold here, as the chain record type is not yet finalized. */
1646 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1647 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1648 info->mem_refs->add (tp);
1649 wi->val_only = save_val_only;
1650 break;
1652 case VIEW_CONVERT_EXPR:
1653 /* Just request to look at the subtrees, leaving val_only and lhs
1654 untouched. This might actually be for !val_only + lhs, in which
1655 case we don't want to force a replacement by a temporary. */
1656 *walk_subtrees = 1;
1657 break;
1659 default:
1660 if (!IS_TYPE_OR_DECL_P (t))
1662 *walk_subtrees = 1;
1663 wi->val_only = true;
1664 wi->is_lhs = false;
1666 break;
1669 return NULL_TREE;
1672 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1673 struct walk_stmt_info *);
1675 /* Helper for convert_local_reference. Convert all the references in
1676 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1678 static bool
1679 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1681 struct nesting_info *const info = (struct nesting_info *) wi->info;
1682 bool need_frame = false, need_stmts = false;
1683 tree clause, decl;
1684 int dummy;
1685 bitmap new_suppress;
1687 new_suppress = BITMAP_GGC_ALLOC ();
1688 bitmap_copy (new_suppress, info->suppress_expansion);
1690 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1692 switch (OMP_CLAUSE_CODE (clause))
1694 case OMP_CLAUSE_REDUCTION:
1695 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1696 need_stmts = true;
1697 goto do_decl_clause;
1699 case OMP_CLAUSE_LASTPRIVATE:
1700 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1701 need_stmts = true;
1702 goto do_decl_clause;
1704 case OMP_CLAUSE_LINEAR:
1705 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1706 need_stmts = true;
1707 wi->val_only = true;
1708 wi->is_lhs = false;
1709 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1710 wi);
1711 goto do_decl_clause;
1713 case OMP_CLAUSE_PRIVATE:
1714 case OMP_CLAUSE_FIRSTPRIVATE:
1715 case OMP_CLAUSE_COPYPRIVATE:
1716 case OMP_CLAUSE_SHARED:
1717 do_decl_clause:
1718 decl = OMP_CLAUSE_DECL (clause);
1719 if (TREE_CODE (decl) == VAR_DECL
1720 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1721 break;
1722 if (decl_function_context (decl) == info->context
1723 && !use_pointer_in_frame (decl))
1725 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1726 if (field)
1728 bitmap_set_bit (new_suppress, DECL_UID (decl));
1729 OMP_CLAUSE_DECL (clause)
1730 = get_local_debug_decl (info, decl, field);
1731 need_frame = true;
1734 break;
1736 case OMP_CLAUSE_SCHEDULE:
1737 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1738 break;
1739 /* FALLTHRU */
1740 case OMP_CLAUSE_FINAL:
1741 case OMP_CLAUSE_IF:
1742 case OMP_CLAUSE_NUM_THREADS:
1743 case OMP_CLAUSE_DEPEND:
1744 case OMP_CLAUSE_DEVICE:
1745 case OMP_CLAUSE_NUM_TEAMS:
1746 case OMP_CLAUSE_THREAD_LIMIT:
1747 case OMP_CLAUSE_SAFELEN:
1748 wi->val_only = true;
1749 wi->is_lhs = false;
1750 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1751 wi);
1752 break;
1754 case OMP_CLAUSE_DIST_SCHEDULE:
1755 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1757 wi->val_only = true;
1758 wi->is_lhs = false;
1759 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1760 &dummy, wi);
1762 break;
1764 case OMP_CLAUSE_MAP:
1765 case OMP_CLAUSE_TO:
1766 case OMP_CLAUSE_FROM:
1767 if (OMP_CLAUSE_SIZE (clause))
1769 wi->val_only = true;
1770 wi->is_lhs = false;
1771 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1772 &dummy, wi);
1774 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1775 goto do_decl_clause;
1776 wi->val_only = true;
1777 wi->is_lhs = false;
1778 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1779 wi, NULL);
1780 break;
1782 case OMP_CLAUSE_ALIGNED:
1783 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1785 wi->val_only = true;
1786 wi->is_lhs = false;
1787 convert_local_reference_op
1788 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1790 /* Like do_decl_clause, but don't add any suppression. */
1791 decl = OMP_CLAUSE_DECL (clause);
1792 if (TREE_CODE (decl) == VAR_DECL
1793 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1794 break;
1795 if (decl_function_context (decl) == info->context
1796 && !use_pointer_in_frame (decl))
1798 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1799 if (field)
1801 OMP_CLAUSE_DECL (clause)
1802 = get_local_debug_decl (info, decl, field);
1803 need_frame = true;
1806 break;
1808 case OMP_CLAUSE_NOWAIT:
1809 case OMP_CLAUSE_ORDERED:
1810 case OMP_CLAUSE_DEFAULT:
1811 case OMP_CLAUSE_COPYIN:
1812 case OMP_CLAUSE_COLLAPSE:
1813 case OMP_CLAUSE_UNTIED:
1814 case OMP_CLAUSE_MERGEABLE:
1815 case OMP_CLAUSE_PROC_BIND:
1816 break;
1818 default:
1819 gcc_unreachable ();
1823 info->suppress_expansion = new_suppress;
1825 if (need_stmts)
1826 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1827 switch (OMP_CLAUSE_CODE (clause))
1829 case OMP_CLAUSE_REDUCTION:
1830 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1832 tree old_context
1833 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1834 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1835 = info->context;
1836 walk_body (convert_local_reference_stmt,
1837 convert_local_reference_op, info,
1838 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1839 walk_body (convert_local_reference_stmt,
1840 convert_local_reference_op, info,
1841 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1842 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1843 = old_context;
1845 break;
1847 case OMP_CLAUSE_LASTPRIVATE:
1848 walk_body (convert_local_reference_stmt,
1849 convert_local_reference_op, info,
1850 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1851 break;
1853 case OMP_CLAUSE_LINEAR:
1854 walk_body (convert_local_reference_stmt,
1855 convert_local_reference_op, info,
1856 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1857 break;
1859 default:
1860 break;
1863 return need_frame;
1867 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1868 and PARM_DECLs that were referenced by inner nested functions.
1869 The rewrite will be a structure reference to the local frame variable. */
1871 static tree
1872 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1873 struct walk_stmt_info *wi)
1875 struct nesting_info *info = (struct nesting_info *) wi->info;
1876 tree save_local_var_chain;
1877 bitmap save_suppress;
1878 gimple stmt = gsi_stmt (*gsi);
1880 switch (gimple_code (stmt))
1882 case GIMPLE_OMP_PARALLEL:
1883 case GIMPLE_OMP_TASK:
1884 save_suppress = info->suppress_expansion;
1885 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1886 wi))
1888 tree c;
1889 (void) get_frame_type (info);
1890 c = build_omp_clause (gimple_location (stmt),
1891 OMP_CLAUSE_SHARED);
1892 OMP_CLAUSE_DECL (c) = info->frame_decl;
1893 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1894 gimple_omp_taskreg_set_clauses (stmt, c);
1897 save_local_var_chain = info->new_local_var_chain;
1898 info->new_local_var_chain = NULL;
1900 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1901 gimple_omp_body_ptr (stmt));
1903 if (info->new_local_var_chain)
1904 declare_vars (info->new_local_var_chain,
1905 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1906 info->new_local_var_chain = save_local_var_chain;
1907 info->suppress_expansion = save_suppress;
1908 break;
1910 case GIMPLE_OMP_FOR:
1911 save_suppress = info->suppress_expansion;
1912 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1913 walk_gimple_omp_for (stmt, convert_local_reference_stmt,
1914 convert_local_reference_op, info);
1915 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1916 info, gimple_omp_body_ptr (stmt));
1917 info->suppress_expansion = save_suppress;
1918 break;
1920 case GIMPLE_OMP_SECTIONS:
1921 save_suppress = info->suppress_expansion;
1922 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1923 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1924 info, gimple_omp_body_ptr (stmt));
1925 info->suppress_expansion = save_suppress;
1926 break;
1928 case GIMPLE_OMP_SINGLE:
1929 save_suppress = info->suppress_expansion;
1930 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1931 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1932 info, gimple_omp_body_ptr (stmt));
1933 info->suppress_expansion = save_suppress;
1934 break;
1936 case GIMPLE_OMP_TARGET:
1937 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
1939 save_suppress = info->suppress_expansion;
1940 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1941 info->suppress_expansion = save_suppress;
1942 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1943 info, gimple_omp_body_ptr (stmt));
1944 break;
1946 save_suppress = info->suppress_expansion;
1947 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
1949 tree c;
1950 (void) get_frame_type (info);
1951 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1952 OMP_CLAUSE_DECL (c) = info->frame_decl;
1953 OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TOFROM;
1954 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
1955 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1956 gimple_omp_target_set_clauses (stmt, c);
1959 save_local_var_chain = info->new_local_var_chain;
1960 info->new_local_var_chain = NULL;
1962 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1963 gimple_omp_body_ptr (stmt));
1965 if (info->new_local_var_chain)
1966 declare_vars (info->new_local_var_chain,
1967 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1968 info->new_local_var_chain = save_local_var_chain;
1969 info->suppress_expansion = save_suppress;
1970 break;
1972 case GIMPLE_OMP_TEAMS:
1973 save_suppress = info->suppress_expansion;
1974 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1975 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1976 info, gimple_omp_body_ptr (stmt));
1977 info->suppress_expansion = save_suppress;
1978 break;
1980 case GIMPLE_OMP_SECTION:
1981 case GIMPLE_OMP_MASTER:
1982 case GIMPLE_OMP_TASKGROUP:
1983 case GIMPLE_OMP_ORDERED:
1984 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1985 info, gimple_omp_body_ptr (stmt));
1986 break;
1988 case GIMPLE_COND:
1989 wi->val_only = true;
1990 wi->is_lhs = false;
1991 *handled_ops_p = false;
1992 return NULL_TREE;
1994 case GIMPLE_ASSIGN:
1995 if (gimple_clobber_p (stmt))
1997 tree lhs = gimple_assign_lhs (stmt);
1998 if (!use_pointer_in_frame (lhs)
1999 && lookup_field_for_decl (info, lhs, NO_INSERT))
2001 gsi_replace (gsi, gimple_build_nop (), true);
2002 break;
2005 *handled_ops_p = false;
2006 return NULL_TREE;
2008 case GIMPLE_BIND:
2009 for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
2010 if (TREE_CODE (var) == NAMELIST_DECL)
2012 /* Adjust decls mentioned in NAMELIST_DECL. */
2013 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2014 tree decl;
2015 unsigned int i;
2017 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2019 if (TREE_CODE (decl) == VAR_DECL
2020 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2021 continue;
2022 if (decl_function_context (decl) == info->context
2023 && !use_pointer_in_frame (decl))
2025 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2026 if (field)
2028 CONSTRUCTOR_ELT (decls, i)->value
2029 = get_local_debug_decl (info, decl, field);
2035 *handled_ops_p = false;
2036 return NULL_TREE;
2038 default:
2039 /* For every other statement that we are not interested in
2040 handling here, let the walker traverse the operands. */
2041 *handled_ops_p = false;
2042 return NULL_TREE;
2045 /* Indicate that we have handled all the operands ourselves. */
2046 *handled_ops_p = true;
2047 return NULL_TREE;
2051 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2052 that reference labels from outer functions. The rewrite will be a
2053 call to __builtin_nonlocal_goto. */
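/* Sketch, for illustration only: a nested function's

     goto outer_label;

   where OUTER_LABEL is declared in an enclosing function becomes, roughly,

     __builtin_nonlocal_goto (&NONLOCAL_L, &CHAIN->nl_goto_save_area);

   NONLOCAL_L is the artificial DECL_NONLOCAL label created below in the
   target function, and the second argument is the address of the save-area
   field in the frame of the function that owns OUTER_LABEL, reached through
   the static chain; all names here are illustrative.  */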
2055 static tree
2056 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2057 struct walk_stmt_info *wi)
2059 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2060 tree label, new_label, target_context, x, field;
2061 gimple call;
2062 gimple stmt = gsi_stmt (*gsi);
2064 if (gimple_code (stmt) != GIMPLE_GOTO)
2066 *handled_ops_p = false;
2067 return NULL_TREE;
2070 label = gimple_goto_dest (stmt);
2071 if (TREE_CODE (label) != LABEL_DECL)
2073 *handled_ops_p = false;
2074 return NULL_TREE;
2077 target_context = decl_function_context (label);
2078 if (target_context == info->context)
2080 *handled_ops_p = false;
2081 return NULL_TREE;
2084 for (i = info->outer; target_context != i->context; i = i->outer)
2085 continue;
2087 /* The original user label may also be used for a normal goto; therefore
2088 we must create a new label that will actually receive the abnormal
2089 control transfer. This new label will be marked LABEL_NONLOCAL; this
2090 mark will trigger proper behavior in the cfg, as well as cause the
2091 (hairy target-specific) non-local goto receiver code to be generated
2092 when we expand rtl. Enter this association into var_map so that we
2093 can insert the new label into the IL during a second pass. */
2094 tree *slot = &i->var_map->get_or_insert (label);
2095 if (*slot == NULL)
2097 new_label = create_artificial_label (UNKNOWN_LOCATION);
2098 DECL_NONLOCAL (new_label) = 1;
2099 *slot = new_label;
2101 else
2102 new_label = *slot;
2104 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2105 field = get_nl_goto_field (i);
2106 x = get_frame_field (info, target_context, field, gsi);
2107 x = build_addr (x, target_context);
2108 x = gsi_gimplify_val (info, x, gsi);
2109 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2110 2, build_addr (new_label, target_context), x);
2111 gsi_replace (gsi, call, false);
2113 /* We have handled all of STMT's operands, no need to keep going. */
2114 *handled_ops_p = true;
2115 return NULL_TREE;
2119 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2120 are referenced via nonlocal goto from a nested function. The rewrite
2121 will involve installing a newly generated DECL_NONLOCAL label, and
2122 (potentially) a branch around the rtl gunk that is assumed to be
2123 attached to such a label. */
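/* Roughly, and for illustration only: a user label L that is targeted by a
   non-local goto

     L: ...

   is rewritten as

     goto L;        <-- only if the preceding statement may fall through
     NONLOCAL_L:    <-- new DECL_NONLOCAL label, the actual receiver
     L: ...

   so that ordinary local gotos to L keep their old, cheap behavior.  */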
2125 static tree
2126 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2127 struct walk_stmt_info *wi)
2129 struct nesting_info *const info = (struct nesting_info *) wi->info;
2130 tree label, new_label;
2131 gimple_stmt_iterator tmp_gsi;
2132 gimple stmt = gsi_stmt (*gsi);
2134 if (gimple_code (stmt) != GIMPLE_LABEL)
2136 *handled_ops_p = false;
2137 return NULL_TREE;
2140 label = gimple_label_label (stmt);
2142 tree *slot = info->var_map->get (label);
2143 if (!slot)
2145 *handled_ops_p = false;
2146 return NULL_TREE;
2149 /* If there's any possibility that the previous statement falls through,
2150 then we must branch around the new non-local label. */
2151 tmp_gsi = wi->gsi;
2152 gsi_prev (&tmp_gsi);
2153 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2155 gimple stmt = gimple_build_goto (label);
2156 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2159 new_label = (tree) *slot;
2160 stmt = gimple_build_label (new_label);
2161 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2163 *handled_ops_p = true;
2164 return NULL_TREE;
2168 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2169 of nested functions that require the use of trampolines. The rewrite
2170 will involve a reference to a trampoline generated for the occasion. */
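/* Illustrative example, not from the original source:

     void
     outer (void)
     {
       int v = 0;
       void inner (void) { v++; }
       void (*fp) (void) = &inner;
       fp ();
     }

   Taking the address of INNER as a plain function pointer loses the static
   chain, so the chain must be baked into an executable trampoline stored in
   OUTER's frame; the ADDR_EXPR is rewritten to the (adjusted) address of
   that trampoline, as the T.1/T.2/T.3 sequence below shows.  */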
2172 static tree
2173 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2175 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2176 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2177 tree t = *tp, decl, target_context, x, builtin;
2178 gimple call;
2180 *walk_subtrees = 0;
2181 switch (TREE_CODE (t))
2183 case ADDR_EXPR:
2184 /* Build
2185 T.1 = &CHAIN->tramp;
2186 T.2 = __builtin_adjust_trampoline (T.1);
2187 T.3 = (func_type)T.2;
2190 decl = TREE_OPERAND (t, 0);
2191 if (TREE_CODE (decl) != FUNCTION_DECL)
2192 break;
2194 /* Only need to process nested functions. */
2195 target_context = decl_function_context (decl);
2196 if (!target_context)
2197 break;
2199 /* If the nested function doesn't use a static chain, then
2200 it doesn't need a trampoline. */
2201 if (!DECL_STATIC_CHAIN (decl))
2202 break;
2204 /* If we don't want a trampoline, then don't build one. */
2205 if (TREE_NO_TRAMPOLINE (t))
2206 break;
2208 /* Look up the immediate parent of the callee, as that's where
2209 we need to insert the trampoline. */
2210 for (i = info; i->context != target_context; i = i->outer)
2211 continue;
2212 x = lookup_tramp_for_decl (i, decl, INSERT);
2214 /* Compute the address of the field holding the trampoline. */
2215 x = get_frame_field (info, target_context, x, &wi->gsi);
2216 x = build_addr (x, target_context);
2217 x = gsi_gimplify_val (info, x, &wi->gsi);
2219 /* Do machine-specific ugliness. Normally this will involve
2220 computing extra alignment, but it can really be anything. */
2221 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2222 call = gimple_build_call (builtin, 1, x);
2223 x = init_tmp_var_with_call (info, &wi->gsi, call);
2225 /* Cast back to the proper function type. */
2226 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2227 x = init_tmp_var (info, x, &wi->gsi);
2229 *tp = x;
2230 break;
2232 default:
2233 if (!IS_TYPE_OR_DECL_P (t))
2234 *walk_subtrees = 1;
2235 break;
2238 return NULL_TREE;
2242 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2243 to addresses of nested functions that require the use of
2244 trampolines. The rewrite will involve a reference to a trampoline
2245 generated for the occasion. */
2247 static tree
2248 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2249 struct walk_stmt_info *wi)
2251 struct nesting_info *info = (struct nesting_info *) wi->info;
2252 gimple stmt = gsi_stmt (*gsi);
2254 switch (gimple_code (stmt))
2256 case GIMPLE_CALL:
2258 /* Only walk call arguments, lest we generate trampolines for
2259 direct calls. */
2260 unsigned long i, nargs = gimple_call_num_args (stmt);
2261 for (i = 0; i < nargs; i++)
2262 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2263 wi, NULL);
2264 break;
2267 case GIMPLE_OMP_TARGET:
2268 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
2270 *handled_ops_p = false;
2271 return NULL_TREE;
2273 /* FALLTHRU */
2274 case GIMPLE_OMP_PARALLEL:
2275 case GIMPLE_OMP_TASK:
2277 tree save_local_var_chain;
2278 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2279 save_local_var_chain = info->new_local_var_chain;
2280 info->new_local_var_chain = NULL;
2281 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2282 info, gimple_omp_body_ptr (stmt));
2283 if (info->new_local_var_chain)
2284 declare_vars (info->new_local_var_chain,
2285 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2286 false);
2287 info->new_local_var_chain = save_local_var_chain;
2289 break;
2291 default:
2292 *handled_ops_p = false;
2293 return NULL_TREE;
2296 *handled_ops_p = true;
2297 return NULL_TREE;
2302 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2303 that reference nested functions to make sure that the static chain
2304 is set up properly for the call. */
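/* Sketch, illustrative only: a direct call from OUTER (or from a sibling
   nested function) such as

     inner ();

   is given an explicit static chain operand, so that the GIMPLE dump reads
   roughly

     inner (); [static-chain: &FRAME]

   where the chain is the address of the callee's parent frame, taken
   directly when the caller is that parent, or fetched through the chain
   links of the intervening frames otherwise.  */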
2306 static tree
2307 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2308 struct walk_stmt_info *wi)
2310 struct nesting_info *const info = (struct nesting_info *) wi->info;
2311 tree decl, target_context;
2312 char save_static_chain_added;
2313 int i;
2314 gimple stmt = gsi_stmt (*gsi);
2316 switch (gimple_code (stmt))
2318 case GIMPLE_CALL:
2319 if (gimple_call_chain (stmt))
2320 break;
2321 decl = gimple_call_fndecl (stmt);
2322 if (!decl)
2323 break;
2324 target_context = decl_function_context (decl);
2325 if (target_context && DECL_STATIC_CHAIN (decl))
2327 gimple_call_set_chain (stmt, get_static_chain (info, target_context,
2328 &wi->gsi));
2329 info->static_chain_added |= (1 << (info->context != target_context));
2331 break;
2333 case GIMPLE_OMP_PARALLEL:
2334 case GIMPLE_OMP_TASK:
2335 save_static_chain_added = info->static_chain_added;
2336 info->static_chain_added = 0;
2337 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2338 for (i = 0; i < 2; i++)
2340 tree c, decl;
2341 if ((info->static_chain_added & (1 << i)) == 0)
2342 continue;
2343 decl = i ? get_chain_decl (info) : info->frame_decl;
2344 /* Don't add CHAIN.* or FRAME.* twice. */
2345 for (c = gimple_omp_taskreg_clauses (stmt);
2346 c;
2347 c = OMP_CLAUSE_CHAIN (c))
2348 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2349 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2350 && OMP_CLAUSE_DECL (c) == decl)
2351 break;
2352 if (c == NULL)
2354 c = build_omp_clause (gimple_location (stmt),
2355 i ? OMP_CLAUSE_FIRSTPRIVATE
2356 : OMP_CLAUSE_SHARED);
2357 OMP_CLAUSE_DECL (c) = decl;
2358 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2359 gimple_omp_taskreg_set_clauses (stmt, c);
2362 info->static_chain_added |= save_static_chain_added;
2363 break;
2365 case GIMPLE_OMP_TARGET:
2366 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
2368 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2369 break;
2371 save_static_chain_added = info->static_chain_added;
2372 info->static_chain_added = 0;
2373 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2374 for (i = 0; i < 2; i++)
2376 tree c, decl;
2377 if ((info->static_chain_added & (1 << i)) == 0)
2378 continue;
2379 decl = i ? get_chain_decl (info) : info->frame_decl;
2380 /* Don't add CHAIN.* or FRAME.* twice. */
2381 for (c = gimple_omp_target_clauses (stmt);
2382 c;
2383 c = OMP_CLAUSE_CHAIN (c))
2384 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2385 && OMP_CLAUSE_DECL (c) == decl)
2386 break;
2387 if (c == NULL)
2389 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2390 OMP_CLAUSE_DECL (c) = decl;
2391 OMP_CLAUSE_MAP_KIND (c)
2392 = i ? OMP_CLAUSE_MAP_TO : OMP_CLAUSE_MAP_TOFROM;
2393 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2394 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2395 gimple_omp_target_set_clauses (stmt, c);
2398 info->static_chain_added |= save_static_chain_added;
2399 break;
2401 case GIMPLE_OMP_FOR:
2402 walk_body (convert_gimple_call, NULL, info,
2403 gimple_omp_for_pre_body_ptr (stmt));
2404 /* FALLTHRU */
2405 case GIMPLE_OMP_SECTIONS:
2406 case GIMPLE_OMP_SECTION:
2407 case GIMPLE_OMP_SINGLE:
2408 case GIMPLE_OMP_TEAMS:
2409 case GIMPLE_OMP_MASTER:
2410 case GIMPLE_OMP_TASKGROUP:
2411 case GIMPLE_OMP_ORDERED:
2412 case GIMPLE_OMP_CRITICAL:
2413 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2414 break;
2416 default:
2417 /* Keep looking for other operands. */
2418 *handled_ops_p = false;
2419 return NULL_TREE;
2422 *handled_ops_p = true;
2423 return NULL_TREE;
2426 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2427 call expressions. At the same time, determine if a nested function
2428 actually uses its static chain; if not, remember that. */
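/* For example (illustrative): a nested function that touches only its own
   locals has DECL_STATIC_CHAIN cleared here, so calls to it need no chain
   argument.  Converting a trampoline or a chained call elsewhere can make
   that guess wrong again, which is why the loop below repeats the walk
   until the number of chain users stops changing.  */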
2430 static void
2431 convert_all_function_calls (struct nesting_info *root)
2433 unsigned int chain_count = 0, old_chain_count, iter_count;
2434 struct nesting_info *n;
2436 /* First, optimistically clear static_chain for all decls that haven't
2437 used the static chain already for variable access. But always create
2438 it if not optimizing. This makes it possible to reconstruct the static
2439 nesting tree at run time and thus to resolve up-level references from
2440 within the debugger. */
2441 FOR_EACH_NEST_INFO (n, root)
2443 tree decl = n->context;
2444 if (!optimize)
2446 if (n->inner)
2447 (void) get_frame_type (n);
2448 if (n->outer)
2449 (void) get_chain_decl (n);
2451 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2453 DECL_STATIC_CHAIN (decl) = 0;
2454 if (dump_file && (dump_flags & TDF_DETAILS))
2455 fprintf (dump_file, "Guessing no static-chain for %s\n",
2456 lang_hooks.decl_printable_name (decl, 2));
2458 else
2459 DECL_STATIC_CHAIN (decl) = 1;
2460 chain_count += DECL_STATIC_CHAIN (decl);
2463 /* Walk the functions and perform transformations. Note that these
2464 transformations can induce new uses of the static chain, which in turn
2465 require re-examining all users of the decl. */
2466 /* ??? It would make sense to try to use the call graph to speed this up,
2467 but the call graph hasn't really been built yet. Even if it had been, we
2468 would still need to iterate in this loop since address-of references
2469 wouldn't show up in the callgraph anyway. */
2470 iter_count = 0;
2473 old_chain_count = chain_count;
2474 chain_count = 0;
2475 iter_count++;
2477 if (dump_file && (dump_flags & TDF_DETAILS))
2478 fputc ('\n', dump_file);
2480 FOR_EACH_NEST_INFO (n, root)
2482 tree decl = n->context;
2483 walk_function (convert_tramp_reference_stmt,
2484 convert_tramp_reference_op, n);
2485 walk_function (convert_gimple_call, NULL, n);
2486 chain_count += DECL_STATIC_CHAIN (decl);
2489 while (chain_count != old_chain_count);
2491 if (dump_file && (dump_flags & TDF_DETAILS))
2492 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2493 iter_count);
2496 struct nesting_copy_body_data
2498 copy_body_data cb;
2499 struct nesting_info *root;
2502 /* A helper subroutine for debug_var_chain type remapping. */
2504 static tree
2505 nesting_copy_decl (tree decl, copy_body_data *id)
2507 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2508 tree *slot = nid->root->var_map->get (decl);
2510 if (slot)
2511 return (tree) *slot;
2513 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2515 tree new_decl = copy_decl_no_change (decl, id);
2516 DECL_ORIGINAL_TYPE (new_decl)
2517 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2518 return new_decl;
2521 if (TREE_CODE (decl) == VAR_DECL
2522 || TREE_CODE (decl) == PARM_DECL
2523 || TREE_CODE (decl) == RESULT_DECL)
2524 return decl;
2526 return copy_decl_no_change (decl, id);
2529 /* A helper function for remap_vla_decls. See if *TP contains
2530 any remapped variables. */
2532 static tree
2533 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2535 struct nesting_info *root = (struct nesting_info *) data;
2536 tree t = *tp;
2538 if (DECL_P (t))
2540 *walk_subtrees = 0;
2541 tree *slot = root->var_map->get (t);
2543 if (slot)
2544 return *slot;
2546 return NULL;
2549 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2550 involved. */
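/* Illustration, not from the original source: in

     void
     outer (int n)
     {
       int a[n];
       void inner (void) { a[0] = 1; }
       inner ();
     }

   the VLA A has a DECL_VALUE_EXPR roughly of the form *A.N, with a variably
   modified type.  Once the underlying pointer and saved length have been
   moved into the frame, the value expression and its type must be remapped
   to the frame copies so the debug information stays usable.  */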
2552 static void
2553 remap_vla_decls (tree block, struct nesting_info *root)
2555 tree var, subblock, val, type;
2556 struct nesting_copy_body_data id;
2558 for (subblock = BLOCK_SUBBLOCKS (block);
2559 subblock;
2560 subblock = BLOCK_CHAIN (subblock))
2561 remap_vla_decls (subblock, root);
2563 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2564 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2566 val = DECL_VALUE_EXPR (var);
2567 type = TREE_TYPE (var);
2569 if (!(TREE_CODE (val) == INDIRECT_REF
2570 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2571 && variably_modified_type_p (type, NULL)))
2572 continue;
2574 if (root->var_map->get (TREE_OPERAND (val, 0))
2575 || walk_tree (&type, contains_remapped_vars, root, NULL))
2576 break;
2579 if (var == NULL_TREE)
2580 return;
2582 memset (&id, 0, sizeof (id));
2583 id.cb.copy_decl = nesting_copy_decl;
2584 id.cb.decl_map = new hash_map<tree, tree>;
2585 id.root = root;
2587 for (; var; var = DECL_CHAIN (var))
2588 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2590 struct nesting_info *i;
2591 tree newt, context;
2593 val = DECL_VALUE_EXPR (var);
2594 type = TREE_TYPE (var);
2596 if (!(TREE_CODE (val) == INDIRECT_REF
2597 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2598 && variably_modified_type_p (type, NULL)))
2599 continue;
2601 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2602 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2603 continue;
2605 context = decl_function_context (var);
2606 for (i = root; i; i = i->outer)
2607 if (i->context == context)
2608 break;
2610 if (i == NULL)
2611 continue;
2613 /* Fully expand value expressions. This avoids having debug variables
2614 that are referenced only from value expressions and thus can be swept during GC. */
2615 if (slot)
2617 tree t = (tree) *slot;
2618 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2619 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2622 id.cb.src_fn = i->context;
2623 id.cb.dst_fn = i->context;
2624 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2626 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2627 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2629 newt = TREE_TYPE (newt);
2630 type = TREE_TYPE (type);
2632 if (TYPE_NAME (newt)
2633 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2634 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2635 && newt != type
2636 && TYPE_NAME (newt) == TYPE_NAME (type))
2637 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2639 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2640 if (val != DECL_VALUE_EXPR (var))
2641 SET_DECL_VALUE_EXPR (var, val);
2644 delete id.cb.decl_map;
2647 /* Fold the MEM_REF *E. */
2648 bool
2649 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2651 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2652 *ref_p = fold (*ref_p);
2653 return true;
2656 /* Do "everything else" to clean up or complete state collected by the
2657 various walking passes -- lay out the types and decls, generate code
2658 to initialize the frame decl, store critical expressions in the
2659 struct function for rtl to find. */
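/* Sketch of the initialization code this emits at the start of the parent
   function (illustrative; the names stand for the artificial decls created
   earlier):

     FRAME.p = p;        <-- or &p, for each non-locally referenced parameter
     FRAME.chain = CHAIN;                <-- link to the enclosing frame
     __builtin_init_trampoline (&FRAME.tramp_inner, &inner, &FRAME);

   followed by recording CHAIN as the function's static_chain_decl and, if
   needed, the non-local goto save area.  */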
2661 static void
2662 finalize_nesting_tree_1 (struct nesting_info *root)
2664 gimple_seq stmt_list;
2665 gimple stmt;
2666 tree context = root->context;
2667 struct function *sf;
2669 stmt_list = NULL;
2671 /* If we created a non-local frame type or decl, we need to lay them
2672 out at this time. */
2673 if (root->frame_type)
2675 /* In some cases the frame type will trigger the -Wpadded warning.
2676 This is not helpful; suppress it. */
2677 int save_warn_padded = warn_padded;
2678 tree *adjust;
2680 warn_padded = 0;
2681 layout_type (root->frame_type);
2682 warn_padded = save_warn_padded;
2683 layout_decl (root->frame_decl, 0);
2685 /* Remove root->frame_decl from root->new_local_var_chain, so
2686 that we can declare it also in the lexical blocks, which
2687 helps ensure virtual regs that end up appearing in its RTL
2688 expression get substituted in instantiate_virtual_regs(). */
2689 for (adjust = &root->new_local_var_chain;
2690 *adjust != root->frame_decl;
2691 adjust = &DECL_CHAIN (*adjust))
2692 gcc_assert (DECL_CHAIN (*adjust));
2693 *adjust = DECL_CHAIN (*adjust);
2695 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2696 declare_vars (root->frame_decl,
2697 gimple_seq_first_stmt (gimple_body (context)), true);
2700 /* If any parameters were referenced non-locally, then we need to
2701 insert a copy. Likewise, if any variables were referenced by
2702 pointer, we need to initialize the address. */
2703 if (root->any_parm_remapped)
2705 tree p;
2706 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2708 tree field, x, y;
2710 field = lookup_field_for_decl (root, p, NO_INSERT);
2711 if (!field)
2712 continue;
2714 if (use_pointer_in_frame (p))
2715 x = build_addr (p, context);
2716 else
2717 x = p;
2719 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2720 root->frame_decl, field, NULL_TREE);
2721 stmt = gimple_build_assign (y, x);
2722 gimple_seq_add_stmt (&stmt_list, stmt);
2723 /* If the assignment is from a non-register, the stmt is
2724 not valid gimple. Make it so by using a temporary instead. */
2725 if (!is_gimple_reg (x)
2726 && is_gimple_reg_type (TREE_TYPE (x)))
2728 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2729 x = init_tmp_var (root, x, &gsi);
2730 gimple_assign_set_rhs1 (stmt, x);
2735 /* If a chain_field was created, then it needs to be initialized
2736 from chain_decl. */
2737 if (root->chain_field)
2739 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2740 root->frame_decl, root->chain_field, NULL_TREE);
2741 stmt = gimple_build_assign (x, get_chain_decl (root));
2742 gimple_seq_add_stmt (&stmt_list, stmt);
2745 /* If trampolines were created, then we need to initialize them. */
2746 if (root->any_tramp_created)
2748 struct nesting_info *i;
2749 for (i = root->inner; i ; i = i->next)
2751 tree arg1, arg2, arg3, x, field;
2753 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2754 if (!field)
2755 continue;
2757 gcc_assert (DECL_STATIC_CHAIN (i->context));
2758 arg3 = build_addr (root->frame_decl, context);
2760 arg2 = build_addr (i->context, context);
2762 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2763 root->frame_decl, field, NULL_TREE);
2764 arg1 = build_addr (x, context);
2766 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2767 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2768 gimple_seq_add_stmt (&stmt_list, stmt);
2772 /* If we created initialization statements, insert them. */
2773 if (stmt_list)
2775 gimple bind;
2776 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2777 bind = gimple_seq_first_stmt (gimple_body (context));
2778 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2779 gimple_bind_set_body (bind, stmt_list);
2782 /* If a chain_decl was created, then it needs to be registered with
2783 struct function so that it gets initialized from the static chain
2784 register at the beginning of the function. */
2785 sf = DECL_STRUCT_FUNCTION (root->context);
2786 sf->static_chain_decl = root->chain_decl;
2788 /* Similarly for the non-local goto save area. */
2789 if (root->nl_goto_field)
2791 sf->nonlocal_goto_save_area
2792 = get_frame_field (root, context, root->nl_goto_field, NULL);
2793 sf->has_nonlocal_label = 1;
2796 /* Make sure all new local variables get inserted into the
2797 proper BIND_EXPR. */
2798 if (root->new_local_var_chain)
2799 declare_vars (root->new_local_var_chain,
2800 gimple_seq_first_stmt (gimple_body (root->context)),
2801 false);
2803 if (root->debug_var_chain)
2805 tree debug_var;
2806 gimple scope;
2808 remap_vla_decls (DECL_INITIAL (root->context), root);
2810 for (debug_var = root->debug_var_chain; debug_var;
2811 debug_var = DECL_CHAIN (debug_var))
2812 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2813 break;
2815 /* If there are any debug decls with variable length types,
2816 remap those types using other debug_var_chain variables. */
2817 if (debug_var)
2819 struct nesting_copy_body_data id;
2821 memset (&id, 0, sizeof (id));
2822 id.cb.copy_decl = nesting_copy_decl;
2823 id.cb.decl_map = new hash_map<tree, tree>;
2824 id.root = root;
2826 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2827 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2829 tree type = TREE_TYPE (debug_var);
2830 tree newt, t = type;
2831 struct nesting_info *i;
2833 for (i = root; i; i = i->outer)
2834 if (variably_modified_type_p (type, i->context))
2835 break;
2837 if (i == NULL)
2838 continue;
2840 id.cb.src_fn = i->context;
2841 id.cb.dst_fn = i->context;
2842 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2844 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2845 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2847 newt = TREE_TYPE (newt);
2848 t = TREE_TYPE (t);
2850 if (TYPE_NAME (newt)
2851 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2852 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2853 && newt != t
2854 && TYPE_NAME (newt) == TYPE_NAME (t))
2855 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2858 delete id.cb.decl_map;
2861 scope = gimple_seq_first_stmt (gimple_body (root->context));
2862 if (gimple_bind_block (scope))
2863 declare_vars (root->debug_var_chain, scope, true);
2864 else
2865 BLOCK_VARS (DECL_INITIAL (root->context))
2866 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2867 root->debug_var_chain);
2870 /* Fold the rewritten MEM_REF trees. */
2871 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
2873 /* Dump the translated tree function. */
2874 if (dump_file)
2876 fputs ("\n\n", dump_file);
2877 dump_function_to_file (root->context, dump_file, dump_flags);
2881 static void
2882 finalize_nesting_tree (struct nesting_info *root)
2884 struct nesting_info *n;
2885 FOR_EACH_NEST_INFO (n, root)
2886 finalize_nesting_tree_1 (n);
2889 /* Unnest the nodes and pass them to cgraph. */
2891 static void
2892 unnest_nesting_tree_1 (struct nesting_info *root)
2894 struct cgraph_node *node = cgraph_node::get (root->context);
2896 /* For nested functions, update the cgraph to reflect unnesting.
2897 We also delay finalizing these functions until this point. */
2898 if (node->origin)
2900 node->unnest ();
2901 cgraph_finalize_function (root->context, true);
2905 static void
2906 unnest_nesting_tree (struct nesting_info *root)
2908 struct nesting_info *n;
2909 FOR_EACH_NEST_INFO (n, root)
2910 unnest_nesting_tree_1 (n);
2913 /* Free the data structures allocated during this pass. */
2915 static void
2916 free_nesting_tree (struct nesting_info *root)
2918 struct nesting_info *node, *next;
2920 node = iter_nestinfo_start (root);
2923 next = iter_nestinfo_next (node);
2924 delete node->var_map;
2925 delete node->field_map;
2926 delete node->mem_refs;
2927 free (node);
2928 node = next;
2930 while (node);
2933 /* Gimplify a function and all its nested functions. */
2934 static void
2935 gimplify_all_functions (struct cgraph_node *root)
2937 struct cgraph_node *iter;
2938 if (!gimple_body (root->decl))
2939 gimplify_function_tree (root->decl);
2940 for (iter = root->nested; iter; iter = iter->next_nested)
2941 gimplify_all_functions (iter);
2944 /* Main entry point for this pass. Process FNDECL and all of its nested
2945 subroutines and turn them into something less tightly bound. */
2947 void
2948 lower_nested_functions (tree fndecl)
2950 struct cgraph_node *cgn;
2951 struct nesting_info *root;
2953 /* If there are no nested functions, there's nothing to do. */
2954 cgn = cgraph_node::get (fndecl);
2955 if (!cgn->nested)
2956 return;
2958 gimplify_all_functions (cgn);
2960 dump_file = dump_begin (TDI_nested, &dump_flags);
2961 if (dump_file)
2962 fprintf (dump_file, "\n;; Function %s\n\n",
2963 lang_hooks.decl_printable_name (fndecl, 2));
2965 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2966 root = create_nesting_tree (cgn);
2968 walk_all_functions (convert_nonlocal_reference_stmt,
2969 convert_nonlocal_reference_op,
2970 root);
2971 walk_all_functions (convert_local_reference_stmt,
2972 convert_local_reference_op,
2973 root);
2974 walk_all_functions (convert_nl_goto_reference, NULL, root);
2975 walk_all_functions (convert_nl_goto_receiver, NULL, root);
2977 convert_all_function_calls (root);
2978 finalize_nesting_tree (root);
2979 unnest_nesting_tree (root);
2981 free_nesting_tree (root);
2982 bitmap_obstack_release (&nesting_info_bitmap_obstack);
2984 if (dump_file)
2986 dump_end (TDI_nested, dump_file);
2987 dump_file = NULL;
2991 #include "gt-tree-nested.h"