svn merge -r215707:216846 svn+ssh://gcc.gnu.org/svn/gcc/trunk
[official-gcc.git] / gcc / tree-nested.c
blobb5d654319d0a61fd2734ad2fccc86ec6ea558406
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stringpool.h"
26 #include "stor-layout.h"
27 #include "tm_p.h"
28 #include "hashtab.h"
29 #include "hash-set.h"
30 #include "vec.h"
31 #include "machmode.h"
32 #include "hard-reg-set.h"
33 #include "input.h"
34 #include "function.h"
35 #include "tree-dump.h"
36 #include "tree-inline.h"
37 #include "predict.h"
38 #include "basic-block.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-expr.h"
42 #include "is-a.h"
43 #include "gimple.h"
44 #include "gimplify.h"
45 #include "gimple-iterator.h"
46 #include "gimple-walk.h"
47 #include "tree-iterator.h"
48 #include "bitmap.h"
49 #include "hash-map.h"
50 #include "plugin-api.h"
51 #include "ipa-ref.h"
52 #include "cgraph.h"
53 #include "tree-cfg.h"
54 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
55 #include "langhooks.h"
56 #include "gimple-low.h"
59 /* The object of this pass is to lower the representation of a set of nested
60 functions in order to expose all of the gory details of the various
61 nonlocal references. We want to do this sooner rather than later, in
62 order to give us more freedom in emitting all of the functions in question.
64 Back in olden times, when gcc was young, we developed an insanely
65 complicated scheme whereby variables which were referenced nonlocally
66 were forced to live in the stack of the declaring function, and then
67 the nested functions magically discovered where these variables were
68 placed. In order for this scheme to function properly, it required
69 that the outer function be partially expanded, then we switch to
70 compiling the inner function, and once done with those we switch back
71 to compiling the outer function. Such delicate ordering requirements
72 makes it difficult to do whole translation unit optimizations
73 involving such functions.
75 The implementation here is much more direct. Everything that can be
76 referenced by an inner function is a member of an explicitly created
77 structure herein called the "nonlocal frame struct". The incoming
78 static chain for a nested function is a pointer to this struct in
79 the parent. In this way, we settle on known offsets from a known
80 base, and so are decoupled from the logic that places objects in the
81 function's stack frame. More importantly, we don't have to wait for
82 that to happen -- since the compilation of the inner function is no
83 longer tied to a real stack frame, the nonlocal frame struct can be
84 allocated anywhere. Which means that the outer function is now
85 inlinable.
87 Theory of operation here is very simple. Iterate over all the
88 statements in all the functions (depth first) several times,
89 allocating structures and fields on demand. In general we want to
90 examine inner functions first, so that we can avoid making changes
91 to outer functions which are unnecessary.
93 The order of the passes matters a bit, in that later passes will be
94 skipped if it is discovered that the functions don't actually interact
95 at all. That is, they're nested in the lexical sense but could have
96 been written as independent functions without change. */
/* One node in the function nesting tree; one record exists per function
   being lowered.  Built by create_nesting_tree and walked depth-first
   via FOR_EACH_NEST_INFO.  */

struct nesting_info
{
  /* Tree links: enclosing function, first nested function, and the
     next sibling at the same nesting level.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Map from a non-locally referenced DECL to the FIELD_DECL created
     for it in this function's frame struct (see lookup_field_for_decl).  */
  hash_map<tree, tree> *field_map;
  /* Map from a DECL to its local proxy: debug decl or trampoline field
     (see get_nonlocal_debug_decl / lookup_tramp_for_decl).  */
  hash_map<tree, tree> *var_map;
  /* Set of memory-reference operands already processed.  */
  hash_set<tree *> *mem_refs;
  /* DECL_UIDs whose frame expansion is suppressed because an OMP clause
     handles them instead (see convert_nonlocal_omp_clauses).  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this record describes.  */
  tree context;
  /* Temporaries created by create_tmp_var_for, pending registration.  */
  tree new_local_var_chain;
  /* Chain of debug proxy variables created for non-local references.  */
  tree debug_var_chain;
  /* The RECORD_TYPE of the non-local frame struct and the VAR_DECL
     ("FRAME") instantiating it; lazily created by get_frame_type.  */
  tree frame_type;
  tree frame_decl;
  /* The "__chain" field in the frame struct and the incoming static
     chain PARM_DECL ("CHAIN"); lazily created.  */
  tree chain_field;
  tree chain_decl;
  /* Field holding the non-local goto save area, if needed.  */
  tree nl_goto_field;

  /* True if any PARM_DECL was remapped into the frame struct.  */
  bool any_parm_remapped;
  /* True if any trampoline field was created for a nested function.  */
  bool any_tramp_created;
  /* Flags noting how a static chain ended up being added; exact bit
     semantics are assigned by later passes in this file — NOTE(review):
     not visible in this chunk.  */
  char static_chain_added;
};
125 /* Iterate over the nesting tree, starting with ROOT, depth first. */
127 static inline struct nesting_info *
128 iter_nestinfo_start (struct nesting_info *root)
130 while (root->inner)
131 root = root->inner;
132 return root;
135 static inline struct nesting_info *
136 iter_nestinfo_next (struct nesting_info *node)
138 if (node->next)
139 return iter_nestinfo_start (node->next);
140 return node->outer;
143 #define FOR_EACH_NEST_INFO(I, ROOT) \
144 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
146 /* Obstack used for the bitmaps in the struct above. */
147 static struct bitmap_obstack nesting_info_bitmap_obstack;
150 /* We're working in so many different function contexts simultaneously,
151 that create_tmp_var is dangerous. Prevent mishap. */
152 #define create_tmp_var cant_use_create_tmp_var_here_dummy
154 /* Like create_tmp_var, except record the variable for registration at
155 the given nesting level. */
/* Like create_tmp_var, except record the variable on INFO's pending
   chain for registration at that nesting level.  TYPE is the variable's
   type; PREFIX (may be NULL) seeds the generated name.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  /* Thread onto the list consumed later when locals are declared.  */
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  /* Complex/vector temporaries may live in GIMPLE registers.  */
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}
182 /* Take the address of EXP to be used within function CONTEXT.
183 Mark it for addressability as necessary. */
/* Take the address of EXP to be used within function CONTEXT.
   Mark the underlying base DECL addressable as necessary.  */

tree
build_addr (tree exp, tree context)
{
  tree base = exp;
  tree save_context;
  tree retval;

  /* Strip component references (array/field accesses) down to the
     base object, which is what must be marked addressable.  */
  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  if (DECL_P (base))
    TREE_ADDRESSABLE (base) = 1;

  /* Building the ADDR_EXPR will compute a set of properties for
     that ADDR_EXPR.  Those properties are unfortunately context
     specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.

     Temporarily set CURRENT_FUNCTION_DECL to the desired context,
     build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
     way the properties are for the ADDR_EXPR are computed properly.  */
  save_context = current_function_decl;
  current_function_decl = context;
  retval = build_fold_addr_expr (exp);
  current_function_decl = save_context;
  return retval;
}
212 /* Insert FIELD into TYPE, sorted by alignment requirements. */
/* Insert FIELD into TYPE, keeping the field list sorted by decreasing
   alignment so that the frame struct needs minimal padding.  Also bumps
   TYPE's alignment to cover FIELD.  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  /* Find the first existing field whose alignment FIELD meets or
     exceeds; FIELD is linked in just before it.  */
  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  DECL_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    TYPE_ALIGN (type) = DECL_ALIGN (field);
}
233 /* Build or return the RECORD_TYPE that describes the frame state that is
234 shared between INFO->CONTEXT and its nested functions. This record will
235 not be complete until finalize_nesting_tree; up until that point we'll
236 be adding fields as necessary.
238 We also build the DECL that represents this frame in the function. */
/* Build or return the RECORD_TYPE that describes the frame state shared
   between INFO->CONTEXT and its nested functions, creating the "FRAME"
   VAR_DECL for it on first use.  The record is incomplete until
   finalize_nesting_tree; fields are added on demand until then.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the record "FRAME.<function>" for dumps and debug info.  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}
271 /* Return true if DECL should be referenced by pointer in the non-local
272 frame structure. */
274 static bool
275 use_pointer_in_frame (tree decl)
277 if (TREE_CODE (decl) == PARM_DECL)
279 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
280 sized decls, and inefficient to copy large aggregates. Don't bother
281 moving anything but scalar variables. */
282 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
284 else
286 /* Variable sized types make things "interesting" in the frame. */
287 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
291 /* Given DECL, a non-locally accessed variable, find or create a field
292 in the non-local frame structure for the given nesting context. */
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With NO_INSERT, return the existing field or NULL_TREE.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The field holds a pointer to the object, so use pointer
	     type and alignment rather than the decl's own.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The object lives in the frame by value; mirror the decl's
	     type, alignment and addressability attributes.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  DECL_ALIGN (field) = DECL_ALIGN (decl);
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      /* Remember that a parameter was remapped; its value must be
	 copied into the frame at function entry.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
337 /* Build or return the variable that holds the static chain within
338 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.
   Its type is a pointer to the enclosing function's frame struct.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
383 /* Build or return the field within the non-local frame state that holds
384 the static chain for INFO->CONTEXT. This is the way to walk back up
385 multiple nesting levels. */
/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      /* The field points at the enclosing function's frame struct.  */
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
417 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
/* Initialize a new temporary with the result of the GIMPLE_CALL CALL,
   inserting CALL before GSI in INFO's context.  Returns the temporary.  */

static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
			gimple call)
{
  tree t;

  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  /* Borrow the location of the statement we insert before, if any.  */
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}
435 /* Copy EXP into a temporary. Allocate the temporary in the context of
436 INFO and insert the initialization statement before GSI. */
438 static tree
439 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
441 tree t;
442 gimple stmt;
444 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
445 stmt = gimple_build_assign (t, exp);
446 if (! gsi_end_p (*gsi))
447 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
448 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
450 return t;
454 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
456 static tree
457 gsi_gimplify_val (struct nesting_info *info, tree exp,
458 gimple_stmt_iterator *gsi)
460 if (is_gimple_val (exp))
461 return exp;
462 else
463 return init_tmp_var (info, exp, gsi);
466 /* Similarly, but copy from the temporary and insert the statement
467 after the iterator. */
/* Similarly to init_tmp_var, but copy *from* the temporary back into EXP
   and insert the statement after the iterator — the store-back half of
   rewriting a left-hand side through a temporary.  */

static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  /* Assignment is reversed relative to init_tmp_var: EXP receives T.  */
  stmt = gimple_build_assign (exp, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
484 /* Build or return the type used to represent a nested function trampoline. */
486 static GTY(()) tree trampoline_type;
/* Build or return the type used to represent a nested function trampoline.
   The type is a RECORD_TYPE with a single char-array "__data" field and
   is shared by all trampolines (cached in TRAMPOLINE_TYPE).  */

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
524 /* Given DECL, a nested function, find or create a field in the non-local
525 frame structure for a trampoline for this function. */
/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  With NO_INSERT,
   return the existing field or NULL_TREE.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->var_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->var_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);
      TREE_TYPE (field) = get_trampoline_type (info);
      /* The trampoline code is materialized in place, so the field's
	 address is taken.  */
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      info->any_tramp_created = true;
    }

  return *slot;
}
554 /* Build or return the field within the non-local frame state that holds
555 the non-local goto "jmp_buf". The buffer itself is maintained by the
556 rtl middle-end as dynamic stack space is allocated. */
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Word count: the save area size in Pmode words, plus one for
	 the frame pointer.  */
      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
597 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of GIMPLE sequence
   *PSEQ, passing INFO through the walk_stmt_info.  */

static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	   struct nesting_info *info, gimple_seq *pseq)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  /* Operands are wanted in rvalue (value-only) position by default.  */
  wi.val_only = true;
  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
}
612 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
614 static inline void
615 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
616 struct nesting_info *info)
618 gimple_seq body = gimple_body (info->context);
619 walk_body (callback_stmt, callback_op, info, &body);
620 gimple_set_body (info->context, body);
623 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
/* Invoke CALLBACK_STMT/CALLBACK_OP on a GIMPLE_OMP_FOR's init, cond,
   incr operands and pre-body.  Any statements generated while rewriting
   the operands are appended to the loop's pre-body.  */

static void
walk_gimple_omp_for (gimple for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  gcc_assert (!is_gimple_omp_oacc_specifically (for_stmt));

  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Collect newly generated statements in an empty scratch sequence.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is an lvalue; initial/final bounds are values.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary expression; walk both operands.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Anything the callbacks emitted goes into the loop pre-body.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
678 /* Similarly for ROOT and all functions nested underneath, depth first. */
680 static void
681 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
682 struct nesting_info *root)
684 struct nesting_info *n;
685 FOR_EACH_NEST_INFO (n, root)
686 walk_function (callback_stmt, callback_op, n);
690 /* We have to check for a fairly pathological case. The operands of function
691 nested function are to be interpreted in the context of the enclosing
692 function. So if any are variably-sized, they will get remapped when the
693 enclosing function is inlined. But that remapping would also have to be
694 done in the types of the PARM_DECLs of the nested function, meaning the
695 argument types of that function will disagree with the arguments in the
696 calls to that function. So we'd either have to make a copy of the nested
697 function corresponding to each time the enclosing function was inlined or
698 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
699 function. The former is not practical. The latter would still require
700 detecting this case to know when to add the conversions. So, for now at
701 least, we don't inline such an enclosing function.
703 We have to do that check recursively, so here return indicating whether
704 FNDECL has such a nested function. ORIG_FN is the function we were
705 trying to inline to use for checking whether any argument is variably
706 modified by anything in it.
708 It would be better to do this in tree-inline.c so that we could give
709 the appropriate warning for why a function can't be inlined, but that's
710 too late since the nesting structure has already been flattened and
711 adding a flag just to record this fact seems a waste of a flag. */
/* Return true if FNDECL has a nested function (at any depth) whose
   argument types are variably modified relative to ORIG_FNDECL — the
   condition under which ORIG_FNDECL must be marked uninlinable (see
   the comment preceding this function).  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      /* Recurse into deeper nesting levels; ORIG_FNDECL stays fixed
	 as the reference context throughout.  */
      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
733 /* Construct our local datastructure describing the function nesting
734 tree rooted by CGN. */
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN, recursing over the callgraph's nested-function
   links.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  /* Children are prepended, so INFO->INNER ends up in reverse
     declaration order.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See discussion at check_for_nested_with_variably_modified for a
     discussion of why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
762 /* Return an expression computing the static chain for TARGET_CONTEXT
763 from INFO->CONTEXT. Insert any necessary computations before TSI. */
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The chain for our own context is just the address of our
	 frame struct.  */
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      x = get_chain_decl (info);

      /* Follow the __chain links up one level at a time until we
	 reach the target context's frame pointer.  */
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
794 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
795 frame as seen from INFO->CONTEXT. Insert any necessary computations
796 before GSI. */
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
    }
  else
    {
      x = get_chain_decl (info);

      /* Walk the static chain up to the target context's frame.  */
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build_simple_mem_ref (x);
    }

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
831 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
833 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
834 in the nested function with DECL_VALUE_EXPR set to reference the true
835 variable in the parent function. This is used both for debug info
836 and in OpenMP lowering. */
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OpenMP lowering.  Results are cached in INFO->VAR_MAP.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  /* I now points at the declaring context; look up DECL's frame field
     there and add a final dereference for pointer-in-frame decls.  */
  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The debug decl evaluates to the frame access expression X.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* At -O0, also record non-local VLA types so their bounds remain
     describable in debug info.  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
910 /* Callback for walk_gimple_stmt, rewrite all references to VAR
911 and PARM_DECLs that belong to outer functions.
913 The rewrite will involve some number of structure accesses back up
914 the static chain. E.g. for a variable FOO up one nesting level it'll
915 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
916 indirections apply to decls for which use_pointer_in_frame is true. */
/* Callback for walk_gimple_stmt, rewrite all references to VAR_DECLs
   and PARM_DECLs that belong to outer functions.  TP is the operand
   being visited, WALK_SUBTREES controls recursion, DATA is the
   walk_stmt_info carrying the current nesting_info.

   The rewrite will involve some number of structure accesses back up
   the static chain (CHAIN->FOO, CHAIN->__chain->FOO, ...); further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  x = get_nonlocal_debug_decl (info, t);
	  /* Suppressed decls are handled by OMP clause processing; keep
	     just the debug decl for those.  */
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build_simple_mem_ref (x);
		}
	    }

	  /* In value-only position the frame access must be lowered to
	     a temporary: load before (rhs) or store after (lhs).  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, lower bound and element size operands.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      /* Finally visit the base object itself, not in value position.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1059 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1060 struct walk_stmt_info *);
/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   Walks the OMP clause chain at *PCLAUSES, replacing decls that live in
   an enclosing function with their nonlocal debug decls and converting
   any clause operand expressions through convert_nonlocal_reference_op.
   Returns true if any rewritten clause requires the static chain to be
   passed into the region (so the caller must add a clause for it).  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; it is installed into
     INFO only after the whole chain has been processed.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* A placeholder means there are init/merge sequences to walk
	     in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The step expression may itself reference nonlocal decls.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never rewritten.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      /* Suppress frame expansion for this decl inside the region;
		 the clause itself names the debug decl instead.  */
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* These carry a single expression operand to convert.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					 &dummy, wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  /* The mapped entity may be a bare decl or an arbitrary
	     reference expression (e.g. an array section).  */
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  /* No operands that could reference nonlocal decls.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the reduction/lastprivate/linear statement
     sequences that the first pass flagged.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      /* Temporarily re-parent the placeholder so decl context
		 checks inside the walk treat it as local.  */
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1244 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1246 static void
1247 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1249 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1250 type = TREE_TYPE (type);
1252 if (TYPE_NAME (type)
1253 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1254 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1255 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1257 while (POINTER_TYPE_P (type)
1258 || TREE_CODE (type) == VECTOR_TYPE
1259 || TREE_CODE (type) == FUNCTION_TYPE
1260 || TREE_CODE (type) == METHOD_TYPE)
1261 type = TREE_TYPE (type);
1263 if (TREE_CODE (type) == ARRAY_TYPE)
1265 tree domain, t;
1267 note_nonlocal_vla_type (info, TREE_TYPE (type));
1268 domain = TYPE_DOMAIN (type);
1269 if (domain)
1271 t = TYPE_MIN_VALUE (domain);
1272 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1273 && decl_function_context (t) != info->context)
1274 get_nonlocal_debug_decl (info, t);
1275 t = TYPE_MAX_VALUE (domain);
1276 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1277 && decl_function_context (t) != info->context)
1278 get_nonlocal_debug_decl (info, t);
1283 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1284 in BLOCK. */
1286 static void
1287 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1289 tree var;
1291 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1292 if (TREE_CODE (var) == VAR_DECL
1293 && variably_modified_type_p (TREE_TYPE (var), NULL)
1294 && DECL_HAS_VALUE_EXPR_P (var)
1295 && decl_function_context (var) != info->context)
1296 note_nonlocal_vla_type (info, TREE_TYPE (var));
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = true;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OACC_KERNELS:
    case GIMPLE_OACC_PARALLEL:
      /* OpenACC offloaded regions are not expected here — presumably
	 handled before this pass; TODO confirm against the oacc
	 lowering pipeline.  */
      gcc_unreachable ();

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  /* A rewritten clause needs the static chain: pass it into the
	     region as a firstprivate.  */
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect temporaries created while walking the body so they can
	 be declared inside the region, not in the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
			   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
		 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
      if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
	{
	  /* #pragma omp target data/update: only clauses need
	     conversion; the body stays in the host function.  */
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  /* For an offloaded region the chain must be mapped to the
	     device rather than firstprivatized.  */
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TO;
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (stmt, c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* No clauses; just walk the body.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      /* At -O0 VLAs keep their value-exprs; note their nonlocal bounds
	 so debug decls exist for them.  */
      if (!optimize && gimple_bind_block (stmt))
	note_nonlocal_block_vlas (info, gimple_bind_block (stmt));

      for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (TREE_CODE (decl) == VAR_DECL
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) != info->context)
		  CONSTRUCTOR_ELT (decls, i)->value
		    = get_nonlocal_debug_decl (info, decl);
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1490 /* A subroutine of convert_local_reference. Create a local variable
1491 in the parent function with DECL_VALUE_EXPR set to reference the
1492 field in FRAME. This is used both for debug info and in OpenMP
1493 lowering. */
1495 static tree
1496 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1498 tree x, new_decl;
1500 tree *slot = &info->var_map->get_or_insert (decl);
1501 if (*slot)
1502 return *slot;
1504 /* Make sure frame_decl gets created. */
1505 (void) get_frame_type (info);
1506 x = info->frame_decl;
1507 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1509 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1510 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1511 DECL_CONTEXT (new_decl) = info->context;
1512 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1513 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1514 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1515 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1516 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1517 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1518 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1519 if ((TREE_CODE (decl) == PARM_DECL
1520 || TREE_CODE (decl) == RESULT_DECL
1521 || TREE_CODE (decl) == VAR_DECL)
1522 && DECL_BY_REFERENCE (decl))
1523 DECL_BY_REFERENCE (new_decl) = 1;
1525 SET_DECL_VALUE_EXPR (new_decl, x);
1526 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1527 *slot = new_decl;
1529 DECL_CHAIN (new_decl) = info->debug_var_chain;
1530 info->debug_var_chain = new_decl;
1532 /* Do not emit debug info twice. */
1533 DECL_IGNORED_P (decl) = 1;
1535 return new_decl;
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Inside suppressed (OMP privatized) regions the debug decl
	     is used; otherwise reference the frame field directly.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
					  struct walk_stmt_info *);

/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.

   Decls that live in the local frame are replaced by their local debug
   decls; clause operand expressions are converted through
   convert_local_reference_op.  Returns true if any rewrite requires the
   frame variable itself to be made visible to the region.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; installed into INFO
     after the whole chain has been processed.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* A placeholder means there are init/merge sequences to walk
	     in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
				      wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never rewritten.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  /* Name the debug decl in the clause and suppress frame
		     expansion of the decl inside the region.  */
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* These carry a single expression operand to convert.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					  &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
					  &dummy, wi);
	    }
	  /* The mapped entity may be a bare decl or an arbitrary
	     reference expression (e.g. an array section).  */
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  /* No operands that could reference frame decls.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the reduction/lastprivate/linear statement
     sequences that the first pass flagged.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      /* Temporarily re-parent the placeholder so decl context
		 checks inside the walk treat it as local.  */
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.

   Statement-level companion of convert_local_reference_op; handles the
   OMP region statements whose clause chains and bodies need explicit
   treatment, plus clobbers of frame-resident decls.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OACC_KERNELS:
    case GIMPLE_OACC_PARALLEL:
      /* OpenACC offloaded regions are not expected here — presumably
	 handled before this pass; TODO confirm against the oacc
	 lowering pipeline.  */
      gcc_unreachable ();

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
				     wi))
	{
	  /* A rewritten clause references the frame: share the frame
	     variable with the region.  */
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect temporaries created while walking the body so they can
	 be declared inside the region, not in the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (stmt, convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
      if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
	{
	  /* #pragma omp target data/update: only clauses need
	     conversion; the body stays in the host function.  */
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  /* For an offloaded region the whole frame must be mapped
	     to and from the device.  */
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TOFROM;
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (stmt, c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* No clauses; just walk the body.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	{
	  /* Drop clobbers of decls that now live in the frame; the
	     frame object outlives the decl's original scope.  */
	  tree lhs = gimple_assign_lhs (stmt);
	  if (!use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (TREE_CODE (decl) == VAR_DECL
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      CONSTRUCTOR_ELT (decls, i)->value
			= get_local_debug_decl (info, decl, field);
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2076 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2077 that reference labels from outer functions. The rewrite will be a
2078 call to __builtin_nonlocal_goto. */
2080 static tree
2081 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2082 struct walk_stmt_info *wi)
2084 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2085 tree label, new_label, target_context, x, field;
2086 gimple call;
2087 gimple stmt = gsi_stmt (*gsi);
/* Only GIMPLE_GOTO statements are of interest; let the generic walker
   handle everything else. */
2089 if (gimple_code (stmt) != GIMPLE_GOTO)
2091 *handled_ops_p = false;
2092 return NULL_TREE;
/* Computed gotos (destination is not a LABEL_DECL) are not rewritten. */
2095 label = gimple_goto_dest (stmt);
2096 if (TREE_CODE (label) != LABEL_DECL)
2098 *handled_ops_p = false;
2099 return NULL_TREE;
/* A goto to a label owned by the current function is an ordinary local
   goto; nothing to do. */
2102 target_context = decl_function_context (label);
2103 if (target_context == info->context)
2105 *handled_ops_p = false;
2106 return NULL_TREE;
/* Walk outward to the nesting_info of the function that owns the label;
   convert_nl_goto_reference only runs for labels in enclosing functions,
   so this search is expected to terminate. */
2109 for (i = info->outer; target_context != i->context; i = i->outer)
2110 continue;
2112 /* The original user label may also be used for a normal goto, therefore
2113 we must create a new label that will actually receive the abnormal
2114 control transfer. This new label will be marked LABEL_NONLOCAL; this
2115 mark will trigger proper behavior in the cfg, as well as cause the
2116 (hairy target-specific) non-local goto receiver code to be generated
2117 when we expand rtl. Enter this association into var_map so that we
2118 can insert the new label into the IL during a second pass. */
2119 tree *slot = &i->var_map->get_or_insert (label);
2120 if (*slot == NULL)
2122 new_label = create_artificial_label (UNKNOWN_LOCATION);
2123 DECL_NONLOCAL (new_label) = 1;
2124 *slot = new_label;
2126 else
2127 new_label = *slot;
2129 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2130 field = get_nl_goto_field (i);
2131 x = get_frame_field (info, target_context, field, gsi);
2132 x = build_addr (x, target_context);
2133 x = gsi_gimplify_val (info, x, gsi);
2134 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2135 2, build_addr (new_label, target_context), x);
2136 gsi_replace (gsi, call, false);
2138 /* We have handled all of STMT's operands, no need to keep going. */
2139 *handled_ops_p = true;
2140 return NULL_TREE;
2144 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2145 are referenced via nonlocal goto from a nested function. The rewrite
2146 will involve installing a newly generated DECL_NONLOCAL label, and
2147 (potentially) a branch around the rtl gunk that is assumed to be
2148 attached to such a label. */
2150 static tree
2151 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2152 struct walk_stmt_info *wi)
2154 struct nesting_info *const info = (struct nesting_info *) wi->info;
2155 tree label, new_label;
2156 gimple_stmt_iterator tmp_gsi;
2157 gimple stmt = gsi_stmt (*gsi);
/* Only label statements can be nonlocal-goto receivers. */
2159 if (gimple_code (stmt) != GIMPLE_LABEL)
2161 *handled_ops_p = false;
2162 return NULL_TREE;
2165 label = gimple_label_label (stmt);
/* var_map has an entry only for labels that convert_nl_goto_reference
   found to be targets of a nonlocal goto from a nested function. */
2167 tree *slot = info->var_map->get (label);
2168 if (!slot)
2170 *handled_ops_p = false;
2171 return NULL_TREE;
2174 /* If there's any possibility that the previous statement falls through,
2175 then we must branch around the new non-local label. */
2176 tmp_gsi = wi->gsi;
2177 gsi_prev (&tmp_gsi);
2178 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2180 gimple stmt = gimple_build_goto (label);
2181 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
/* Install the artificial DECL_NONLOCAL label just before the user label
   so the abnormal edge lands on the new receiver. */
2184 new_label = (tree) *slot;
2185 stmt = gimple_build_label (new_label);
2186 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2188 *handled_ops_p = true;
2189 return NULL_TREE;
2193 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2194 of nested functions that require the use of trampolines. The rewrite
2195 will involve a reference a trampoline generated for the occasion. */
2197 static tree
2198 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2200 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2201 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2202 tree t = *tp, decl, target_context, x, builtin;
2203 gimple call;
/* Default to not descending; the default case below re-enables the walk
   for nodes that are neither types nor decls. */
2205 *walk_subtrees = 0;
2206 switch (TREE_CODE (t))
2208 case ADDR_EXPR:
2209 /* Build
2210 T.1 = &CHAIN->tramp;
2211 T.2 = __builtin_adjust_trampoline (T.1);
2212 T.3 = (func_type)T.2;
2215 decl = TREE_OPERAND (t, 0);
2216 if (TREE_CODE (decl) != FUNCTION_DECL)
2217 break;
2219 /* Only need to process nested functions. */
2220 target_context = decl_function_context (decl);
2221 if (!target_context)
2222 break;
2224 /* If the nested function doesn't use a static chain, then
2225 it doesn't need a trampoline. */
2226 if (!DECL_STATIC_CHAIN (decl))
2227 break;
2229 /* If we don't want a trampoline, then don't build one. */
2230 if (TREE_NO_TRAMPOLINE (t))
2231 break;
2233 /* Lookup the immediate parent of the callee, as that's where
2234 we need to insert the trampoline. */
2235 for (i = info; i->context != target_context; i = i->outer)
2236 continue;
2237 x = lookup_tramp_for_decl (i, decl, INSERT);
2239 /* Compute the address of the field holding the trampoline. */
2240 x = get_frame_field (info, target_context, x, &wi->gsi);
2241 x = build_addr (x, target_context);
2242 x = gsi_gimplify_val (info, x, &wi->gsi);
2244 /* Do machine-specific ugliness. Normally this will involve
2245 computing extra alignment, but it can really be anything. */
2246 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2247 call = gimple_build_call (builtin, 1, x);
2248 x = init_tmp_var_with_call (info, &wi->gsi, call);
2250 /* Cast back to the proper function type. */
2251 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2252 x = init_tmp_var (info, x, &wi->gsi);
/* Replace the original &FUNCTION_DECL with the adjusted trampoline
   address. */
2254 *tp = x;
2255 break;
2257 default:
2258 if (!IS_TYPE_OR_DECL_P (t))
2259 *walk_subtrees = 1;
2260 break;
2263 return NULL_TREE;
2267 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2268 to addresses of nested functions that require the use of
2269 trampolines. The rewrite will involve a reference a trampoline
2270 generated for the occasion. */
2272 static tree
2273 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2274 struct walk_stmt_info *wi)
2276 struct nesting_info *info = (struct nesting_info *) wi->info;
2277 gimple stmt = gsi_stmt (*gsi);
2279 switch (gimple_code (stmt))
2281 case GIMPLE_CALL:
2283 /* Only walk call arguments, lest we generate trampolines for
2284 direct calls. */
2285 unsigned long i, nargs = gimple_call_num_args (stmt);
2286 for (i = 0; i < nargs; i++)
2287 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2288 wi, NULL);
2289 break;
/* OpenACC kernels/parallel constructs are not expected here; they were
   presumably handled (or rejected) earlier in the OMP lowering. */
2292 case GIMPLE_OACC_KERNELS:
2293 case GIMPLE_OACC_PARALLEL:
2294 gcc_unreachable ();
2296 case GIMPLE_OMP_TARGET:
2297 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
/* Non-region target constructs (data/update) have no separate body
   function, so treat them like any other statement. */
2298 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
2300 *handled_ops_p = false;
2301 return NULL_TREE;
2303 /* FALLTHRU */
2304 case GIMPLE_OMP_PARALLEL:
2305 case GIMPLE_OMP_TASK:
/* Walk the construct's body with a fresh local-var chain so that any
   temporaries created for trampoline addresses are declared inside
   the OMP body rather than in the enclosing function. */
2307 tree save_local_var_chain;
2308 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2309 save_local_var_chain = info->new_local_var_chain;
2310 info->new_local_var_chain = NULL;
2311 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2312 info, gimple_omp_body_ptr (stmt));
2313 if (info->new_local_var_chain)
2314 declare_vars (info->new_local_var_chain,
2315 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2316 false);
2317 info->new_local_var_chain = save_local_var_chain;
2319 break;
2321 default:
2322 *handled_ops_p = false;
2323 return NULL_TREE;
2326 *handled_ops_p = true;
2327 return NULL_TREE;
2332 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2333 that reference nested functions to make sure that the static chain
2334 is set up properly for the call. */
2336 static tree
2337 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2338 struct walk_stmt_info *wi)
2340 struct nesting_info *const info = (struct nesting_info *) wi->info;
2341 tree decl, target_context;
2342 char save_static_chain_added;
2343 int i;
2344 gimple stmt = gsi_stmt (*gsi);
2346 switch (gimple_code (stmt))
2348 case GIMPLE_CALL:
/* If a static chain was already supplied, leave it alone. */
2349 if (gimple_call_chain (stmt))
2350 break;
2351 decl = gimple_call_fndecl (stmt);
2352 if (!decl)
2353 break;
2354 target_context = decl_function_context (decl);
2355 if (target_context && DECL_STATIC_CHAIN (decl))
2357 gimple_call_set_chain (stmt, get_static_chain (info, target_context,
2358 &wi->gsi));
/* Record which object fed the chain: bit 0 when our own frame was
   used (callee is our direct child), bit 1 when our incoming chain
   decl was used (callee is higher up).  */
2359 info->static_chain_added |= (1 << (info->context != target_context));
2361 break;
2363 case GIMPLE_OACC_KERNELS:
2364 case GIMPLE_OACC_PARALLEL:
2365 gcc_unreachable ();
2367 case GIMPLE_OMP_PARALLEL:
2368 case GIMPLE_OMP_TASK:
/* Walk the OMP body with a zeroed static_chain_added so we can tell
   which of FRAME/CHAIN the body itself needed, then add data-sharing
   clauses for exactly those. */
2369 save_static_chain_added = info->static_chain_added;
2370 info->static_chain_added = 0;
2371 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2372 for (i = 0; i < 2; i++)
2374 tree c, decl;
2375 if ((info->static_chain_added & (1 << i)) == 0)
2376 continue;
2377 decl = i ? get_chain_decl (info) : info->frame_decl;
2378 /* Don't add CHAIN.* or FRAME.* twice. */
2379 for (c = gimple_omp_taskreg_clauses (stmt);
2381 c = OMP_CLAUSE_CHAIN (c))
2382 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2383 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2384 && OMP_CLAUSE_DECL (c) == decl)
2385 break;
2386 if (c == NULL)
/* CHAIN.* is a pointer, safe to copy in (firstprivate); FRAME.* must
   be shared so up-level writes are visible. */
2388 c = build_omp_clause (gimple_location (stmt),
2389 i ? OMP_CLAUSE_FIRSTPRIVATE
2390 : OMP_CLAUSE_SHARED);
2391 OMP_CLAUSE_DECL (c) = decl;
2392 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2393 gimple_omp_taskreg_set_clauses (stmt, c);
2396 info->static_chain_added |= save_static_chain_added;
2397 break;
2399 case GIMPLE_OMP_TARGET:
2400 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
2401 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
2403 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2404 break;
/* Target regions get MAP clauses instead of data-sharing clauses;
   otherwise the logic mirrors the parallel/task case above. */
2406 save_static_chain_added = info->static_chain_added;
2407 info->static_chain_added = 0;
2408 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2409 for (i = 0; i < 2; i++)
2411 tree c, decl;
2412 if ((info->static_chain_added & (1 << i)) == 0)
2413 continue;
2414 decl = i ? get_chain_decl (info) : info->frame_decl;
2415 /* Don't add CHAIN.* or FRAME.* twice. */
2416 for (c = gimple_omp_target_clauses (stmt);
2418 c = OMP_CLAUSE_CHAIN (c))
2419 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2420 && OMP_CLAUSE_DECL (c) == decl)
2421 break;
2422 if (c == NULL)
2424 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2425 OMP_CLAUSE_DECL (c) = decl;
2426 OMP_CLAUSE_MAP_KIND (c)
2427 = i ? OMP_CLAUSE_MAP_TO : OMP_CLAUSE_MAP_TOFROM;
2428 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2429 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2430 gimple_omp_target_set_clauses (stmt, c);
2433 info->static_chain_added |= save_static_chain_added;
2434 break;
2436 case GIMPLE_OMP_FOR:
2437 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
2438 walk_body (convert_gimple_call, NULL, info,
2439 gimple_omp_for_pre_body_ptr (stmt));
2440 /* FALLTHRU */
2441 case GIMPLE_OMP_SECTIONS:
2442 case GIMPLE_OMP_SECTION:
2443 case GIMPLE_OMP_SINGLE:
2444 case GIMPLE_OMP_TEAMS:
2445 case GIMPLE_OMP_MASTER:
2446 case GIMPLE_OMP_TASKGROUP:
2447 case GIMPLE_OMP_ORDERED:
2448 case GIMPLE_OMP_CRITICAL:
2449 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
2450 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2451 break;
2453 default:
2454 /* Keep looking for other operands. */
2455 *handled_ops_p = false;
2456 return NULL_TREE;
2459 *handled_ops_p = true;
2460 return NULL_TREE;
2463 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2464 call expressions. At the same time, determine if a nested function
2465 actually uses its static chain; if not, remember that. */
2467 static void
2468 convert_all_function_calls (struct nesting_info *root)
2470 unsigned int chain_count = 0, old_chain_count, iter_count;
2471 struct nesting_info *n;
2473 /* First, optimistically clear static_chain for all decls that haven't
2474 used the static chain already for variable access. But always create
2475 it if not optimizing. This makes it possible to reconstruct the static
2476 nesting tree at run time and thus to resolve up-level references from
2477 within the debugger. */
2478 FOR_EACH_NEST_INFO (n, root)
2480 tree decl = n->context;
2481 if (!optimize)
2483 if (n->inner)
2484 (void) get_frame_type (n);
2485 if (n->outer)
2486 (void) get_chain_decl (n);
2488 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2490 DECL_STATIC_CHAIN (decl) = 0;
2491 if (dump_file && (dump_flags & TDF_DETAILS))
2492 fprintf (dump_file, "Guessing no static-chain for %s\n",
2493 lang_hooks.decl_printable_name (decl, 2));
2495 else
2496 DECL_STATIC_CHAIN (decl) = 1;
2497 chain_count += DECL_STATIC_CHAIN (decl);
2500 /* Walk the functions and perform transformations. Note that these
2501 transformations can induce new uses of the static chain, which in turn
2502 require re-examining all users of the decl. */
2503 /* ??? It would make sense to try to use the call graph to speed this up,
2504 but the call graph hasn't really been built yet. Even if it did, we
2505 would still need to iterate in this loop since address-of references
2506 wouldn't show up in the callgraph anyway. */
2507 iter_count = 0;
/* Fixed-point loop: repeat the conversion (body runs at least once) until
   the number of functions needing a static chain stops changing. */
2510 old_chain_count = chain_count;
2511 chain_count = 0;
2512 iter_count++;
2514 if (dump_file && (dump_flags & TDF_DETAILS))
2515 fputc ('\n', dump_file);
2517 FOR_EACH_NEST_INFO (n, root)
2519 tree decl = n->context;
2520 walk_function (convert_tramp_reference_stmt,
2521 convert_tramp_reference_op, n);
2522 walk_function (convert_gimple_call, NULL, n);
2523 chain_count += DECL_STATIC_CHAIN (decl);
2526 while (chain_count != old_chain_count);
2528 if (dump_file && (dump_flags & TDF_DETAILS))
2529 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2530 iter_count);
/* Callback data for debug-variable type remapping: extends the standard
   tree-inline copy_body_data with the nesting_info whose var_map records
   decls already remapped by this pass.  */
2533 struct nesting_copy_body_data
/* Standard tree-inline state; cb.copy_decl is pointed at
   nesting_copy_decl by the users of this struct.  */
2535 copy_body_data cb;
/* Nesting tree node whose var_map is consulted first when copying.  */
2536 struct nesting_info *root;
2539 /* A helper subroutine for debug_var_chain type remapping. */
2541 static tree
2542 nesting_copy_decl (tree decl, copy_body_data *id)
2544 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2545 tree *slot = nid->root->var_map->get (decl);
/* Reuse any mapping this pass has already established for DECL. */
2547 if (slot)
2548 return (tree) *slot;
/* For typedefs, the original type must be remapped too, so a plain copy
   is not enough. */
2550 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2552 tree new_decl = copy_decl_no_change (decl, id);
2553 DECL_ORIGINAL_TYPE (new_decl)
2554 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2555 return new_decl;
/* Variables, parameters and results are shared as-is rather than
   duplicated. */
2558 if (TREE_CODE (decl) == VAR_DECL
2559 || TREE_CODE (decl) == PARM_DECL
2560 || TREE_CODE (decl) == RESULT_DECL)
2561 return decl;
2563 return copy_decl_no_change (decl, id);
2566 /* A helper function for remap_vla_decls. See if *TP contains
2567 some remapped variables. */
2569 static tree
2570 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2572 struct nesting_info *root = (struct nesting_info *) data;
2573 tree t = *tp;
2575 if (DECL_P (t))
2577 *walk_subtrees = 0;
2578 tree *slot = root->var_map->get (t);
2580 if (slot)
2581 return *slot;
2583 return NULL;
2586 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2587 involved. */
2589 static void
2590 remap_vla_decls (tree block, struct nesting_info *root)
2592 tree var, subblock, val, type;
2593 struct nesting_copy_body_data id;
/* Recurse into lexical subblocks first. */
2595 for (subblock = BLOCK_SUBBLOCKS (block);
2596 subblock;
2597 subblock = BLOCK_CHAIN (subblock))
2598 remap_vla_decls (subblock, root);
/* First scan: find the first VLA-like variable (value expr of the form
   *ptr with a variably-modified type) that involves remapped decls.
   If none exists, there is nothing to do in this block. */
2600 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2601 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2603 val = DECL_VALUE_EXPR (var);
2604 type = TREE_TYPE (var);
2606 if (!(TREE_CODE (val) == INDIRECT_REF
2607 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2608 && variably_modified_type_p (type, NULL)))
2609 continue;
2611 if (root->var_map->get (TREE_OPERAND (val, 0))
2612 || walk_tree (&type, contains_remapped_vars, root, NULL))
2613 break;
2616 if (var == NULL_TREE)
2617 return;
/* Set up the copy-body machinery with our decl-reusing copy hook. */
2619 memset (&id, 0, sizeof (id));
2620 id.cb.copy_decl = nesting_copy_decl;
2621 id.cb.decl_map = new hash_map<tree, tree>;
2622 id.root = root;
/* Second scan: starting at the variable found above, remap each affected
   VLA variable's type and value expression. */
2624 for (; var; var = DECL_CHAIN (var))
2625 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2627 struct nesting_info *i;
2628 tree newt, context;
2630 val = DECL_VALUE_EXPR (var);
2631 type = TREE_TYPE (var);
2633 if (!(TREE_CODE (val) == INDIRECT_REF
2634 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2635 && variably_modified_type_p (type, NULL)))
2636 continue;
2638 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2639 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2640 continue;
/* Find the nesting_info of the function that owns VAR; skip variables
   from functions outside this nesting tree. */
2642 context = decl_function_context (var);
2643 for (i = root; i; i = i->outer)
2644 if (i->context == context)
2645 break;
2647 if (i == NULL)
2648 continue;
2650 /* Fully expand value expressions. This avoids having debug variables
2651 only referenced from them and that can be swept during GC. */
2652 if (slot)
2654 tree t = (tree) *slot;
2655 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2656 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2659 id.cb.src_fn = i->context;
2660 id.cb.dst_fn = i->context;
2661 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2663 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
/* Walk through anonymous pointer layers so the TYPE_NAME fixup below
   applies to the innermost named type. */
2664 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2666 newt = TREE_TYPE (newt);
2667 type = TREE_TYPE (type);
2669 if (TYPE_NAME (newt)
2670 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2671 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2672 && newt != type
2673 && TYPE_NAME (newt) == TYPE_NAME (type))
2674 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2676 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2677 if (val != DECL_VALUE_EXPR (var))
2678 SET_DECL_VALUE_EXPR (var, val);
2681 delete id.cb.decl_map;
2684 /* Fold the MEM_REF *E. */
2685 bool
2686 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2688 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2689 *ref_p = fold (*ref_p);
2690 return true;
2693 /* Do "everything else" to clean up or complete state collected by the
2694 various walking passes -- lay out the types and decls, generate code
2695 to initialize the frame decl, store critical expressions in the
2696 struct function for rtl to find. */
2698 static void
2699 finalize_nesting_tree_1 (struct nesting_info *root)
2701 gimple_seq stmt_list;
2702 gimple stmt;
2703 tree context = root->context;
2704 struct function *sf;
2706 stmt_list = NULL;
2708 /* If we created a non-local frame type or decl, we need to lay them
2709 out at this time. */
2710 if (root->frame_type)
2712 /* In some cases the frame type will trigger the -Wpadded warning.
2713 This is not helpful; suppress it. */
2714 int save_warn_padded = warn_padded;
2715 tree *adjust;
2717 warn_padded = 0;
2718 layout_type (root->frame_type);
2719 warn_padded = save_warn_padded;
2720 layout_decl (root->frame_decl, 0);
2722 /* Remove root->frame_decl from root->new_local_var_chain, so
2723 that we can declare it also in the lexical blocks, which
2724 helps ensure virtual regs that end up appearing in its RTL
2725 expression get substituted in instantiate_virtual_regs(). */
2726 for (adjust = &root->new_local_var_chain;
2727 *adjust != root->frame_decl;
2728 adjust = &DECL_CHAIN (*adjust))
2729 gcc_assert (DECL_CHAIN (*adjust));
2730 *adjust = DECL_CHAIN (*adjust);
2732 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2733 declare_vars (root->frame_decl,
2734 gimple_seq_first_stmt (gimple_body (context)), true);
2737 /* If any parameters were referenced non-locally, then we need to
2738 insert a copy. Likewise, if any variables were referenced by
2739 pointer, we need to initialize the address. */
2740 if (root->any_parm_remapped)
2742 tree p;
2743 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2745 tree field, x, y;
2747 field = lookup_field_for_decl (root, p, NO_INSERT);
2748 if (!field)
2749 continue;
/* By-pointer parameters store their address in the frame; others
   store a copy of the value itself. */
2751 if (use_pointer_in_frame (p))
2752 x = build_addr (p, context);
2753 else
2754 x = p;
2756 /* If the assignment is from a non-register the stmt is
2757 not valid gimple. Make it so by using a temporary instead. */
2758 if (!is_gimple_reg (x)
2759 && is_gimple_reg_type (TREE_TYPE (x)))
2761 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2762 x = init_tmp_var (root, x, &gsi);
/* FRAME.field = x. */
2765 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2766 root->frame_decl, field, NULL_TREE);
2767 stmt = gimple_build_assign (y, x);
2768 gimple_seq_add_stmt (&stmt_list, stmt);
2772 /* If a chain_field was created, then it needs to be initialized
2773 from chain_decl. */
2774 if (root->chain_field)
2776 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2777 root->frame_decl, root->chain_field, NULL_TREE);
2778 stmt = gimple_build_assign (x, get_chain_decl (root));
2779 gimple_seq_add_stmt (&stmt_list, stmt);
2782 /* If trampolines were created, then we need to initialize them. */
2783 if (root->any_tramp_created)
2785 struct nesting_info *i;
2786 for (i = root->inner; i ; i = i->next)
2788 tree arg1, arg2, arg3, x, field;
2790 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2791 if (!field)
2792 continue;
2794 gcc_assert (DECL_STATIC_CHAIN (i->context));
2795 arg3 = build_addr (root->frame_decl, context);
2797 arg2 = build_addr (i->context, context);
/* arg1 = &FRAME.tramp_field; then
   __builtin_init_trampoline (arg1, nested_fn, static_chain). */
2799 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2800 root->frame_decl, field, NULL_TREE);
2801 arg1 = build_addr (x, context);
2803 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2804 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2805 gimple_seq_add_stmt (&stmt_list, stmt);
2809 /* If we created initialization statements, insert them. */
2810 if (stmt_list)
2812 gimple bind;
2813 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2814 bind = gimple_seq_first_stmt (gimple_body (context));
2815 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2816 gimple_bind_set_body (bind, stmt_list);
2819 /* If a chain_decl was created, then it needs to be registered with
2820 struct function so that it gets initialized from the static chain
2821 register at the beginning of the function. */
2822 sf = DECL_STRUCT_FUNCTION (root->context);
2823 sf->static_chain_decl = root->chain_decl;
2825 /* Similarly for the non-local goto save area. */
2826 if (root->nl_goto_field)
2828 sf->nonlocal_goto_save_area
2829 = get_frame_field (root, context, root->nl_goto_field, NULL);
2830 sf->has_nonlocal_label = 1;
2833 /* Make sure all new local variables get inserted into the
2834 proper BIND_EXPR. */
2835 if (root->new_local_var_chain)
2836 declare_vars (root->new_local_var_chain,
2837 gimple_seq_first_stmt (gimple_body (root->context)),
2838 false);
2840 if (root->debug_var_chain)
2842 tree debug_var;
2843 gimple scope;
2845 remap_vla_decls (DECL_INITIAL (root->context), root);
/* Check whether any debug decl has a variably-modified type; only
   then is the remapping machinery below needed. */
2847 for (debug_var = root->debug_var_chain; debug_var;
2848 debug_var = DECL_CHAIN (debug_var))
2849 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2850 break;
2852 /* If there are any debug decls with variable length types,
2853 remap those types using other debug_var_chain variables. */
2854 if (debug_var)
2856 struct nesting_copy_body_data id;
2858 memset (&id, 0, sizeof (id));
2859 id.cb.copy_decl = nesting_copy_decl;
2860 id.cb.decl_map = new hash_map<tree, tree>;
2861 id.root = root;
2863 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2864 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2866 tree type = TREE_TYPE (debug_var);
2867 tree newt, t = type;
2868 struct nesting_info *i;
/* Find the outermost enclosing function whose context makes the
   type variably modified; skip if none in this nesting tree. */
2870 for (i = root; i; i = i->outer)
2871 if (variably_modified_type_p (type, i->context))
2872 break;
2874 if (i == NULL)
2875 continue;
2877 id.cb.src_fn = i->context;
2878 id.cb.dst_fn = i->context;
2879 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2881 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2882 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2884 newt = TREE_TYPE (newt);
2885 t = TREE_TYPE (t);
2887 if (TYPE_NAME (newt)
2888 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2889 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2890 && newt != t
2891 && TYPE_NAME (newt) == TYPE_NAME (t))
2892 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2895 delete id.cb.decl_map;
/* Attach the debug variables either to the outermost bind's block or,
   if it has none, directly to the function's top-level BLOCK. */
2898 scope = gimple_seq_first_stmt (gimple_body (root->context));
2899 if (gimple_bind_block (scope))
2900 declare_vars (root->debug_var_chain, scope, true);
2901 else
2902 BLOCK_VARS (DECL_INITIAL (root->context))
2903 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2904 root->debug_var_chain);
2907 /* Fold the rewritten MEM_REF trees. */
2908 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
2910 /* Dump the translated tree function. */
2911 if (dump_file)
2913 fputs ("\n\n", dump_file);
2914 dump_function_to_file (root->context, dump_file, dump_flags);
2918 static void
2919 finalize_nesting_tree (struct nesting_info *root)
2921 struct nesting_info *n;
2922 FOR_EACH_NEST_INFO (n, root)
2923 finalize_nesting_tree_1 (n);
2926 /* Unnest the nodes and pass them to cgraph. */
2928 static void
2929 unnest_nesting_tree_1 (struct nesting_info *root)
2931 struct cgraph_node *node = cgraph_node::get (root->context);
2933 /* For nested functions update the cgraph to reflect unnesting.
2934 We also delay finalizing of these functions up to this point. */
2935 if (node->origin)
2937 node->unnest ();
2938 cgraph_node::finalize_function (root->context, true);
2942 static void
2943 unnest_nesting_tree (struct nesting_info *root)
2945 struct nesting_info *n;
2946 FOR_EACH_NEST_INFO (n, root)
2947 unnest_nesting_tree_1 (n);
2950 /* Free the data structures allocated during this pass. */
2952 static void
2953 free_nesting_tree (struct nesting_info *root)
2955 struct nesting_info *node, *next;
2957 node = iter_nestinfo_start (root);
2960 next = iter_nestinfo_next (node);
2961 delete node->var_map;
2962 delete node->field_map;
2963 delete node->mem_refs;
2964 free (node);
2965 node = next;
2967 while (node);
2970 /* Gimplify a function and all its nested functions. */
2971 static void
2972 gimplify_all_functions (struct cgraph_node *root)
2974 struct cgraph_node *iter;
2975 if (!gimple_body (root->decl))
2976 gimplify_function_tree (root->decl);
2977 for (iter = root->nested; iter; iter = iter->next_nested)
2978 gimplify_all_functions (iter);
2981 /* Main entry point for this pass. Process FNDECL and all of its nested
2982 subroutines and turn them into something less tightly bound. */
2984 void
2985 lower_nested_functions (tree fndecl)
2987 struct cgraph_node *cgn;
2988 struct nesting_info *root;
2990 /* If there are no nested functions, there's nothing to do. */
2991 cgn = cgraph_node::get (fndecl);
2992 if (!cgn->nested)
2993 return;
/* Make sure every function in the tree has a GIMPLE body before any
   walking starts. */
2995 gimplify_all_functions (cgn);
2997 dump_file = dump_begin (TDI_nested, &dump_flags);
2998 if (dump_file)
2999 fprintf (dump_file, "\n;; Function %s\n\n",
3000 lang_hooks.decl_printable_name (fndecl, 2));
3002 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3003 root = create_nesting_tree (cgn);
/* Pass pipeline: rewrite up-level references seen from nested functions,
   then references to locals that escaped into the frame, then nonlocal
   gotos and their receivers, then trampolines/static chains, and finally
   lay out frames and hand the unnested functions to the cgraph. */
3005 walk_all_functions (convert_nonlocal_reference_stmt,
3006 convert_nonlocal_reference_op,
3007 root);
3008 walk_all_functions (convert_local_reference_stmt,
3009 convert_local_reference_op,
3010 root);
3011 walk_all_functions (convert_nl_goto_reference, NULL, root);
3012 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3014 convert_all_function_calls (root);
3015 finalize_nesting_tree (root);
3016 unnest_nesting_tree (root);
3018 free_nesting_tree (root);
3019 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3021 if (dump_file)
3023 dump_end (TDI_nested, dump_file);
3024 dump_file = NULL;
3028 #include "gt-tree-nested.h"