svn merge -r 217500:218679 svn+ssh://gcc.gnu.org/svn/gcc/trunk
[official-gcc.git] / gcc / tree-nested.c
blobac686e2a1e4ed58ff13b052daca7b196eeef2177
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stringpool.h"
26 #include "stor-layout.h"
27 #include "tm_p.h"
28 #include "hashtab.h"
29 #include "hash-set.h"
30 #include "vec.h"
31 #include "machmode.h"
32 #include "hard-reg-set.h"
33 #include "input.h"
34 #include "function.h"
35 #include "tree-dump.h"
36 #include "tree-inline.h"
37 #include "predict.h"
38 #include "basic-block.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-expr.h"
42 #include "is-a.h"
43 #include "gimple.h"
44 #include "gimplify.h"
45 #include "gimple-iterator.h"
46 #include "gimple-walk.h"
47 #include "tree-iterator.h"
48 #include "bitmap.h"
49 #include "hash-map.h"
50 #include "plugin-api.h"
51 #include "ipa-ref.h"
52 #include "cgraph.h"
53 #include "tree-cfg.h"
54 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
55 #include "langhooks.h"
56 #include "gimple-low.h"
59 /* The object of this pass is to lower the representation of a set of nested
60 functions in order to expose all of the gory details of the various
61 nonlocal references. We want to do this sooner rather than later, in
62 order to give us more freedom in emitting all of the functions in question.
64 Back in olden times, when gcc was young, we developed an insanely
65 complicated scheme whereby variables which were referenced nonlocally
66 were forced to live in the stack of the declaring function, and then
67 the nested functions magically discovered where these variables were
68 placed. In order for this scheme to function properly, it required
69 that the outer function be partially expanded, then we switch to
70 compiling the inner function, and once done with those we switch back
71 to compiling the outer function. Such delicate ordering requirements
72 makes it difficult to do whole translation unit optimizations
73 involving such functions.
75 The implementation here is much more direct. Everything that can be
76 referenced by an inner function is a member of an explicitly created
77 structure herein called the "nonlocal frame struct". The incoming
78 static chain for a nested function is a pointer to this struct in
79 the parent. In this way, we settle on known offsets from a known
80 base, and so are decoupled from the logic that places objects in the
81 function's stack frame. More importantly, we don't have to wait for
82 that to happen -- since the compilation of the inner function is no
83 longer tied to a real stack frame, the nonlocal frame struct can be
84 allocated anywhere. Which means that the outer function is now
85 inlinable.
87 Theory of operation here is very simple. Iterate over all the
88 statements in all the functions (depth first) several times,
89 allocating structures and fields on demand. In general we want to
90 examine inner functions first, so that we can avoid making changes
91 to outer functions which are unnecessary.
93 The order of the passes matters a bit, in that later passes will be
94 skipped if it is discovered that the functions don't actually interact
95 at all. That is, they're nested in the lexical sense but could have
96 been written as independent functions without change. */
99 struct nesting_info
101 struct nesting_info *outer;
102 struct nesting_info *inner;
103 struct nesting_info *next;
105 hash_map<tree, tree> *field_map;
106 hash_map<tree, tree> *var_map;
107 hash_set<tree *> *mem_refs;
108 bitmap suppress_expansion;
110 tree context;
111 tree new_local_var_chain;
112 tree debug_var_chain;
113 tree frame_type;
114 tree frame_decl;
115 tree chain_field;
116 tree chain_decl;
117 tree nl_goto_field;
119 bool any_parm_remapped;
120 bool any_tramp_created;
121 char static_chain_added;
125 /* Iterate over the nesting tree, starting with ROOT, depth first. */
127 static inline struct nesting_info *
128 iter_nestinfo_start (struct nesting_info *root)
130 while (root->inner)
131 root = root->inner;
132 return root;
135 static inline struct nesting_info *
136 iter_nestinfo_next (struct nesting_info *node)
138 if (node->next)
139 return iter_nestinfo_start (node->next);
140 return node->outer;
143 #define FOR_EACH_NEST_INFO(I, ROOT) \
144 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
146 /* Obstack used for the bitmaps in the struct above. */
147 static struct bitmap_obstack nesting_info_bitmap_obstack;
150 /* We're working in so many different function contexts simultaneously,
151 that create_tmp_var is dangerous. Prevent mishap. */
152 #define create_tmp_var cant_use_create_tmp_var_here_dummy
154 /* Like create_tmp_var, except record the variable for registration at
155 the given nesting level. */
157 static tree
158 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
160 tree tmp_var;
162 /* If the type is of variable size or a type which must be created by the
163 frontend, something is wrong. Note that we explicitly allow
164 incomplete types here, since we create them ourselves here. */
165 gcc_assert (!TREE_ADDRESSABLE (type));
166 gcc_assert (!TYPE_SIZE_UNIT (type)
167 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
169 tmp_var = create_tmp_var_raw (type, prefix);
170 DECL_CONTEXT (tmp_var) = info->context;
171 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
172 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
173 if (TREE_CODE (type) == COMPLEX_TYPE
174 || TREE_CODE (type) == VECTOR_TYPE)
175 DECL_GIMPLE_REG_P (tmp_var) = 1;
177 info->new_local_var_chain = tmp_var;
179 return tmp_var;
182 /* Take the address of EXP to be used within function CONTEXT.
183 Mark it for addressability as necessary. */
185 tree
186 build_addr (tree exp, tree context)
188 tree base = exp;
189 tree save_context;
190 tree retval;
192 while (handled_component_p (base))
193 base = TREE_OPERAND (base, 0);
195 if (DECL_P (base))
196 TREE_ADDRESSABLE (base) = 1;
198 /* Building the ADDR_EXPR will compute a set of properties for
199 that ADDR_EXPR. Those properties are unfortunately context
200 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
202 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
203 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
204 way the properties are for the ADDR_EXPR are computed properly. */
205 save_context = current_function_decl;
206 current_function_decl = context;
207 retval = build_fold_addr_expr (exp);
208 current_function_decl = save_context;
209 return retval;
212 /* Insert FIELD into TYPE, sorted by alignment requirements. */
214 void
215 insert_field_into_struct (tree type, tree field)
217 tree *p;
219 DECL_CONTEXT (field) = type;
221 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
222 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
223 break;
225 DECL_CHAIN (field) = *p;
226 *p = field;
228 /* Set correct alignment for frame struct type. */
229 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
230 TYPE_ALIGN (type) = DECL_ALIGN (field);
233 /* Build or return the RECORD_TYPE that describes the frame state that is
234 shared between INFO->CONTEXT and its nested functions. This record will
235 not be complete until finalize_nesting_tree; up until that point we'll
236 be adding fields as necessary.
238 We also build the DECL that represents this frame in the function. */
240 static tree
241 get_frame_type (struct nesting_info *info)
243 tree type = info->frame_type;
244 if (!type)
246 char *name;
248 type = make_node (RECORD_TYPE);
250 name = concat ("FRAME.",
251 IDENTIFIER_POINTER (DECL_NAME (info->context)),
252 NULL);
253 TYPE_NAME (type) = get_identifier (name);
254 free (name);
256 info->frame_type = type;
257 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
258 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
260 /* ??? Always make it addressable for now, since it is meant to
261 be pointed to by the static chain pointer. This pessimizes
262 when it turns out that no static chains are needed because
263 the nested functions referencing non-local variables are not
264 reachable, but the true pessimization is to create the non-
265 local frame structure in the first place. */
266 TREE_ADDRESSABLE (info->frame_decl) = 1;
268 return type;
271 /* Return true if DECL should be referenced by pointer in the non-local
272 frame structure. */
274 static bool
275 use_pointer_in_frame (tree decl)
277 if (TREE_CODE (decl) == PARM_DECL)
279 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
280 sized decls, and inefficient to copy large aggregates. Don't bother
281 moving anything but scalar variables. */
282 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
284 else
286 /* Variable sized types make things "interesting" in the frame. */
287 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
291 /* Given DECL, a non-locally accessed variable, find or create a field
292 in the non-local frame structure for the given nesting context. */
294 static tree
295 lookup_field_for_decl (struct nesting_info *info, tree decl,
296 enum insert_option insert)
298 if (insert == NO_INSERT)
300 tree *slot = info->field_map->get (decl);
301 return slot ? *slot : NULL_TREE;
304 tree *slot = &info->field_map->get_or_insert (decl);
305 if (!*slot)
307 tree field = make_node (FIELD_DECL);
308 DECL_NAME (field) = DECL_NAME (decl);
310 if (use_pointer_in_frame (decl))
312 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
313 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
314 DECL_NONADDRESSABLE_P (field) = 1;
316 else
318 TREE_TYPE (field) = TREE_TYPE (decl);
319 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
320 DECL_ALIGN (field) = DECL_ALIGN (decl);
321 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
322 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
323 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
324 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
327 insert_field_into_struct (get_frame_type (info), field);
328 *slot = field;
330 if (TREE_CODE (decl) == PARM_DECL)
331 info->any_parm_remapped = true;
334 return *slot;
337 /* Build or return the variable that holds the static chain within
338 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
340 static tree
341 get_chain_decl (struct nesting_info *info)
343 tree decl = info->chain_decl;
345 if (!decl)
347 tree type;
349 type = get_frame_type (info->outer);
350 type = build_pointer_type (type);
352 /* Note that this variable is *not* entered into any BIND_EXPR;
353 the construction of this variable is handled specially in
354 expand_function_start and initialize_inlined_parameters.
355 Note also that it's represented as a parameter. This is more
356 close to the truth, since the initial value does come from
357 the caller. */
358 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
359 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
360 DECL_ARTIFICIAL (decl) = 1;
361 DECL_IGNORED_P (decl) = 1;
362 TREE_USED (decl) = 1;
363 DECL_CONTEXT (decl) = info->context;
364 DECL_ARG_TYPE (decl) = type;
366 /* Tell tree-inline.c that we never write to this variable, so
367 it can copy-prop the replacement value immediately. */
368 TREE_READONLY (decl) = 1;
370 info->chain_decl = decl;
372 if (dump_file
373 && (dump_flags & TDF_DETAILS)
374 && !DECL_STATIC_CHAIN (info->context))
375 fprintf (dump_file, "Setting static-chain for %s\n",
376 lang_hooks.decl_printable_name (info->context, 2));
378 DECL_STATIC_CHAIN (info->context) = 1;
380 return decl;
383 /* Build or return the field within the non-local frame state that holds
384 the static chain for INFO->CONTEXT. This is the way to walk back up
385 multiple nesting levels. */
387 static tree
388 get_chain_field (struct nesting_info *info)
390 tree field = info->chain_field;
392 if (!field)
394 tree type = build_pointer_type (get_frame_type (info->outer));
396 field = make_node (FIELD_DECL);
397 DECL_NAME (field) = get_identifier ("__chain");
398 TREE_TYPE (field) = type;
399 DECL_ALIGN (field) = TYPE_ALIGN (type);
400 DECL_NONADDRESSABLE_P (field) = 1;
402 insert_field_into_struct (get_frame_type (info), field);
404 info->chain_field = field;
406 if (dump_file
407 && (dump_flags & TDF_DETAILS)
408 && !DECL_STATIC_CHAIN (info->context))
409 fprintf (dump_file, "Setting static-chain for %s\n",
410 lang_hooks.decl_printable_name (info->context, 2));
412 DECL_STATIC_CHAIN (info->context) = 1;
414 return field;
417 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
419 static tree
420 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
421 gcall *call)
423 tree t;
425 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
426 gimple_call_set_lhs (call, t);
427 if (! gsi_end_p (*gsi))
428 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
429 gsi_insert_before (gsi, call, GSI_SAME_STMT);
431 return t;
435 /* Copy EXP into a temporary. Allocate the temporary in the context of
436 INFO and insert the initialization statement before GSI. */
438 static tree
439 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
441 tree t;
442 gimple stmt;
444 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
445 stmt = gimple_build_assign (t, exp);
446 if (! gsi_end_p (*gsi))
447 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
448 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
450 return t;
454 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
456 static tree
457 gsi_gimplify_val (struct nesting_info *info, tree exp,
458 gimple_stmt_iterator *gsi)
460 if (is_gimple_val (exp))
461 return exp;
462 else
463 return init_tmp_var (info, exp, gsi);
466 /* Similarly, but copy from the temporary and insert the statement
467 after the iterator. */
469 static tree
470 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
472 tree t;
473 gimple stmt;
475 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
476 stmt = gimple_build_assign (exp, t);
477 if (! gsi_end_p (*gsi))
478 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
479 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
481 return t;
484 /* Build or return the type used to represent a nested function trampoline. */
486 static GTY(()) tree trampoline_type;
488 static tree
489 get_trampoline_type (struct nesting_info *info)
491 unsigned align, size;
492 tree t;
494 if (trampoline_type)
495 return trampoline_type;
497 align = TRAMPOLINE_ALIGNMENT;
498 size = TRAMPOLINE_SIZE;
500 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
501 then allocate extra space so that we can do dynamic alignment. */
502 if (align > STACK_BOUNDARY)
504 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
505 align = STACK_BOUNDARY;
508 t = build_index_type (size_int (size - 1));
509 t = build_array_type (char_type_node, t);
510 t = build_decl (DECL_SOURCE_LOCATION (info->context),
511 FIELD_DECL, get_identifier ("__data"), t);
512 DECL_ALIGN (t) = align;
513 DECL_USER_ALIGN (t) = 1;
515 trampoline_type = make_node (RECORD_TYPE);
516 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
517 TYPE_FIELDS (trampoline_type) = t;
518 layout_type (trampoline_type);
519 DECL_CONTEXT (t) = trampoline_type;
521 return trampoline_type;
524 /* Given DECL, a nested function, find or create a field in the non-local
525 frame structure for a trampoline for this function. */
527 static tree
528 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
529 enum insert_option insert)
531 if (insert == NO_INSERT)
533 tree *slot = info->var_map->get (decl);
534 return slot ? *slot : NULL_TREE;
537 tree *slot = &info->var_map->get_or_insert (decl);
538 if (!*slot)
540 tree field = make_node (FIELD_DECL);
541 DECL_NAME (field) = DECL_NAME (decl);
542 TREE_TYPE (field) = get_trampoline_type (info);
543 TREE_ADDRESSABLE (field) = 1;
545 insert_field_into_struct (get_frame_type (info), field);
546 *slot = field;
548 info->any_tramp_created = true;
551 return *slot;
554 /* Build or return the field within the non-local frame state that holds
555 the non-local goto "jmp_buf". The buffer itself is maintained by the
556 rtl middle-end as dynamic stack space is allocated. */
558 static tree
559 get_nl_goto_field (struct nesting_info *info)
561 tree field = info->nl_goto_field;
562 if (!field)
564 unsigned size;
565 tree type;
567 /* For __builtin_nonlocal_goto, we need N words. The first is the
568 frame pointer, the rest is for the target's stack pointer save
569 area. The number of words is controlled by STACK_SAVEAREA_MODE;
570 not the best interface, but it'll do for now. */
571 if (Pmode == ptr_mode)
572 type = ptr_type_node;
573 else
574 type = lang_hooks.types.type_for_mode (Pmode, 1);
576 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
577 size = size / GET_MODE_SIZE (Pmode);
578 size = size + 1;
580 type = build_array_type
581 (type, build_index_type (size_int (size)));
583 field = make_node (FIELD_DECL);
584 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
585 TREE_TYPE (field) = type;
586 DECL_ALIGN (field) = TYPE_ALIGN (type);
587 TREE_ADDRESSABLE (field) = 1;
589 insert_field_into_struct (get_frame_type (info), field);
591 info->nl_goto_field = field;
594 return field;
597 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
599 static void
600 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
601 struct nesting_info *info, gimple_seq *pseq)
603 struct walk_stmt_info wi;
605 memset (&wi, 0, sizeof (wi));
606 wi.info = info;
607 wi.val_only = true;
608 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
612 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
614 static inline void
615 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
616 struct nesting_info *info)
618 gimple_seq body = gimple_body (info->context);
619 walk_body (callback_stmt, callback_op, info, &body);
620 gimple_set_body (info->context, body);
623 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
625 static void
626 walk_gimple_omp_for (gomp_for *for_stmt,
627 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
628 struct nesting_info *info)
630 gcc_assert (!is_gimple_omp_oacc_specifically (for_stmt));
632 struct walk_stmt_info wi;
633 gimple_seq seq;
634 tree t;
635 size_t i;
637 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
639 seq = NULL;
640 memset (&wi, 0, sizeof (wi));
641 wi.info = info;
642 wi.gsi = gsi_last (seq);
644 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
646 wi.val_only = false;
647 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
648 &wi, NULL);
649 wi.val_only = true;
650 wi.is_lhs = false;
651 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
652 &wi, NULL);
654 wi.val_only = true;
655 wi.is_lhs = false;
656 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
657 &wi, NULL);
659 t = gimple_omp_for_incr (for_stmt, i);
660 gcc_assert (BINARY_CLASS_P (t));
661 wi.val_only = false;
662 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
663 wi.val_only = true;
664 wi.is_lhs = false;
665 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
668 seq = gsi_seq (wi.gsi);
669 if (!gimple_seq_empty_p (seq))
671 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
672 annotate_all_with_location (seq, gimple_location (for_stmt));
673 gimple_seq_add_seq (&pre_body, seq);
674 gimple_omp_for_set_pre_body (for_stmt, pre_body);
678 /* Similarly for ROOT and all functions nested underneath, depth first. */
680 static void
681 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
682 struct nesting_info *root)
684 struct nesting_info *n;
685 FOR_EACH_NEST_INFO (n, root)
686 walk_function (callback_stmt, callback_op, n);
690 /* We have to check for a fairly pathological case. The operands of function
691 nested function are to be interpreted in the context of the enclosing
692 function. So if any are variably-sized, they will get remapped when the
693 enclosing function is inlined. But that remapping would also have to be
694 done in the types of the PARM_DECLs of the nested function, meaning the
695 argument types of that function will disagree with the arguments in the
696 calls to that function. So we'd either have to make a copy of the nested
697 function corresponding to each time the enclosing function was inlined or
698 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
699 function. The former is not practical. The latter would still require
700 detecting this case to know when to add the conversions. So, for now at
701 least, we don't inline such an enclosing function.
703 We have to do that check recursively, so here return indicating whether
704 FNDECL has such a nested function. ORIG_FN is the function we were
705 trying to inline to use for checking whether any argument is variably
706 modified by anything in it.
708 It would be better to do this in tree-inline.c so that we could give
709 the appropriate warning for why a function can't be inlined, but that's
710 too late since the nesting structure has already been flattened and
711 adding a flag just to record this fact seems a waste of a flag. */
713 static bool
714 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
716 struct cgraph_node *cgn = cgraph_node::get (fndecl);
717 tree arg;
719 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
721 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
722 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
723 return true;
725 if (check_for_nested_with_variably_modified (cgn->decl,
726 orig_fndecl))
727 return true;
730 return false;
733 /* Construct our local datastructure describing the function nesting
734 tree rooted by CGN. */
736 static struct nesting_info *
737 create_nesting_tree (struct cgraph_node *cgn)
739 struct nesting_info *info = XCNEW (struct nesting_info);
740 info->field_map = new hash_map<tree, tree>;
741 info->var_map = new hash_map<tree, tree>;
742 info->mem_refs = new hash_set<tree *>;
743 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
744 info->context = cgn->decl;
746 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
748 struct nesting_info *sub = create_nesting_tree (cgn);
749 sub->outer = info;
750 sub->next = info->inner;
751 info->inner = sub;
754 /* See discussion at check_for_nested_with_variably_modified for a
755 discussion of why this has to be here. */
756 if (check_for_nested_with_variably_modified (info->context, info->context))
757 DECL_UNINLINABLE (info->context) = true;
759 return info;
762 /* Return an expression computing the static chain for TARGET_CONTEXT
763 from INFO->CONTEXT. Insert any necessary computations before TSI. */
765 static tree
766 get_static_chain (struct nesting_info *info, tree target_context,
767 gimple_stmt_iterator *gsi)
769 struct nesting_info *i;
770 tree x;
772 if (info->context == target_context)
774 x = build_addr (info->frame_decl, target_context);
776 else
778 x = get_chain_decl (info);
780 for (i = info->outer; i->context != target_context; i = i->outer)
782 tree field = get_chain_field (i);
784 x = build_simple_mem_ref (x);
785 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
786 x = init_tmp_var (info, x, gsi);
790 return x;
794 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
795 frame as seen from INFO->CONTEXT. Insert any necessary computations
796 before GSI. */
798 static tree
799 get_frame_field (struct nesting_info *info, tree target_context,
800 tree field, gimple_stmt_iterator *gsi)
802 struct nesting_info *i;
803 tree x;
805 if (info->context == target_context)
807 /* Make sure frame_decl gets created. */
808 (void) get_frame_type (info);
809 x = info->frame_decl;
811 else
813 x = get_chain_decl (info);
815 for (i = info->outer; i->context != target_context; i = i->outer)
817 tree field = get_chain_field (i);
819 x = build_simple_mem_ref (x);
820 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
821 x = init_tmp_var (info, x, gsi);
824 x = build_simple_mem_ref (x);
827 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
828 return x;
831 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
833 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
834 in the nested function with DECL_VALUE_EXPR set to reference the true
835 variable in the parent function. This is used both for debug info
836 and in OpenMP lowering. */
838 static tree
839 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
841 tree target_context;
842 struct nesting_info *i;
843 tree x, field, new_decl;
845 tree *slot = &info->var_map->get_or_insert (decl);
847 if (*slot)
848 return *slot;
850 target_context = decl_function_context (decl);
852 /* A copy of the code in get_frame_field, but without the temporaries. */
853 if (info->context == target_context)
855 /* Make sure frame_decl gets created. */
856 (void) get_frame_type (info);
857 x = info->frame_decl;
858 i = info;
860 else
862 x = get_chain_decl (info);
863 for (i = info->outer; i->context != target_context; i = i->outer)
865 field = get_chain_field (i);
866 x = build_simple_mem_ref (x);
867 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
869 x = build_simple_mem_ref (x);
872 field = lookup_field_for_decl (i, decl, INSERT);
873 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
874 if (use_pointer_in_frame (decl))
875 x = build_simple_mem_ref (x);
877 /* ??? We should be remapping types as well, surely. */
878 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
879 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
880 DECL_CONTEXT (new_decl) = info->context;
881 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
882 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
883 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
884 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
885 TREE_READONLY (new_decl) = TREE_READONLY (decl);
886 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
887 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
888 if ((TREE_CODE (decl) == PARM_DECL
889 || TREE_CODE (decl) == RESULT_DECL
890 || TREE_CODE (decl) == VAR_DECL)
891 && DECL_BY_REFERENCE (decl))
892 DECL_BY_REFERENCE (new_decl) = 1;
894 SET_DECL_VALUE_EXPR (new_decl, x);
895 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
897 *slot = new_decl;
898 DECL_CHAIN (new_decl) = info->debug_var_chain;
899 info->debug_var_chain = new_decl;
901 if (!optimize
902 && info->context != target_context
903 && variably_modified_type_p (TREE_TYPE (decl), NULL))
904 note_nonlocal_vla_type (info, TREE_TYPE (decl));
906 return new_decl;
910 /* Callback for walk_gimple_stmt, rewrite all references to VAR
911 and PARM_DECLs that belong to outer functions.
913 The rewrite will involve some number of structure accesses back up
914 the static chain. E.g. for a variable FOO up one nesting level it'll
915 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
916 indirections apply to decls for which use_pointer_in_frame is true. */
918 static tree
919 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
921 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
922 struct nesting_info *const info = (struct nesting_info *) wi->info;
923 tree t = *tp;
925 *walk_subtrees = 0;
926 switch (TREE_CODE (t))
928 case VAR_DECL:
929 /* Non-automatic variables are never processed. */
930 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
931 break;
932 /* FALLTHRU */
934 case PARM_DECL:
935 if (decl_function_context (t) != info->context)
937 tree x;
938 wi->changed = true;
940 x = get_nonlocal_debug_decl (info, t);
941 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
943 tree target_context = decl_function_context (t);
944 struct nesting_info *i;
945 for (i = info->outer; i->context != target_context; i = i->outer)
946 continue;
947 x = lookup_field_for_decl (i, t, INSERT);
948 x = get_frame_field (info, target_context, x, &wi->gsi);
949 if (use_pointer_in_frame (t))
951 x = init_tmp_var (info, x, &wi->gsi);
952 x = build_simple_mem_ref (x);
956 if (wi->val_only)
958 if (wi->is_lhs)
959 x = save_tmp_var (info, x, &wi->gsi);
960 else
961 x = init_tmp_var (info, x, &wi->gsi);
964 *tp = x;
966 break;
968 case LABEL_DECL:
969 /* We're taking the address of a label from a parent function, but
970 this is not itself a non-local goto. Mark the label such that it
971 will not be deleted, much as we would with a label address in
972 static storage. */
973 if (decl_function_context (t) != info->context)
974 FORCED_LABEL (t) = 1;
975 break;
977 case ADDR_EXPR:
979 bool save_val_only = wi->val_only;
981 wi->val_only = false;
982 wi->is_lhs = false;
983 wi->changed = false;
984 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
985 wi->val_only = true;
987 if (wi->changed)
989 tree save_context;
991 /* If we changed anything, we might no longer be directly
992 referencing a decl. */
993 save_context = current_function_decl;
994 current_function_decl = info->context;
995 recompute_tree_invariant_for_addr_expr (t);
996 current_function_decl = save_context;
998 /* If the callback converted the address argument in a context
999 where we only accept variables (and min_invariant, presumably),
1000 then compute the address into a temporary. */
1001 if (save_val_only)
1002 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1003 t, &wi->gsi);
1006 break;
1008 case REALPART_EXPR:
1009 case IMAGPART_EXPR:
1010 case COMPONENT_REF:
1011 case ARRAY_REF:
1012 case ARRAY_RANGE_REF:
1013 case BIT_FIELD_REF:
1014 /* Go down this entire nest and just look at the final prefix and
1015 anything that describes the references. Otherwise, we lose track
1016 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1017 wi->val_only = true;
1018 wi->is_lhs = false;
1019 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1021 if (TREE_CODE (t) == COMPONENT_REF)
1022 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1023 NULL);
1024 else if (TREE_CODE (t) == ARRAY_REF
1025 || TREE_CODE (t) == ARRAY_RANGE_REF)
1027 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1028 wi, NULL);
1029 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1030 wi, NULL);
1031 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1032 wi, NULL);
1035 wi->val_only = false;
1036 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1037 break;
1039 case VIEW_CONVERT_EXPR:
1040 /* Just request to look at the subtrees, leaving val_only and lhs
1041 untouched. This might actually be for !val_only + lhs, in which
1042 case we don't want to force a replacement by a temporary. */
1043 *walk_subtrees = 1;
1044 break;
1046 default:
1047 if (!IS_TYPE_OR_DECL_P (t))
1049 *walk_subtrees = 1;
1050 wi->val_only = true;
1051 wi->is_lhs = false;
1053 break;
1056 return NULL_TREE;
1059 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1060 struct walk_stmt_info *);
1062 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1063 and PARM_DECLs that belong to outer functions. */
/* Returns true if rewriting any clause required access to an enclosing
   function's frame, in which case the caller must arrange for the static
   chain to be available inside the region (e.g. by adding a firstprivate
   clause for the chain decl).  NEW_SUPPRESS records decls whose expansion
   is handled here so convert_nonlocal_reference_op skips them.  */
1065 static bool
1066 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1068 struct nesting_info *const info = (struct nesting_info *) wi->info;
1069 bool need_chain = false, need_stmts = false;
1070 tree clause, decl;
1071 int dummy;
1072 bitmap new_suppress;
1074 new_suppress = BITMAP_GGC_ALLOC ();
1075 bitmap_copy (new_suppress, info->suppress_expansion);
1077 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1079 switch (OMP_CLAUSE_CODE (clause))
1081 case OMP_CLAUSE_REDUCTION:
1082 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1083 need_stmts = true;
1084 goto do_decl_clause;
1086 case OMP_CLAUSE_LASTPRIVATE:
1087 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1088 need_stmts = true;
1089 goto do_decl_clause;
1091 case OMP_CLAUSE_LINEAR:
1092 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1093 need_stmts = true;
/* The linear step expression may itself reference a nonlocal decl.  */
1094 wi->val_only = true;
1095 wi->is_lhs = false;
1096 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1097 &dummy, wi);
1098 goto do_decl_clause;
1100 case OMP_CLAUSE_PRIVATE:
1101 case OMP_CLAUSE_FIRSTPRIVATE:
1102 case OMP_CLAUSE_COPYPRIVATE:
1103 case OMP_CLAUSE_SHARED:
1104 do_decl_clause:
1105 decl = OMP_CLAUSE_DECL (clause);
/* Non-automatic variables are never rewritten.  */
1106 if (TREE_CODE (decl) == VAR_DECL
1107 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1108 break;
1109 if (decl_function_context (decl) != info->context)
1111 bitmap_set_bit (new_suppress, DECL_UID (decl));
1112 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
/* A private copy does not need to read the original through the
   chain; every other kind does.  */
1113 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1114 need_chain = true;
1116 break;
1118 case OMP_CLAUSE_SCHEDULE:
1119 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1120 break;
1121 /* FALLTHRU */
1122 case OMP_CLAUSE_FINAL:
1123 case OMP_CLAUSE_IF:
1124 case OMP_CLAUSE_NUM_THREADS:
1125 case OMP_CLAUSE_DEPEND:
1126 case OMP_CLAUSE_DEVICE:
1127 case OMP_CLAUSE_NUM_TEAMS:
1128 case OMP_CLAUSE_THREAD_LIMIT:
1129 case OMP_CLAUSE_SAFELEN:
1130 case OMP_CLAUSE__CILK_FOR_COUNT_:
/* These clauses carry a single expression operand; rewrite it.  */
1131 wi->val_only = true;
1132 wi->is_lhs = false;
1133 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1134 &dummy, wi);
1135 break;
1137 case OMP_CLAUSE_DIST_SCHEDULE:
1138 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1140 wi->val_only = true;
1141 wi->is_lhs = false;
1142 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1143 &dummy, wi);
1145 break;
1147 case OMP_CLAUSE_MAP:
1148 case OMP_CLAUSE_TO:
1149 case OMP_CLAUSE_FROM:
1150 if (OMP_CLAUSE_SIZE (clause))
1152 wi->val_only = true;
1153 wi->is_lhs = false;
1154 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1155 &dummy, wi);
1157 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1158 goto do_decl_clause;
/* The mapped operand may be an arbitrary reference expression.  */
1159 wi->val_only = true;
1160 wi->is_lhs = false;
1161 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1162 wi, NULL);
1163 break;
1165 case OMP_CLAUSE_ALIGNED:
1166 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1168 wi->val_only = true;
1169 wi->is_lhs = false;
1170 convert_nonlocal_reference_op
1171 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1173 /* Like do_decl_clause, but don't add any suppression. */
1174 decl = OMP_CLAUSE_DECL (clause);
1175 if (TREE_CODE (decl) == VAR_DECL
1176 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1177 break;
1178 if (decl_function_context (decl) != info->context)
1180 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl)
1181 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1182 need_chain = true;
1184 break;
1186 case OMP_CLAUSE_NOWAIT:
1187 case OMP_CLAUSE_ORDERED:
1188 case OMP_CLAUSE_DEFAULT:
1189 case OMP_CLAUSE_COPYIN:
1190 case OMP_CLAUSE_COLLAPSE:
1191 case OMP_CLAUSE_UNTIED:
1192 case OMP_CLAUSE_MERGEABLE:
1193 case OMP_CLAUSE_PROC_BIND:
/* Clauses with no decl or expression operand: nothing to rewrite.  */
1194 break;
1196 default:
1197 gcc_unreachable ();
1201 info->suppress_expansion = new_suppress;
/* Second pass: clauses that own nested GIMPLE sequences (reduction
   init/merge, lastprivate and linear sequences) are not visited by the
   normal statement walk, so walk them here.  The reduction placeholder's
   DECL_CONTEXT is temporarily retargeted so the walk treats it as local.  */
1203 if (need_stmts)
1204 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1205 switch (OMP_CLAUSE_CODE (clause))
1207 case OMP_CLAUSE_REDUCTION:
1208 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1210 tree old_context
1211 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1212 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1213 = info->context;
1214 walk_body (convert_nonlocal_reference_stmt,
1215 convert_nonlocal_reference_op, info,
1216 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1217 walk_body (convert_nonlocal_reference_stmt,
1218 convert_nonlocal_reference_op, info,
1219 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1220 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1221 = old_context;
1223 break;
1225 case OMP_CLAUSE_LASTPRIVATE:
1226 walk_body (convert_nonlocal_reference_stmt,
1227 convert_nonlocal_reference_op, info,
1228 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1229 break;
1231 case OMP_CLAUSE_LINEAR:
1232 walk_body (convert_nonlocal_reference_stmt,
1233 convert_nonlocal_reference_op, info,
1234 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1235 break;
1237 default:
1238 break;
1241 return need_chain;
1244 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
/* Recurses through pointers, typedefs, vectors and function/method types
   to the underlying ARRAY_TYPE (and through arrays of arrays).  Any
   VAR_DECL or PARM_DECL used as an array bound that belongs to a
   function other than INFO->context gets a nonlocal debug decl so the
   bound remains visible to the debugger.  */
1246 static void
1247 note_nonlocal_vla_type (struct nesting_info *info, tree type)
/* Skip anonymous pointer wrappers to reach a named type, if any.  */
1249 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1250 type = TREE_TYPE (type);
/* Prefer the original (pre-typedef) type when one is recorded.  */
1252 if (TYPE_NAME (type)
1253 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1254 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1255 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1257 while (POINTER_TYPE_P (type)
1258 || TREE_CODE (type) == VECTOR_TYPE
1259 || TREE_CODE (type) == FUNCTION_TYPE
1260 || TREE_CODE (type) == METHOD_TYPE)
1261 type = TREE_TYPE (type);
1263 if (TREE_CODE (type) == ARRAY_TYPE)
1265 tree domain, t;
/* Element type may itself be a VLA.  */
1267 note_nonlocal_vla_type (info, TREE_TYPE (type));
1268 domain = TYPE_DOMAIN (type);
1269 if (domain)
1271 t = TYPE_MIN_VALUE (domain);
1272 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1273 && decl_function_context (t) != info->context)
1274 get_nonlocal_debug_decl (info, t);
1275 t = TYPE_MAX_VALUE (domain);
1276 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1277 && decl_function_context (t) != info->context)
1278 get_nonlocal_debug_decl (info, t);
1283 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1284 in BLOCK. */
/* Scans only BLOCK's own variables (not sub-blocks).  A VLA is
   recognized by a variably modified type plus a DECL_VALUE_EXPR; only
   variables belonging to a function other than INFO->context are
   forwarded to note_nonlocal_vla_type.  */
1286 static void
1287 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1289 tree var;
1291 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1292 if (TREE_CODE (var) == VAR_DECL
1293 && variably_modified_type_p (TREE_TYPE (var), NULL)
1294 && DECL_HAS_VALUE_EXPR_P (var)
1295 && decl_function_context (var) != info->context)
1296 note_nonlocal_vla_type (info, TREE_TYPE (var));
1299 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1300 PARM_DECLs that belong to outer functions. This handles statements
1301 that are not handled via the standard recursion done in
1302 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1303 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1304 operands of STMT have been handled by this function. */
1306 static tree
1307 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1308 struct walk_stmt_info *wi)
1310 struct nesting_info *info = (struct nesting_info *) wi->info;
1311 tree save_local_var_chain;
1312 bitmap save_suppress;
1313 gimple stmt = gsi_stmt (*gsi);
1315 switch (gimple_code (stmt))
1317 case GIMPLE_GOTO:
1318 /* Don't walk non-local gotos for now. */
1319 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1321 wi->val_only = true;
1322 wi->is_lhs = false;
1323 *handled_ops_p = true;
1324 return NULL_TREE;
1326 break;
1328 case GIMPLE_OMP_PARALLEL:
1329 case GIMPLE_OMP_TASK:
/* If any clause needs the static chain, thread the chain decl into the
   region as a firstprivate clause.  */
1330 save_suppress = info->suppress_expansion;
1331 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1332 wi))
1334 tree c, decl;
1335 decl = get_chain_decl (info);
1336 c = build_omp_clause (gimple_location (stmt),
1337 OMP_CLAUSE_FIRSTPRIVATE);
1338 OMP_CLAUSE_DECL (c) = decl;
1339 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1340 gimple_omp_taskreg_set_clauses (stmt, c);
/* Temporaries created while walking the region body must be declared
   inside the region, not in the enclosing function.  */
1343 save_local_var_chain = info->new_local_var_chain;
1344 info->new_local_var_chain = NULL;
1346 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1347 info, gimple_omp_body_ptr (stmt));
1349 if (info->new_local_var_chain)
1350 declare_vars (info->new_local_var_chain,
1351 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1352 false);
1353 info->new_local_var_chain = save_local_var_chain;
1354 info->suppress_expansion = save_suppress;
1355 break;
1357 case GIMPLE_OMP_FOR:
1358 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
1359 save_suppress = info->suppress_expansion;
1360 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1361 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1362 convert_nonlocal_reference_stmt,
1363 convert_nonlocal_reference_op, info);
1364 walk_body (convert_nonlocal_reference_stmt,
1365 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1366 info->suppress_expansion = save_suppress;
1367 break;
1369 case GIMPLE_OMP_SECTIONS:
1370 save_suppress = info->suppress_expansion;
1371 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1372 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1373 info, gimple_omp_body_ptr (stmt));
1374 info->suppress_expansion = save_suppress;
1375 break;
1377 case GIMPLE_OMP_SINGLE:
1378 save_suppress = info->suppress_expansion;
1379 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1380 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1381 info, gimple_omp_body_ptr (stmt));
1382 info->suppress_expansion = save_suppress;
1383 break;
1385 case GIMPLE_OMP_TARGET:
1386 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
/* Non-region target kinds (data/update/...) only need their clauses
   and body rewritten, no chain mapping.  */
1387 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
1389 save_suppress = info->suppress_expansion;
1390 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1391 wi);
1392 info->suppress_expansion = save_suppress;
1393 walk_body (convert_nonlocal_reference_stmt,
1394 convert_nonlocal_reference_op, info,
1395 gimple_omp_body_ptr (stmt));
1396 break;
/* A target region proper: if any clause referenced the static chain,
   map the chain decl TO the device data environment.  */
1398 save_suppress = info->suppress_expansion;
1399 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1400 wi))
1402 tree c, decl;
1403 decl = get_chain_decl (info);
1404 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1405 OMP_CLAUSE_DECL (c) = decl;
1406 OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TO;
1407 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1408 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1409 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1412 save_local_var_chain = info->new_local_var_chain;
1413 info->new_local_var_chain = NULL;
1415 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1416 info, gimple_omp_body_ptr (stmt));
1418 if (info->new_local_var_chain)
1419 declare_vars (info->new_local_var_chain,
1420 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1421 false);
1422 info->new_local_var_chain = save_local_var_chain;
1423 info->suppress_expansion = save_suppress;
1424 break;
1426 case GIMPLE_OMP_TEAMS:
1427 save_suppress = info->suppress_expansion;
1428 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1429 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1430 info, gimple_omp_body_ptr (stmt));
1431 info->suppress_expansion = save_suppress;
1432 break;
1434 case GIMPLE_OMP_SECTION:
1435 case GIMPLE_OMP_MASTER:
1436 case GIMPLE_OMP_TASKGROUP:
1437 case GIMPLE_OMP_ORDERED:
1438 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1439 info, gimple_omp_body_ptr (stmt));
1440 break;
1442 case GIMPLE_BIND:
1444 gbind *bind_stmt = as_a <gbind *> (stmt);
/* At -O0, record nonlocal VLA bounds now so debug decls exist.  */
1445 if (!optimize && gimple_bind_block (bind_stmt))
1446 note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));
1448 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1449 if (TREE_CODE (var) == NAMELIST_DECL)
1451 /* Adjust decls mentioned in NAMELIST_DECL. */
1452 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1453 tree decl;
1454 unsigned int i;
1456 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1458 if (TREE_CODE (decl) == VAR_DECL
1459 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1460 continue;
1461 if (decl_function_context (decl) != info->context)
1462 CONSTRUCTOR_ELT (decls, i)->value
1463 = get_nonlocal_debug_decl (info, decl);
1467 *handled_ops_p = false;
1468 return NULL_TREE;
1470 case GIMPLE_COND:
1471 wi->val_only = true;
1472 wi->is_lhs = false;
1473 *handled_ops_p = false;
1474 return NULL_TREE;
1476 default:
1477 /* For every other statement that we are not interested in
1478 handling here, let the walker traverse the operands. */
1479 *handled_ops_p = false;
1480 return NULL_TREE;
1483 /* We have handled all of STMT operands, no need to traverse the operands. */
1484 *handled_ops_p = true;
1485 return NULL_TREE;
1489 /* A subroutine of convert_local_reference. Create a local variable
1490 in the parent function with DECL_VALUE_EXPR set to reference the
1491 field in FRAME. This is used both for debug info and in OpenMP
1492 lowering. */
/* The decl -> new_decl mapping is cached in INFO->var_map, so repeated
   requests for the same DECL return the same debug decl.  The new decl
   carries the original's qualifiers and evaluates, via its value expr,
   to the COMPONENT_REF frame_decl.FIELD.  */
1494 static tree
1495 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1497 tree x, new_decl;
1499 tree *slot = &info->var_map->get_or_insert (decl);
1500 if (*slot)
1501 return *slot;
1503 /* Make sure frame_decl gets created. */
1504 (void) get_frame_type (info);
1505 x = info->frame_decl;
1506 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1508 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1509 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1510 DECL_CONTEXT (new_decl) = info->context;
1511 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1512 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1513 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1514 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1515 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1516 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1517 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1518 if ((TREE_CODE (decl) == PARM_DECL
1519 || TREE_CODE (decl) == RESULT_DECL
1520 || TREE_CODE (decl) == VAR_DECL)
1521 && DECL_BY_REFERENCE (decl))
1522 DECL_BY_REFERENCE (new_decl) = 1;
1524 SET_DECL_VALUE_EXPR (new_decl, x);
1525 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1526 *slot = new_decl;
/* Chain onto the list of debug decls to be declared later.  */
1528 DECL_CHAIN (new_decl) = info->debug_var_chain;
1529 info->debug_var_chain = new_decl;
1531 /* Do not emit debug info twice. */
1532 DECL_IGNORED_P (decl) = 1;
1534 return new_decl;
1538 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1539 and PARM_DECLs that were referenced by inner nested functions.
1540 The rewrite will be a structure reference to the local frame variable. */
1542 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
/* Operand callback: TP points at the operand being rewritten,
   WALK_SUBTREES is set when the generic walker should recurse, DATA is
   the walk_stmt_info whose val_only/is_lhs flags describe the context
   the operand appears in.  */
1544 static tree
1545 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1547 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1548 struct nesting_info *const info = (struct nesting_info *) wi->info;
1549 tree t = *tp, field, x;
1550 bool save_val_only;
1552 *walk_subtrees = 0;
1553 switch (TREE_CODE (t))
1555 case VAR_DECL:
1556 /* Non-automatic variables are never processed. */
1557 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1558 break;
1559 /* FALLTHRU */
1561 case PARM_DECL:
1562 if (decl_function_context (t) == info->context)
1564 /* If we copied a pointer to the frame, then the original decl
1565 is used unchanged in the parent function. */
1566 if (use_pointer_in_frame (t))
1567 break;
1569 /* No need to transform anything if no child references the
1570 variable. */
1571 field = lookup_field_for_decl (info, t, NO_INSERT);
1572 if (!field)
1573 break;
1574 wi->changed = true;
/* When expansion is suppressed (the decl was already handled by OMP
   clause rewriting) use the debug decl; otherwise read the value
   through the frame record.  */
1576 x = get_local_debug_decl (info, t, field);
1577 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1578 x = get_frame_field (info, info->context, field, &wi->gsi);
1580 if (wi->val_only)
1582 if (wi->is_lhs)
1583 x = save_tmp_var (info, x, &wi->gsi);
1584 else
1585 x = init_tmp_var (info, x, &wi->gsi);
1588 *tp = x;
1590 break;
1592 case ADDR_EXPR:
1593 save_val_only = wi->val_only;
1594 wi->val_only = false;
1595 wi->is_lhs = false;
1596 wi->changed = false;
1597 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1598 wi->val_only = save_val_only;
1600 /* If we converted anything ... */
1601 if (wi->changed)
1603 tree save_context;
1605 /* Then the frame decl is now addressable. */
1606 TREE_ADDRESSABLE (info->frame_decl) = 1;
1608 save_context = current_function_decl;
1609 current_function_decl = info->context;
1610 recompute_tree_invariant_for_addr_expr (t);
1611 current_function_decl = save_context;
1613 /* If we are in a context where we only accept values, then
1614 compute the address into a temporary. */
1615 if (save_val_only)
1616 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1617 t, &wi->gsi);
1619 break;
1621 case REALPART_EXPR:
1622 case IMAGPART_EXPR:
1623 case COMPONENT_REF:
1624 case ARRAY_REF:
1625 case ARRAY_RANGE_REF:
1626 case BIT_FIELD_REF:
1627 /* Go down this entire nest and just look at the final prefix and
1628 anything that describes the references. Otherwise, we lose track
1629 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1630 save_val_only = wi->val_only;
1631 wi->val_only = true;
1632 wi->is_lhs = false;
1633 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1635 if (TREE_CODE (t) == COMPONENT_REF)
1636 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1637 NULL);
1638 else if (TREE_CODE (t) == ARRAY_REF
1639 || TREE_CODE (t) == ARRAY_RANGE_REF)
1641 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1642 NULL);
1643 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1644 NULL);
1645 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1646 NULL);
/* The base object of the reference must not be replaced by a value
   temporary, hence val_only = false for the final prefix.  */
1649 wi->val_only = false;
1650 walk_tree (tp, convert_local_reference_op, wi, NULL);
1651 wi->val_only = save_val_only;
1652 break;
1654 case MEM_REF:
1655 save_val_only = wi->val_only;
1656 wi->val_only = true;
1657 wi->is_lhs = false;
1658 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1659 wi, NULL);
1660 /* We need to re-fold the MEM_REF as component references as
1661 part of a ADDR_EXPR address are not allowed. But we cannot
1662 fold here, as the chain record type is not yet finalized. */
1663 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1664 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1665 info->mem_refs->add (tp);
1666 wi->val_only = save_val_only;
1667 break;
1669 case VIEW_CONVERT_EXPR:
1670 /* Just request to look at the subtrees, leaving val_only and lhs
1671 untouched. This might actually be for !val_only + lhs, in which
1672 case we don't want to force a replacement by a temporary. */
1673 *walk_subtrees = 1;
1674 break;
1676 default:
1677 if (!IS_TYPE_OR_DECL_P (t))
1679 *walk_subtrees = 1;
1680 wi->val_only = true;
1681 wi->is_lhs = false;
1683 break;
1686 return NULL_TREE;
1689 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1690 struct walk_stmt_info *);
1692 /* Helper for convert_local_reference. Convert all the references in
1693 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
/* Local-frame counterpart of convert_nonlocal_omp_clauses: returns true
   if any rewritten clause now references the local frame record, in
   which case the caller must make the frame available to the region.  */
1695 static bool
1696 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1698 struct nesting_info *const info = (struct nesting_info *) wi->info;
1699 bool need_frame = false, need_stmts = false;
1700 tree clause, decl;
1701 int dummy;
1702 bitmap new_suppress;
1704 new_suppress = BITMAP_GGC_ALLOC ();
1705 bitmap_copy (new_suppress, info->suppress_expansion);
1707 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1709 switch (OMP_CLAUSE_CODE (clause))
1711 case OMP_CLAUSE_REDUCTION:
1712 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1713 need_stmts = true;
1714 goto do_decl_clause;
1716 case OMP_CLAUSE_LASTPRIVATE:
1717 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1718 need_stmts = true;
1719 goto do_decl_clause;
1721 case OMP_CLAUSE_LINEAR:
1722 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1723 need_stmts = true;
1724 wi->val_only = true;
1725 wi->is_lhs = false;
1726 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1727 wi);
1728 goto do_decl_clause;
1730 case OMP_CLAUSE_PRIVATE:
1731 case OMP_CLAUSE_FIRSTPRIVATE:
1732 case OMP_CLAUSE_COPYPRIVATE:
1733 case OMP_CLAUSE_SHARED:
1734 do_decl_clause:
1735 decl = OMP_CLAUSE_DECL (clause);
/* Non-automatic variables are never rewritten.  */
1736 if (TREE_CODE (decl) == VAR_DECL
1737 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1738 break;
/* A decl of this function that a nested function references lives in
   the frame record; point the clause at its local debug decl.  */
1739 if (decl_function_context (decl) == info->context
1740 && !use_pointer_in_frame (decl))
1742 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1743 if (field)
1745 bitmap_set_bit (new_suppress, DECL_UID (decl));
1746 OMP_CLAUSE_DECL (clause)
1747 = get_local_debug_decl (info, decl, field);
1748 need_frame = true;
1751 break;
1753 case OMP_CLAUSE_SCHEDULE:
1754 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1755 break;
1756 /* FALLTHRU */
1757 case OMP_CLAUSE_FINAL:
1758 case OMP_CLAUSE_IF:
1759 case OMP_CLAUSE_NUM_THREADS:
1760 case OMP_CLAUSE_DEPEND:
1761 case OMP_CLAUSE_DEVICE:
1762 case OMP_CLAUSE_NUM_TEAMS:
1763 case OMP_CLAUSE_THREAD_LIMIT:
1764 case OMP_CLAUSE_SAFELEN:
1765 case OMP_CLAUSE__CILK_FOR_COUNT_:
/* These clauses carry a single expression operand; rewrite it.  */
1766 wi->val_only = true;
1767 wi->is_lhs = false;
1768 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1769 wi);
1770 break;
1772 case OMP_CLAUSE_DIST_SCHEDULE:
1773 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1775 wi->val_only = true;
1776 wi->is_lhs = false;
1777 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1778 &dummy, wi);
1780 break;
1782 case OMP_CLAUSE_MAP:
1783 case OMP_CLAUSE_TO:
1784 case OMP_CLAUSE_FROM:
1785 if (OMP_CLAUSE_SIZE (clause))
1787 wi->val_only = true;
1788 wi->is_lhs = false;
1789 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1790 &dummy, wi);
1792 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1793 goto do_decl_clause;
/* The mapped operand may be an arbitrary reference expression.  */
1794 wi->val_only = true;
1795 wi->is_lhs = false;
1796 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1797 wi, NULL);
1798 break;
1800 case OMP_CLAUSE_ALIGNED:
1801 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1803 wi->val_only = true;
1804 wi->is_lhs = false;
1805 convert_local_reference_op
1806 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1808 /* Like do_decl_clause, but don't add any suppression. */
1809 decl = OMP_CLAUSE_DECL (clause);
1810 if (TREE_CODE (decl) == VAR_DECL
1811 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1812 break;
1813 if (decl_function_context (decl) == info->context
1814 && !use_pointer_in_frame (decl))
1816 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1817 if (field)
1819 OMP_CLAUSE_DECL (clause)
1820 = get_local_debug_decl (info, decl, field);
1821 need_frame = true;
1824 break;
1826 case OMP_CLAUSE_NOWAIT:
1827 case OMP_CLAUSE_ORDERED:
1828 case OMP_CLAUSE_DEFAULT:
1829 case OMP_CLAUSE_COPYIN:
1830 case OMP_CLAUSE_COLLAPSE:
1831 case OMP_CLAUSE_UNTIED:
1832 case OMP_CLAUSE_MERGEABLE:
1833 case OMP_CLAUSE_PROC_BIND:
/* Clauses with no decl or expression operand: nothing to rewrite.  */
1834 break;
1836 default:
1837 gcc_unreachable ();
1841 info->suppress_expansion = new_suppress;
/* Second pass: walk nested GIMPLE sequences owned by clauses, which the
   normal statement walk does not visit; see the nonlocal variant.  */
1843 if (need_stmts)
1844 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1845 switch (OMP_CLAUSE_CODE (clause))
1847 case OMP_CLAUSE_REDUCTION:
1848 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1850 tree old_context
1851 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1852 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1853 = info->context;
1854 walk_body (convert_local_reference_stmt,
1855 convert_local_reference_op, info,
1856 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1857 walk_body (convert_local_reference_stmt,
1858 convert_local_reference_op, info,
1859 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1860 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1861 = old_context;
1863 break;
1865 case OMP_CLAUSE_LASTPRIVATE:
1866 walk_body (convert_local_reference_stmt,
1867 convert_local_reference_op, info,
1868 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1869 break;
1871 case OMP_CLAUSE_LINEAR:
1872 walk_body (convert_local_reference_stmt,
1873 convert_local_reference_op, info,
1874 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1875 break;
1877 default:
1878 break;
1881 return need_frame;
1885 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1886 and PARM_DECLs that were referenced by inner nested functions.
1887 The rewrite will be a structure reference to the local frame variable. */
/* Statement-level counterpart of convert_local_reference_op; handles
   statements (mostly OMP regions) that need clause rewriting or special
   treatment before the operand walk.  */
1889 static tree
1890 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1891 struct walk_stmt_info *wi)
1893 struct nesting_info *info = (struct nesting_info *) wi->info;
1894 tree save_local_var_chain;
1895 bitmap save_suppress;
1896 gimple stmt = gsi_stmt (*gsi);
1898 switch (gimple_code (stmt))
1900 case GIMPLE_OMP_PARALLEL:
1901 case GIMPLE_OMP_TASK:
/* If any clause references the frame, share the frame record with the
   region via an added shared clause.  */
1902 save_suppress = info->suppress_expansion;
1903 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1904 wi))
1906 tree c;
1907 (void) get_frame_type (info);
1908 c = build_omp_clause (gimple_location (stmt),
1909 OMP_CLAUSE_SHARED);
1910 OMP_CLAUSE_DECL (c) = info->frame_decl;
1911 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1912 gimple_omp_taskreg_set_clauses (stmt, c);
/* Temporaries created while walking the region body must be declared
   inside the region, not in the enclosing function.  */
1915 save_local_var_chain = info->new_local_var_chain;
1916 info->new_local_var_chain = NULL;
1918 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1919 gimple_omp_body_ptr (stmt));
1921 if (info->new_local_var_chain)
1922 declare_vars (info->new_local_var_chain,
1923 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1924 info->new_local_var_chain = save_local_var_chain;
1925 info->suppress_expansion = save_suppress;
1926 break;
1928 case GIMPLE_OMP_FOR:
1929 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
1930 save_suppress = info->suppress_expansion;
1931 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1932 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1933 convert_local_reference_stmt,
1934 convert_local_reference_op, info);
1935 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1936 info, gimple_omp_body_ptr (stmt));
1937 info->suppress_expansion = save_suppress;
1938 break;
1940 case GIMPLE_OMP_SECTIONS:
1941 save_suppress = info->suppress_expansion;
1942 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1943 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1944 info, gimple_omp_body_ptr (stmt));
1945 info->suppress_expansion = save_suppress;
1946 break;
1948 case GIMPLE_OMP_SINGLE:
1949 save_suppress = info->suppress_expansion;
1950 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1951 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1952 info, gimple_omp_body_ptr (stmt));
1953 info->suppress_expansion = save_suppress;
1954 break;
1956 case GIMPLE_OMP_TARGET:
1957 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
/* Non-region target kinds only need clauses and body rewritten.  */
1958 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
1960 save_suppress = info->suppress_expansion;
1961 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1962 info->suppress_expansion = save_suppress;
1963 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1964 info, gimple_omp_body_ptr (stmt));
1965 break;
/* A target region proper: if any clause referenced the frame record,
   map the whole frame TOFROM the device data environment.  */
1967 save_suppress = info->suppress_expansion;
1968 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
1970 tree c;
1971 (void) get_frame_type (info);
1972 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1973 OMP_CLAUSE_DECL (c) = info->frame_decl;
1974 OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TOFROM;
1975 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
1976 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1977 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1980 save_local_var_chain = info->new_local_var_chain;
1981 info->new_local_var_chain = NULL;
1983 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1984 gimple_omp_body_ptr (stmt));
1986 if (info->new_local_var_chain)
1987 declare_vars (info->new_local_var_chain,
1988 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1989 info->new_local_var_chain = save_local_var_chain;
1990 info->suppress_expansion = save_suppress;
1991 break;
1993 case GIMPLE_OMP_TEAMS:
1994 save_suppress = info->suppress_expansion;
1995 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1996 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1997 info, gimple_omp_body_ptr (stmt));
1998 info->suppress_expansion = save_suppress;
1999 break;
2001 case GIMPLE_OMP_SECTION:
2002 case GIMPLE_OMP_MASTER:
2003 case GIMPLE_OMP_TASKGROUP:
2004 case GIMPLE_OMP_ORDERED:
2005 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2006 info, gimple_omp_body_ptr (stmt));
2007 break;
2009 case GIMPLE_COND:
2010 wi->val_only = true;
2011 wi->is_lhs = false;
2012 *handled_ops_p = false;
2013 return NULL_TREE;
2015 case GIMPLE_ASSIGN:
/* A clobber of a decl that has been moved into the frame record (and
   is not accessed via a pointer copy) is replaced by a no-op.  */
2016 if (gimple_clobber_p (stmt))
2018 tree lhs = gimple_assign_lhs (stmt);
2019 if (!use_pointer_in_frame (lhs)
2020 && lookup_field_for_decl (info, lhs, NO_INSERT))
2022 gsi_replace (gsi, gimple_build_nop (), true);
2023 break;
2026 *handled_ops_p = false;
2027 return NULL_TREE;
2029 case GIMPLE_BIND:
2030 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2031 var;
2032 var = DECL_CHAIN (var))
2033 if (TREE_CODE (var) == NAMELIST_DECL)
2035 /* Adjust decls mentioned in NAMELIST_DECL. */
2036 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2037 tree decl;
2038 unsigned int i;
2040 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2042 if (TREE_CODE (decl) == VAR_DECL
2043 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2044 continue;
2045 if (decl_function_context (decl) == info->context
2046 && !use_pointer_in_frame (decl))
2048 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2049 if (field)
2051 CONSTRUCTOR_ELT (decls, i)->value
2052 = get_local_debug_decl (info, decl, field);
2058 *handled_ops_p = false;
2059 return NULL_TREE;
2061 default:
2062 /* For every other statement that we are not interested in
2063 handling here, let the walker traverse the operands. */
2064 *handled_ops_p = false;
2065 return NULL_TREE;
2068 /* Indicate that we have handled all the operands ourselves. */
2069 *handled_ops_p = true;
2070 return NULL_TREE;
2074 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2075 that reference labels from outer functions. The rewrite will be a
2076 call to __builtin_nonlocal_goto. */
/* On return, either *HANDLED_OPS_P is false (the statement is not a
   goto to an outer-function label) or the GIMPLE_GOTO has been replaced
   in place by a call to __builtin_nonlocal_goto.  */
2078 static tree
2079 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2080 struct walk_stmt_info *wi)
2082 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2083 tree label, new_label, target_context, x, field;
2084 gcall *call;
2085 gimple stmt = gsi_stmt (*gsi);
2087 if (gimple_code (stmt) != GIMPLE_GOTO)
2089 *handled_ops_p = false;
2090 return NULL_TREE;
2093 label = gimple_goto_dest (stmt);
2094 if (TREE_CODE (label) != LABEL_DECL)
2096 *handled_ops_p = false;
2097 return NULL_TREE;
2100 target_context = decl_function_context (label);
2101 if (target_context == info->context)
2103 *handled_ops_p = false;
2104 return NULL_TREE;
/* Find the nesting_info of the enclosing function that owns LABEL.  */
2107 for (i = info->outer; target_context != i->context; i = i->outer)
2108 continue;
2110 /* The original user label may also be use for a normal goto, therefore
2111 we must create a new label that will actually receive the abnormal
2112 control transfer. This new label will be marked LABEL_NONLOCAL; this
2113 mark will trigger proper behavior in the cfg, as well as cause the
2114 (hairy target-specific) non-local goto receiver code to be generated
2115 when we expand rtl. Enter this association into var_map so that we
2116 can insert the new label into the IL during a second pass. */
2117 tree *slot = &i->var_map->get_or_insert (label);
2118 if (*slot == NULL)
2120 new_label = create_artificial_label (UNKNOWN_LOCATION);
2121 DECL_NONLOCAL (new_label) = 1;
2122 *slot = new_label;
2124 else
2125 new_label = *slot;
2127 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2128 field = get_nl_goto_field (i);
2129 x = get_frame_field (info, target_context, field, gsi);
2130 x = build_addr (x, target_context);
2131 x = gsi_gimplify_val (info, x, gsi);
2132 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2133 2, build_addr (new_label, target_context), x);
2134 gsi_replace (gsi, call, false);
2136 /* We have handled all of STMT's operands, no need to keep going. */
2137 *handled_ops_p = true;
2138 return NULL_TREE;
2142 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2143 are referenced via nonlocal goto from a nested function. The rewrite
2144 will involve installing a newly generated DECL_NONLOCAL label, and
2145 (potentially) a branch around the rtl gunk that is assumed to be
2146 attached to such a label. */
2148 static tree
2149 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2150 struct walk_stmt_info *wi)
2152 struct nesting_info *const info = (struct nesting_info *) wi->info;
2153 tree label, new_label;
2154 gimple_stmt_iterator tmp_gsi;
2155 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2157 if (!stmt)
2159 *handled_ops_p = false;
2160 return NULL_TREE;
2163 label = gimple_label_label (stmt);
2165 tree *slot = info->var_map->get (label);
2166 if (!slot)
2168 *handled_ops_p = false;
2169 return NULL_TREE;
2172 /* If there's any possibility that the previous statement falls through,
2173 then we must branch around the new non-local label. */
2174 tmp_gsi = wi->gsi;
2175 gsi_prev (&tmp_gsi);
2176 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2178 gimple stmt = gimple_build_goto (label);
2179 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2182 new_label = (tree) *slot;
2183 stmt = gimple_build_label (new_label);
2184 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2186 *handled_ops_p = true;
2187 return NULL_TREE;
2191 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2192 of nested functions that require the use of trampolines. The rewrite
2193 will involve a reference a trampoline generated for the occasion. */
2195 static tree
2196 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2198 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2199 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2200 tree t = *tp, decl, target_context, x, builtin;
2201 gcall *call;
2203 *walk_subtrees = 0;
2204 switch (TREE_CODE (t))
2206 case ADDR_EXPR:
2207 /* Build
2208 T.1 = &CHAIN->tramp;
2209 T.2 = __builtin_adjust_trampoline (T.1);
2210 T.3 = (func_type)T.2;
2213 decl = TREE_OPERAND (t, 0);
2214 if (TREE_CODE (decl) != FUNCTION_DECL)
2215 break;
2217 /* Only need to process nested functions. */
2218 target_context = decl_function_context (decl);
2219 if (!target_context)
2220 break;
2222 /* If the nested function doesn't use a static chain, then
2223 it doesn't need a trampoline. */
2224 if (!DECL_STATIC_CHAIN (decl))
2225 break;
2227 /* If we don't want a trampoline, then don't build one. */
2228 if (TREE_NO_TRAMPOLINE (t))
2229 break;
2231 /* Lookup the immediate parent of the callee, as that's where
2232 we need to insert the trampoline. */
2233 for (i = info; i->context != target_context; i = i->outer)
2234 continue;
2235 x = lookup_tramp_for_decl (i, decl, INSERT);
2237 /* Compute the address of the field holding the trampoline. */
2238 x = get_frame_field (info, target_context, x, &wi->gsi);
2239 x = build_addr (x, target_context);
2240 x = gsi_gimplify_val (info, x, &wi->gsi);
2242 /* Do machine-specific ugliness. Normally this will involve
2243 computing extra alignment, but it can really be anything. */
2244 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2245 call = gimple_build_call (builtin, 1, x);
2246 x = init_tmp_var_with_call (info, &wi->gsi, call);
2248 /* Cast back to the proper function type. */
2249 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2250 x = init_tmp_var (info, x, &wi->gsi);
2252 *tp = x;
2253 break;
2255 default:
2256 if (!IS_TYPE_OR_DECL_P (t))
2257 *walk_subtrees = 1;
2258 break;
2261 return NULL_TREE;
2265 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2266 to addresses of nested functions that require the use of
2267 trampolines. The rewrite will involve a reference a trampoline
2268 generated for the occasion. */
2270 static tree
2271 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2272 struct walk_stmt_info *wi)
2274 struct nesting_info *info = (struct nesting_info *) wi->info;
2275 gimple stmt = gsi_stmt (*gsi);
2277 switch (gimple_code (stmt))
2279 case GIMPLE_CALL:
2281 /* Only walk call arguments, lest we generate trampolines for
2282 direct calls. */
2283 unsigned long i, nargs = gimple_call_num_args (stmt);
2284 for (i = 0; i < nargs; i++)
2285 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2286 wi, NULL);
2287 break;
2290 case GIMPLE_OMP_TARGET:
2291 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
2292 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
2294 *handled_ops_p = false;
2295 return NULL_TREE;
2297 /* FALLTHRU */
2298 case GIMPLE_OMP_PARALLEL:
2299 case GIMPLE_OMP_TASK:
2301 tree save_local_var_chain;
2302 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2303 save_local_var_chain = info->new_local_var_chain;
2304 info->new_local_var_chain = NULL;
2305 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2306 info, gimple_omp_body_ptr (stmt));
2307 if (info->new_local_var_chain)
2308 declare_vars (info->new_local_var_chain,
2309 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2310 false);
2311 info->new_local_var_chain = save_local_var_chain;
2313 break;
2315 default:
2316 *handled_ops_p = false;
2317 return NULL_TREE;
2320 *handled_ops_p = true;
2321 return NULL_TREE;
2326 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2327 that reference nested functions to make sure that the static chain
2328 is set up properly for the call. */
2330 static tree
2331 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2332 struct walk_stmt_info *wi)
2334 struct nesting_info *const info = (struct nesting_info *) wi->info;
2335 tree decl, target_context;
2336 char save_static_chain_added;
2337 int i;
2338 gimple stmt = gsi_stmt (*gsi);
2340 switch (gimple_code (stmt))
2342 case GIMPLE_CALL:
2343 if (gimple_call_chain (stmt))
2344 break;
2345 decl = gimple_call_fndecl (stmt);
2346 if (!decl)
2347 break;
2348 target_context = decl_function_context (decl);
2349 if (target_context && DECL_STATIC_CHAIN (decl))
2351 gimple_call_set_chain (as_a <gcall *> (stmt),
2352 get_static_chain (info, target_context,
2353 &wi->gsi));
2354 info->static_chain_added |= (1 << (info->context != target_context));
2356 break;
2358 case GIMPLE_OMP_PARALLEL:
2359 case GIMPLE_OMP_TASK:
2360 save_static_chain_added = info->static_chain_added;
2361 info->static_chain_added = 0;
2362 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2363 for (i = 0; i < 2; i++)
2365 tree c, decl;
2366 if ((info->static_chain_added & (1 << i)) == 0)
2367 continue;
2368 decl = i ? get_chain_decl (info) : info->frame_decl;
2369 /* Don't add CHAIN.* or FRAME.* twice. */
2370 for (c = gimple_omp_taskreg_clauses (stmt);
2372 c = OMP_CLAUSE_CHAIN (c))
2373 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2374 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2375 && OMP_CLAUSE_DECL (c) == decl)
2376 break;
2377 if (c == NULL)
2379 c = build_omp_clause (gimple_location (stmt),
2380 i ? OMP_CLAUSE_FIRSTPRIVATE
2381 : OMP_CLAUSE_SHARED);
2382 OMP_CLAUSE_DECL (c) = decl;
2383 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2384 gimple_omp_taskreg_set_clauses (stmt, c);
2387 info->static_chain_added |= save_static_chain_added;
2388 break;
2390 case GIMPLE_OMP_TARGET:
2391 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
2392 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
2394 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2395 break;
2397 save_static_chain_added = info->static_chain_added;
2398 info->static_chain_added = 0;
2399 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2400 for (i = 0; i < 2; i++)
2402 tree c, decl;
2403 if ((info->static_chain_added & (1 << i)) == 0)
2404 continue;
2405 decl = i ? get_chain_decl (info) : info->frame_decl;
2406 /* Don't add CHAIN.* or FRAME.* twice. */
2407 for (c = gimple_omp_target_clauses (stmt);
2409 c = OMP_CLAUSE_CHAIN (c))
2410 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2411 && OMP_CLAUSE_DECL (c) == decl)
2412 break;
2413 if (c == NULL)
2415 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2416 OMP_CLAUSE_DECL (c) = decl;
2417 OMP_CLAUSE_MAP_KIND (c)
2418 = i ? OMP_CLAUSE_MAP_TO : OMP_CLAUSE_MAP_TOFROM;
2419 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2420 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2421 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2425 info->static_chain_added |= save_static_chain_added;
2426 break;
2428 case GIMPLE_OMP_FOR:
2429 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
2430 walk_body (convert_gimple_call, NULL, info,
2431 gimple_omp_for_pre_body_ptr (stmt));
2432 /* FALLTHRU */
2433 case GIMPLE_OMP_SECTIONS:
2434 case GIMPLE_OMP_SECTION:
2435 case GIMPLE_OMP_SINGLE:
2436 case GIMPLE_OMP_TEAMS:
2437 case GIMPLE_OMP_MASTER:
2438 case GIMPLE_OMP_TASKGROUP:
2439 case GIMPLE_OMP_ORDERED:
2440 case GIMPLE_OMP_CRITICAL:
2441 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
2442 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2443 break;
2445 default:
2446 /* Keep looking for other operands. */
2447 *handled_ops_p = false;
2448 return NULL_TREE;
2451 *handled_ops_p = true;
2452 return NULL_TREE;
2455 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2456 call expressions. At the same time, determine if a nested function
2457 actually uses its static chain; if not, remember that. */
2459 static void
2460 convert_all_function_calls (struct nesting_info *root)
2462 unsigned int chain_count = 0, old_chain_count, iter_count;
2463 struct nesting_info *n;
2465 /* First, optimistically clear static_chain for all decls that haven't
2466 used the static chain already for variable access. But always create
2467 it if not optimizing. This makes it possible to reconstruct the static
2468 nesting tree at run time and thus to resolve up-level references from
2469 within the debugger. */
2470 FOR_EACH_NEST_INFO (n, root)
2472 tree decl = n->context;
2473 if (!optimize)
2475 if (n->inner)
2476 (void) get_frame_type (n);
2477 if (n->outer)
2478 (void) get_chain_decl (n);
2480 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2482 DECL_STATIC_CHAIN (decl) = 0;
2483 if (dump_file && (dump_flags & TDF_DETAILS))
2484 fprintf (dump_file, "Guessing no static-chain for %s\n",
2485 lang_hooks.decl_printable_name (decl, 2));
2487 else
2488 DECL_STATIC_CHAIN (decl) = 1;
2489 chain_count += DECL_STATIC_CHAIN (decl);
2492 /* Walk the functions and perform transformations. Note that these
2493 transformations can induce new uses of the static chain, which in turn
2494 require re-examining all users of the decl. */
2495 /* ??? It would make sense to try to use the call graph to speed this up,
2496 but the call graph hasn't really been built yet. Even if it did, we
2497 would still need to iterate in this loop since address-of references
2498 wouldn't show up in the callgraph anyway. */
2499 iter_count = 0;
2502 old_chain_count = chain_count;
2503 chain_count = 0;
2504 iter_count++;
2506 if (dump_file && (dump_flags & TDF_DETAILS))
2507 fputc ('\n', dump_file);
2509 FOR_EACH_NEST_INFO (n, root)
2511 tree decl = n->context;
2512 walk_function (convert_tramp_reference_stmt,
2513 convert_tramp_reference_op, n);
2514 walk_function (convert_gimple_call, NULL, n);
2515 chain_count += DECL_STATIC_CHAIN (decl);
2518 while (chain_count != old_chain_count);
2520 if (dump_file && (dump_flags & TDF_DETAILS))
2521 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2522 iter_count);
2525 struct nesting_copy_body_data
2527 copy_body_data cb;
2528 struct nesting_info *root;
2531 /* A helper subroutine for debug_var_chain type remapping. */
2533 static tree
2534 nesting_copy_decl (tree decl, copy_body_data *id)
2536 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2537 tree *slot = nid->root->var_map->get (decl);
2539 if (slot)
2540 return (tree) *slot;
2542 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2544 tree new_decl = copy_decl_no_change (decl, id);
2545 DECL_ORIGINAL_TYPE (new_decl)
2546 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2547 return new_decl;
2550 if (TREE_CODE (decl) == VAR_DECL
2551 || TREE_CODE (decl) == PARM_DECL
2552 || TREE_CODE (decl) == RESULT_DECL)
2553 return decl;
2555 return copy_decl_no_change (decl, id);
2558 /* A helper function for remap_vla_decls. See if *TP contains
2559 some remapped variables. */
2561 static tree
2562 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2564 struct nesting_info *root = (struct nesting_info *) data;
2565 tree t = *tp;
2567 if (DECL_P (t))
2569 *walk_subtrees = 0;
2570 tree *slot = root->var_map->get (t);
2572 if (slot)
2573 return *slot;
2575 return NULL;
2578 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2579 involved. */
2581 static void
2582 remap_vla_decls (tree block, struct nesting_info *root)
2584 tree var, subblock, val, type;
2585 struct nesting_copy_body_data id;
2587 for (subblock = BLOCK_SUBBLOCKS (block);
2588 subblock;
2589 subblock = BLOCK_CHAIN (subblock))
2590 remap_vla_decls (subblock, root);
2592 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2593 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2595 val = DECL_VALUE_EXPR (var);
2596 type = TREE_TYPE (var);
2598 if (!(TREE_CODE (val) == INDIRECT_REF
2599 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2600 && variably_modified_type_p (type, NULL)))
2601 continue;
2603 if (root->var_map->get (TREE_OPERAND (val, 0))
2604 || walk_tree (&type, contains_remapped_vars, root, NULL))
2605 break;
2608 if (var == NULL_TREE)
2609 return;
2611 memset (&id, 0, sizeof (id));
2612 id.cb.copy_decl = nesting_copy_decl;
2613 id.cb.decl_map = new hash_map<tree, tree>;
2614 id.root = root;
2616 for (; var; var = DECL_CHAIN (var))
2617 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2619 struct nesting_info *i;
2620 tree newt, context;
2622 val = DECL_VALUE_EXPR (var);
2623 type = TREE_TYPE (var);
2625 if (!(TREE_CODE (val) == INDIRECT_REF
2626 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2627 && variably_modified_type_p (type, NULL)))
2628 continue;
2630 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2631 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2632 continue;
2634 context = decl_function_context (var);
2635 for (i = root; i; i = i->outer)
2636 if (i->context == context)
2637 break;
2639 if (i == NULL)
2640 continue;
2642 /* Fully expand value expressions. This avoids having debug variables
2643 only referenced from them and that can be swept during GC. */
2644 if (slot)
2646 tree t = (tree) *slot;
2647 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2648 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2651 id.cb.src_fn = i->context;
2652 id.cb.dst_fn = i->context;
2653 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2655 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2656 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2658 newt = TREE_TYPE (newt);
2659 type = TREE_TYPE (type);
2661 if (TYPE_NAME (newt)
2662 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2663 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2664 && newt != type
2665 && TYPE_NAME (newt) == TYPE_NAME (type))
2666 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2668 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2669 if (val != DECL_VALUE_EXPR (var))
2670 SET_DECL_VALUE_EXPR (var, val);
2673 delete id.cb.decl_map;
2676 /* Fold the MEM_REF *E. */
2677 bool
2678 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2680 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2681 *ref_p = fold (*ref_p);
2682 return true;
2685 /* Do "everything else" to clean up or complete state collected by the
2686 various walking passes -- lay out the types and decls, generate code
2687 to initialize the frame decl, store critical expressions in the
2688 struct function for rtl to find. */
2690 static void
2691 finalize_nesting_tree_1 (struct nesting_info *root)
2693 gimple_seq stmt_list;
2694 gimple stmt;
2695 tree context = root->context;
2696 struct function *sf;
2698 stmt_list = NULL;
2700 /* If we created a non-local frame type or decl, we need to lay them
2701 out at this time. */
2702 if (root->frame_type)
2704 /* In some cases the frame type will trigger the -Wpadded warning.
2705 This is not helpful; suppress it. */
2706 int save_warn_padded = warn_padded;
2707 tree *adjust;
2709 warn_padded = 0;
2710 layout_type (root->frame_type);
2711 warn_padded = save_warn_padded;
2712 layout_decl (root->frame_decl, 0);
2714 /* Remove root->frame_decl from root->new_local_var_chain, so
2715 that we can declare it also in the lexical blocks, which
2716 helps ensure virtual regs that end up appearing in its RTL
2717 expression get substituted in instantiate_virtual_regs(). */
2718 for (adjust = &root->new_local_var_chain;
2719 *adjust != root->frame_decl;
2720 adjust = &DECL_CHAIN (*adjust))
2721 gcc_assert (DECL_CHAIN (*adjust));
2722 *adjust = DECL_CHAIN (*adjust);
2724 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2725 declare_vars (root->frame_decl,
2726 gimple_seq_first_stmt (gimple_body (context)), true);
2729 /* If any parameters were referenced non-locally, then we need to
2730 insert a copy. Likewise, if any variables were referenced by
2731 pointer, we need to initialize the address. */
2732 if (root->any_parm_remapped)
2734 tree p;
2735 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2737 tree field, x, y;
2739 field = lookup_field_for_decl (root, p, NO_INSERT);
2740 if (!field)
2741 continue;
2743 if (use_pointer_in_frame (p))
2744 x = build_addr (p, context);
2745 else
2746 x = p;
2748 /* If the assignment is from a non-register the stmt is
2749 not valid gimple. Make it so by using a temporary instead. */
2750 if (!is_gimple_reg (x)
2751 && is_gimple_reg_type (TREE_TYPE (x)))
2753 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2754 x = init_tmp_var (root, x, &gsi);
2757 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2758 root->frame_decl, field, NULL_TREE);
2759 stmt = gimple_build_assign (y, x);
2760 gimple_seq_add_stmt (&stmt_list, stmt);
2764 /* If a chain_field was created, then it needs to be initialized
2765 from chain_decl. */
2766 if (root->chain_field)
2768 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2769 root->frame_decl, root->chain_field, NULL_TREE);
2770 stmt = gimple_build_assign (x, get_chain_decl (root));
2771 gimple_seq_add_stmt (&stmt_list, stmt);
2774 /* If trampolines were created, then we need to initialize them. */
2775 if (root->any_tramp_created)
2777 struct nesting_info *i;
2778 for (i = root->inner; i ; i = i->next)
2780 tree arg1, arg2, arg3, x, field;
2782 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2783 if (!field)
2784 continue;
2786 gcc_assert (DECL_STATIC_CHAIN (i->context));
2787 arg3 = build_addr (root->frame_decl, context);
2789 arg2 = build_addr (i->context, context);
2791 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2792 root->frame_decl, field, NULL_TREE);
2793 arg1 = build_addr (x, context);
2795 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2796 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2797 gimple_seq_add_stmt (&stmt_list, stmt);
2801 /* If we created initialization statements, insert them. */
2802 if (stmt_list)
2804 gbind *bind;
2805 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2806 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
2807 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2808 gimple_bind_set_body (bind, stmt_list);
2811 /* If a chain_decl was created, then it needs to be registered with
2812 struct function so that it gets initialized from the static chain
2813 register at the beginning of the function. */
2814 sf = DECL_STRUCT_FUNCTION (root->context);
2815 sf->static_chain_decl = root->chain_decl;
2817 /* Similarly for the non-local goto save area. */
2818 if (root->nl_goto_field)
2820 sf->nonlocal_goto_save_area
2821 = get_frame_field (root, context, root->nl_goto_field, NULL);
2822 sf->has_nonlocal_label = 1;
2825 /* Make sure all new local variables get inserted into the
2826 proper BIND_EXPR. */
2827 if (root->new_local_var_chain)
2828 declare_vars (root->new_local_var_chain,
2829 gimple_seq_first_stmt (gimple_body (root->context)),
2830 false);
2832 if (root->debug_var_chain)
2834 tree debug_var;
2835 gbind *scope;
2837 remap_vla_decls (DECL_INITIAL (root->context), root);
2839 for (debug_var = root->debug_var_chain; debug_var;
2840 debug_var = DECL_CHAIN (debug_var))
2841 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2842 break;
2844 /* If there are any debug decls with variable length types,
2845 remap those types using other debug_var_chain variables. */
2846 if (debug_var)
2848 struct nesting_copy_body_data id;
2850 memset (&id, 0, sizeof (id));
2851 id.cb.copy_decl = nesting_copy_decl;
2852 id.cb.decl_map = new hash_map<tree, tree>;
2853 id.root = root;
2855 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2856 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2858 tree type = TREE_TYPE (debug_var);
2859 tree newt, t = type;
2860 struct nesting_info *i;
2862 for (i = root; i; i = i->outer)
2863 if (variably_modified_type_p (type, i->context))
2864 break;
2866 if (i == NULL)
2867 continue;
2869 id.cb.src_fn = i->context;
2870 id.cb.dst_fn = i->context;
2871 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2873 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2874 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2876 newt = TREE_TYPE (newt);
2877 t = TREE_TYPE (t);
2879 if (TYPE_NAME (newt)
2880 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2881 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2882 && newt != t
2883 && TYPE_NAME (newt) == TYPE_NAME (t))
2884 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2887 delete id.cb.decl_map;
2890 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
2891 if (gimple_bind_block (scope))
2892 declare_vars (root->debug_var_chain, scope, true);
2893 else
2894 BLOCK_VARS (DECL_INITIAL (root->context))
2895 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2896 root->debug_var_chain);
2899 /* Fold the rewritten MEM_REF trees. */
2900 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
2902 /* Dump the translated tree function. */
2903 if (dump_file)
2905 fputs ("\n\n", dump_file);
2906 dump_function_to_file (root->context, dump_file, dump_flags);
2910 static void
2911 finalize_nesting_tree (struct nesting_info *root)
2913 struct nesting_info *n;
2914 FOR_EACH_NEST_INFO (n, root)
2915 finalize_nesting_tree_1 (n);
2918 /* Unnest the nodes and pass them to cgraph. */
2920 static void
2921 unnest_nesting_tree_1 (struct nesting_info *root)
2923 struct cgraph_node *node = cgraph_node::get (root->context);
2925 /* For nested functions update the cgraph to reflect unnesting.
2926 We also delay finalizing of these functions up to this point. */
2927 if (node->origin)
2929 node->unnest ();
2930 cgraph_node::finalize_function (root->context, true);
2934 static void
2935 unnest_nesting_tree (struct nesting_info *root)
2937 struct nesting_info *n;
2938 FOR_EACH_NEST_INFO (n, root)
2939 unnest_nesting_tree_1 (n);
2942 /* Free the data structures allocated during this pass. */
2944 static void
2945 free_nesting_tree (struct nesting_info *root)
2947 struct nesting_info *node, *next;
2949 node = iter_nestinfo_start (root);
2952 next = iter_nestinfo_next (node);
2953 delete node->var_map;
2954 delete node->field_map;
2955 delete node->mem_refs;
2956 free (node);
2957 node = next;
2959 while (node);
2962 /* Gimplify a function and all its nested functions. */
2963 static void
2964 gimplify_all_functions (struct cgraph_node *root)
2966 struct cgraph_node *iter;
2967 if (!gimple_body (root->decl))
2968 gimplify_function_tree (root->decl);
2969 for (iter = root->nested; iter; iter = iter->next_nested)
2970 gimplify_all_functions (iter);
2973 /* Main entry point for this pass. Process FNDECL and all of its nested
2974 subroutines and turn them into something less tightly bound. */
2976 void
2977 lower_nested_functions (tree fndecl)
2979 struct cgraph_node *cgn;
2980 struct nesting_info *root;
2982 /* If there are no nested functions, there's nothing to do. */
2983 cgn = cgraph_node::get (fndecl);
2984 if (!cgn->nested)
2985 return;
2987 gimplify_all_functions (cgn);
2989 dump_file = dump_begin (TDI_nested, &dump_flags);
2990 if (dump_file)
2991 fprintf (dump_file, "\n;; Function %s\n\n",
2992 lang_hooks.decl_printable_name (fndecl, 2));
2994 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2995 root = create_nesting_tree (cgn);
2997 walk_all_functions (convert_nonlocal_reference_stmt,
2998 convert_nonlocal_reference_op,
2999 root);
3000 walk_all_functions (convert_local_reference_stmt,
3001 convert_local_reference_op,
3002 root);
3003 walk_all_functions (convert_nl_goto_reference, NULL, root);
3004 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3006 convert_all_function_calls (root);
3007 finalize_nesting_tree (root);
3008 unnest_nesting_tree (root);
3010 free_nesting_tree (root);
3011 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3013 if (dump_file)
3015 dump_end (TDI_nested, dump_file);
3016 dump_file = NULL;
3020 #include "gt-tree-nested.h"