* tree-core.h (enum omp_clause_code): Add OMP_CLAUSE_DEFAULTMAP.
[official-gcc.git] / gcc / tree-nested.c
blobc2e471a9e16ec1aa95bfce3e80fcec60b2602524
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "alias.h"
25 #include "symtab.h"
26 #include "tree.h"
27 #include "fold-const.h"
28 #include "stringpool.h"
29 #include "stor-layout.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "function.h"
33 #include "tree-dump.h"
34 #include "tree-inline.h"
35 #include "predict.h"
36 #include "basic-block.h"
37 #include "tree-ssa-alias.h"
38 #include "internal-fn.h"
39 #include "gimple-expr.h"
40 #include "gimple.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimple-walk.h"
44 #include "tree-iterator.h"
45 #include "bitmap.h"
46 #include "cgraph.h"
47 #include "tree-cfg.h"
48 #include "rtl.h"
49 #include "flags.h"
50 #include "insn-config.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "varasm.h"
57 #include "stmt.h"
58 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
59 #include "langhooks.h"
60 #include "gimple-low.h"
61 #include "gomp-constants.h"
64 /* The object of this pass is to lower the representation of a set of nested
65 functions in order to expose all of the gory details of the various
66 nonlocal references. We want to do this sooner rather than later, in
67 order to give us more freedom in emitting all of the functions in question.
69 Back in olden times, when gcc was young, we developed an insanely
70 complicated scheme whereby variables which were referenced nonlocally
71 were forced to live in the stack of the declaring function, and then
72 the nested functions magically discovered where these variables were
73 placed. In order for this scheme to function properly, it required
74 that the outer function be partially expanded, then we switch to
75 compiling the inner function, and once done with those we switch back
76 to compiling the outer function. Such delicate ordering requirements
77 makes it difficult to do whole translation unit optimizations
78 involving such functions.
80 The implementation here is much more direct. Everything that can be
81 referenced by an inner function is a member of an explicitly created
82 structure herein called the "nonlocal frame struct". The incoming
83 static chain for a nested function is a pointer to this struct in
84 the parent. In this way, we settle on known offsets from a known
85 base, and so are decoupled from the logic that places objects in the
86 function's stack frame. More importantly, we don't have to wait for
87 that to happen -- since the compilation of the inner function is no
88 longer tied to a real stack frame, the nonlocal frame struct can be
89 allocated anywhere. Which means that the outer function is now
90 inlinable.
92 Theory of operation here is very simple. Iterate over all the
93 statements in all the functions (depth first) several times,
94 allocating structures and fields on demand. In general we want to
95 examine inner functions first, so that we can avoid making changes
96 to outer functions which are unnecessary.
98 The order of the passes matters a bit, in that later passes will be
99 skipped if it is discovered that the functions don't actually interact
100 at all. That is, they're nested in the lexical sense but could have
101 been written as independent functions without change. */
104 struct nesting_info
106 struct nesting_info *outer;
107 struct nesting_info *inner;
108 struct nesting_info *next;
110 hash_map<tree, tree> *field_map;
111 hash_map<tree, tree> *var_map;
112 hash_set<tree *> *mem_refs;
113 bitmap suppress_expansion;
115 tree context;
116 tree new_local_var_chain;
117 tree debug_var_chain;
118 tree frame_type;
119 tree frame_decl;
120 tree chain_field;
121 tree chain_decl;
122 tree nl_goto_field;
124 bool any_parm_remapped;
125 bool any_tramp_created;
126 char static_chain_added;
130 /* Iterate over the nesting tree, starting with ROOT, depth first. */
132 static inline struct nesting_info *
133 iter_nestinfo_start (struct nesting_info *root)
135 while (root->inner)
136 root = root->inner;
137 return root;
140 static inline struct nesting_info *
141 iter_nestinfo_next (struct nesting_info *node)
143 if (node->next)
144 return iter_nestinfo_start (node->next);
145 return node->outer;
148 #define FOR_EACH_NEST_INFO(I, ROOT) \
149 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
151 /* Obstack used for the bitmaps in the struct above. */
152 static struct bitmap_obstack nesting_info_bitmap_obstack;
155 /* We're working in so many different function contexts simultaneously,
156 that create_tmp_var is dangerous. Prevent mishap. */
157 #define create_tmp_var cant_use_create_tmp_var_here_dummy
159 /* Like create_tmp_var, except record the variable for registration at
160 the given nesting level. */
162 static tree
163 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
165 tree tmp_var;
167 /* If the type is of variable size or a type which must be created by the
168 frontend, something is wrong. Note that we explicitly allow
169 incomplete types here, since we create them ourselves here. */
170 gcc_assert (!TREE_ADDRESSABLE (type));
171 gcc_assert (!TYPE_SIZE_UNIT (type)
172 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
174 tmp_var = create_tmp_var_raw (type, prefix);
175 DECL_CONTEXT (tmp_var) = info->context;
176 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
177 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
178 if (TREE_CODE (type) == COMPLEX_TYPE
179 || TREE_CODE (type) == VECTOR_TYPE)
180 DECL_GIMPLE_REG_P (tmp_var) = 1;
182 info->new_local_var_chain = tmp_var;
184 return tmp_var;
187 /* Take the address of EXP to be used within function CONTEXT.
188 Mark it for addressability as necessary. */
190 tree
191 build_addr (tree exp, tree context)
193 tree base = exp;
194 tree save_context;
195 tree retval;
197 while (handled_component_p (base))
198 base = TREE_OPERAND (base, 0);
200 if (DECL_P (base))
201 TREE_ADDRESSABLE (base) = 1;
203 /* Building the ADDR_EXPR will compute a set of properties for
204 that ADDR_EXPR. Those properties are unfortunately context
205 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
207 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
208 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
209 way the properties are for the ADDR_EXPR are computed properly. */
210 save_context = current_function_decl;
211 current_function_decl = context;
212 retval = build_fold_addr_expr (exp);
213 current_function_decl = save_context;
214 return retval;
217 /* Insert FIELD into TYPE, sorted by alignment requirements. */
219 void
220 insert_field_into_struct (tree type, tree field)
222 tree *p;
224 DECL_CONTEXT (field) = type;
226 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
227 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
228 break;
230 DECL_CHAIN (field) = *p;
231 *p = field;
233 /* Set correct alignment for frame struct type. */
234 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
235 TYPE_ALIGN (type) = DECL_ALIGN (field);
238 /* Build or return the RECORD_TYPE that describes the frame state that is
239 shared between INFO->CONTEXT and its nested functions. This record will
240 not be complete until finalize_nesting_tree; up until that point we'll
241 be adding fields as necessary.
243 We also build the DECL that represents this frame in the function. */
245 static tree
246 get_frame_type (struct nesting_info *info)
248 tree type = info->frame_type;
249 if (!type)
251 char *name;
253 type = make_node (RECORD_TYPE);
255 name = concat ("FRAME.",
256 IDENTIFIER_POINTER (DECL_NAME (info->context)),
257 NULL);
258 TYPE_NAME (type) = get_identifier (name);
259 free (name);
261 info->frame_type = type;
262 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
263 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
265 /* ??? Always make it addressable for now, since it is meant to
266 be pointed to by the static chain pointer. This pessimizes
267 when it turns out that no static chains are needed because
268 the nested functions referencing non-local variables are not
269 reachable, but the true pessimization is to create the non-
270 local frame structure in the first place. */
271 TREE_ADDRESSABLE (info->frame_decl) = 1;
273 return type;
276 /* Return true if DECL should be referenced by pointer in the non-local
277 frame structure. */
279 static bool
280 use_pointer_in_frame (tree decl)
282 if (TREE_CODE (decl) == PARM_DECL)
284 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
285 sized decls, and inefficient to copy large aggregates. Don't bother
286 moving anything but scalar variables. */
287 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
289 else
291 /* Variable sized types make things "interesting" in the frame. */
292 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
296 /* Given DECL, a non-locally accessed variable, find or create a field
297 in the non-local frame structure for the given nesting context. */
299 static tree
300 lookup_field_for_decl (struct nesting_info *info, tree decl,
301 enum insert_option insert)
303 if (insert == NO_INSERT)
305 tree *slot = info->field_map->get (decl);
306 return slot ? *slot : NULL_TREE;
309 tree *slot = &info->field_map->get_or_insert (decl);
310 if (!*slot)
312 tree field = make_node (FIELD_DECL);
313 DECL_NAME (field) = DECL_NAME (decl);
315 if (use_pointer_in_frame (decl))
317 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
318 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
319 DECL_NONADDRESSABLE_P (field) = 1;
321 else
323 TREE_TYPE (field) = TREE_TYPE (decl);
324 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
325 DECL_ALIGN (field) = DECL_ALIGN (decl);
326 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
327 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
328 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
329 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
332 insert_field_into_struct (get_frame_type (info), field);
333 *slot = field;
335 if (TREE_CODE (decl) == PARM_DECL)
336 info->any_parm_remapped = true;
339 return *slot;
342 /* Build or return the variable that holds the static chain within
343 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
345 static tree
346 get_chain_decl (struct nesting_info *info)
348 tree decl = info->chain_decl;
350 if (!decl)
352 tree type;
354 type = get_frame_type (info->outer);
355 type = build_pointer_type (type);
357 /* Note that this variable is *not* entered into any BIND_EXPR;
358 the construction of this variable is handled specially in
359 expand_function_start and initialize_inlined_parameters.
360 Note also that it's represented as a parameter. This is more
361 close to the truth, since the initial value does come from
362 the caller. */
363 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
364 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
365 DECL_ARTIFICIAL (decl) = 1;
366 DECL_IGNORED_P (decl) = 1;
367 TREE_USED (decl) = 1;
368 DECL_CONTEXT (decl) = info->context;
369 DECL_ARG_TYPE (decl) = type;
371 /* Tell tree-inline.c that we never write to this variable, so
372 it can copy-prop the replacement value immediately. */
373 TREE_READONLY (decl) = 1;
375 info->chain_decl = decl;
377 if (dump_file
378 && (dump_flags & TDF_DETAILS)
379 && !DECL_STATIC_CHAIN (info->context))
380 fprintf (dump_file, "Setting static-chain for %s\n",
381 lang_hooks.decl_printable_name (info->context, 2));
383 DECL_STATIC_CHAIN (info->context) = 1;
385 return decl;
388 /* Build or return the field within the non-local frame state that holds
389 the static chain for INFO->CONTEXT. This is the way to walk back up
390 multiple nesting levels. */
392 static tree
393 get_chain_field (struct nesting_info *info)
395 tree field = info->chain_field;
397 if (!field)
399 tree type = build_pointer_type (get_frame_type (info->outer));
401 field = make_node (FIELD_DECL);
402 DECL_NAME (field) = get_identifier ("__chain");
403 TREE_TYPE (field) = type;
404 DECL_ALIGN (field) = TYPE_ALIGN (type);
405 DECL_NONADDRESSABLE_P (field) = 1;
407 insert_field_into_struct (get_frame_type (info), field);
409 info->chain_field = field;
411 if (dump_file
412 && (dump_flags & TDF_DETAILS)
413 && !DECL_STATIC_CHAIN (info->context))
414 fprintf (dump_file, "Setting static-chain for %s\n",
415 lang_hooks.decl_printable_name (info->context, 2));
417 DECL_STATIC_CHAIN (info->context) = 1;
419 return field;
422 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
424 static tree
425 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
426 gcall *call)
428 tree t;
430 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
431 gimple_call_set_lhs (call, t);
432 if (! gsi_end_p (*gsi))
433 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
434 gsi_insert_before (gsi, call, GSI_SAME_STMT);
436 return t;
440 /* Copy EXP into a temporary. Allocate the temporary in the context of
441 INFO and insert the initialization statement before GSI. */
443 static tree
444 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
446 tree t;
447 gimple stmt;
449 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
450 stmt = gimple_build_assign (t, exp);
451 if (! gsi_end_p (*gsi))
452 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
453 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
455 return t;
459 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
461 static tree
462 gsi_gimplify_val (struct nesting_info *info, tree exp,
463 gimple_stmt_iterator *gsi)
465 if (is_gimple_val (exp))
466 return exp;
467 else
468 return init_tmp_var (info, exp, gsi);
471 /* Similarly, but copy from the temporary and insert the statement
472 after the iterator. */
474 static tree
475 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
477 tree t;
478 gimple stmt;
480 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
481 stmt = gimple_build_assign (exp, t);
482 if (! gsi_end_p (*gsi))
483 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
484 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
486 return t;
489 /* Build or return the type used to represent a nested function trampoline. */
491 static GTY(()) tree trampoline_type;
493 static tree
494 get_trampoline_type (struct nesting_info *info)
496 unsigned align, size;
497 tree t;
499 if (trampoline_type)
500 return trampoline_type;
502 align = TRAMPOLINE_ALIGNMENT;
503 size = TRAMPOLINE_SIZE;
505 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
506 then allocate extra space so that we can do dynamic alignment. */
507 if (align > STACK_BOUNDARY)
509 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
510 align = STACK_BOUNDARY;
513 t = build_index_type (size_int (size - 1));
514 t = build_array_type (char_type_node, t);
515 t = build_decl (DECL_SOURCE_LOCATION (info->context),
516 FIELD_DECL, get_identifier ("__data"), t);
517 DECL_ALIGN (t) = align;
518 DECL_USER_ALIGN (t) = 1;
520 trampoline_type = make_node (RECORD_TYPE);
521 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
522 TYPE_FIELDS (trampoline_type) = t;
523 layout_type (trampoline_type);
524 DECL_CONTEXT (t) = trampoline_type;
526 return trampoline_type;
529 /* Given DECL, a nested function, find or create a field in the non-local
530 frame structure for a trampoline for this function. */
532 static tree
533 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
534 enum insert_option insert)
536 if (insert == NO_INSERT)
538 tree *slot = info->var_map->get (decl);
539 return slot ? *slot : NULL_TREE;
542 tree *slot = &info->var_map->get_or_insert (decl);
543 if (!*slot)
545 tree field = make_node (FIELD_DECL);
546 DECL_NAME (field) = DECL_NAME (decl);
547 TREE_TYPE (field) = get_trampoline_type (info);
548 TREE_ADDRESSABLE (field) = 1;
550 insert_field_into_struct (get_frame_type (info), field);
551 *slot = field;
553 info->any_tramp_created = true;
556 return *slot;
559 /* Build or return the field within the non-local frame state that holds
560 the non-local goto "jmp_buf". The buffer itself is maintained by the
561 rtl middle-end as dynamic stack space is allocated. */
563 static tree
564 get_nl_goto_field (struct nesting_info *info)
566 tree field = info->nl_goto_field;
567 if (!field)
569 unsigned size;
570 tree type;
572 /* For __builtin_nonlocal_goto, we need N words. The first is the
573 frame pointer, the rest is for the target's stack pointer save
574 area. The number of words is controlled by STACK_SAVEAREA_MODE;
575 not the best interface, but it'll do for now. */
576 if (Pmode == ptr_mode)
577 type = ptr_type_node;
578 else
579 type = lang_hooks.types.type_for_mode (Pmode, 1);
581 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
582 size = size / GET_MODE_SIZE (Pmode);
583 size = size + 1;
585 type = build_array_type
586 (type, build_index_type (size_int (size)));
588 field = make_node (FIELD_DECL);
589 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
590 TREE_TYPE (field) = type;
591 DECL_ALIGN (field) = TYPE_ALIGN (type);
592 TREE_ADDRESSABLE (field) = 1;
594 insert_field_into_struct (get_frame_type (info), field);
596 info->nl_goto_field = field;
599 return field;
602 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
604 static void
605 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
606 struct nesting_info *info, gimple_seq *pseq)
608 struct walk_stmt_info wi;
610 memset (&wi, 0, sizeof (wi));
611 wi.info = info;
612 wi.val_only = true;
613 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
617 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
619 static inline void
620 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
621 struct nesting_info *info)
623 gimple_seq body = gimple_body (info->context);
624 walk_body (callback_stmt, callback_op, info, &body);
625 gimple_set_body (info->context, body);
628 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
630 static void
631 walk_gimple_omp_for (gomp_for *for_stmt,
632 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
633 struct nesting_info *info)
635 struct walk_stmt_info wi;
636 gimple_seq seq;
637 tree t;
638 size_t i;
640 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
642 seq = NULL;
643 memset (&wi, 0, sizeof (wi));
644 wi.info = info;
645 wi.gsi = gsi_last (seq);
647 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
649 wi.val_only = false;
650 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
651 &wi, NULL);
652 wi.val_only = true;
653 wi.is_lhs = false;
654 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
655 &wi, NULL);
657 wi.val_only = true;
658 wi.is_lhs = false;
659 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
660 &wi, NULL);
662 t = gimple_omp_for_incr (for_stmt, i);
663 gcc_assert (BINARY_CLASS_P (t));
664 wi.val_only = false;
665 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
666 wi.val_only = true;
667 wi.is_lhs = false;
668 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
671 seq = gsi_seq (wi.gsi);
672 if (!gimple_seq_empty_p (seq))
674 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
675 annotate_all_with_location (seq, gimple_location (for_stmt));
676 gimple_seq_add_seq (&pre_body, seq);
677 gimple_omp_for_set_pre_body (for_stmt, pre_body);
681 /* Similarly for ROOT and all functions nested underneath, depth first. */
683 static void
684 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
685 struct nesting_info *root)
687 struct nesting_info *n;
688 FOR_EACH_NEST_INFO (n, root)
689 walk_function (callback_stmt, callback_op, n);
693 /* We have to check for a fairly pathological case. The operands of function
694 nested function are to be interpreted in the context of the enclosing
695 function. So if any are variably-sized, they will get remapped when the
696 enclosing function is inlined. But that remapping would also have to be
697 done in the types of the PARM_DECLs of the nested function, meaning the
698 argument types of that function will disagree with the arguments in the
699 calls to that function. So we'd either have to make a copy of the nested
700 function corresponding to each time the enclosing function was inlined or
701 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
702 function. The former is not practical. The latter would still require
703 detecting this case to know when to add the conversions. So, for now at
704 least, we don't inline such an enclosing function.
706 We have to do that check recursively, so here return indicating whether
707 FNDECL has such a nested function. ORIG_FN is the function we were
708 trying to inline to use for checking whether any argument is variably
709 modified by anything in it.
711 It would be better to do this in tree-inline.c so that we could give
712 the appropriate warning for why a function can't be inlined, but that's
713 too late since the nesting structure has already been flattened and
714 adding a flag just to record this fact seems a waste of a flag. */
716 static bool
717 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
719 struct cgraph_node *cgn = cgraph_node::get (fndecl);
720 tree arg;
722 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
724 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
725 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
726 return true;
728 if (check_for_nested_with_variably_modified (cgn->decl,
729 orig_fndecl))
730 return true;
733 return false;
736 /* Construct our local datastructure describing the function nesting
737 tree rooted by CGN. */
739 static struct nesting_info *
740 create_nesting_tree (struct cgraph_node *cgn)
742 struct nesting_info *info = XCNEW (struct nesting_info);
743 info->field_map = new hash_map<tree, tree>;
744 info->var_map = new hash_map<tree, tree>;
745 info->mem_refs = new hash_set<tree *>;
746 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
747 info->context = cgn->decl;
749 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
751 struct nesting_info *sub = create_nesting_tree (cgn);
752 sub->outer = info;
753 sub->next = info->inner;
754 info->inner = sub;
757 /* See discussion at check_for_nested_with_variably_modified for a
758 discussion of why this has to be here. */
759 if (check_for_nested_with_variably_modified (info->context, info->context))
760 DECL_UNINLINABLE (info->context) = true;
762 return info;
765 /* Return an expression computing the static chain for TARGET_CONTEXT
766 from INFO->CONTEXT. Insert any necessary computations before TSI. */
768 static tree
769 get_static_chain (struct nesting_info *info, tree target_context,
770 gimple_stmt_iterator *gsi)
772 struct nesting_info *i;
773 tree x;
775 if (info->context == target_context)
777 x = build_addr (info->frame_decl, target_context);
779 else
781 x = get_chain_decl (info);
783 for (i = info->outer; i->context != target_context; i = i->outer)
785 tree field = get_chain_field (i);
787 x = build_simple_mem_ref (x);
788 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
789 x = init_tmp_var (info, x, gsi);
793 return x;
797 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
798 frame as seen from INFO->CONTEXT. Insert any necessary computations
799 before GSI. */
801 static tree
802 get_frame_field (struct nesting_info *info, tree target_context,
803 tree field, gimple_stmt_iterator *gsi)
805 struct nesting_info *i;
806 tree x;
808 if (info->context == target_context)
810 /* Make sure frame_decl gets created. */
811 (void) get_frame_type (info);
812 x = info->frame_decl;
814 else
816 x = get_chain_decl (info);
818 for (i = info->outer; i->context != target_context; i = i->outer)
820 tree field = get_chain_field (i);
822 x = build_simple_mem_ref (x);
823 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
824 x = init_tmp_var (info, x, gsi);
827 x = build_simple_mem_ref (x);
830 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
831 return x;
834 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
836 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
837 in the nested function with DECL_VALUE_EXPR set to reference the true
838 variable in the parent function. This is used both for debug info
839 and in OMP lowering. */
841 static tree
842 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
844 tree target_context;
845 struct nesting_info *i;
846 tree x, field, new_decl;
848 tree *slot = &info->var_map->get_or_insert (decl);
850 if (*slot)
851 return *slot;
853 target_context = decl_function_context (decl);
855 /* A copy of the code in get_frame_field, but without the temporaries. */
856 if (info->context == target_context)
858 /* Make sure frame_decl gets created. */
859 (void) get_frame_type (info);
860 x = info->frame_decl;
861 i = info;
863 else
865 x = get_chain_decl (info);
866 for (i = info->outer; i->context != target_context; i = i->outer)
868 field = get_chain_field (i);
869 x = build_simple_mem_ref (x);
870 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
872 x = build_simple_mem_ref (x);
875 field = lookup_field_for_decl (i, decl, INSERT);
876 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
877 if (use_pointer_in_frame (decl))
878 x = build_simple_mem_ref (x);
880 /* ??? We should be remapping types as well, surely. */
881 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
882 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
883 DECL_CONTEXT (new_decl) = info->context;
884 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
885 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
886 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
887 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
888 TREE_READONLY (new_decl) = TREE_READONLY (decl);
889 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
890 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
891 if ((TREE_CODE (decl) == PARM_DECL
892 || TREE_CODE (decl) == RESULT_DECL
893 || TREE_CODE (decl) == VAR_DECL)
894 && DECL_BY_REFERENCE (decl))
895 DECL_BY_REFERENCE (new_decl) = 1;
897 SET_DECL_VALUE_EXPR (new_decl, x);
898 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
900 *slot = new_decl;
901 DECL_CHAIN (new_decl) = info->debug_var_chain;
902 info->debug_var_chain = new_decl;
904 if (!optimize
905 && info->context != target_context
906 && variably_modified_type_p (TREE_TYPE (decl), NULL))
907 note_nonlocal_vla_type (info, TREE_TYPE (decl));
909 return new_decl;
913 /* Callback for walk_gimple_stmt, rewrite all references to VAR
914 and PARM_DECLs that belong to outer functions.
916 The rewrite will involve some number of structure accesses back up
917 the static chain. E.g. for a variable FOO up one nesting level it'll
918 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
919 indirections apply to decls for which use_pointer_in_frame is true. */
921 static tree
922 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
924 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
925 struct nesting_info *const info = (struct nesting_info *) wi->info;
926 tree t = *tp;
928 *walk_subtrees = 0;
929 switch (TREE_CODE (t))
931 case VAR_DECL:
932 /* Non-automatic variables are never processed. */
933 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
934 break;
935 /* FALLTHRU */
937 case PARM_DECL:
938 if (decl_function_context (t) != info->context)
940 tree x;
941 wi->changed = true;
943 x = get_nonlocal_debug_decl (info, t);
944 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
946 tree target_context = decl_function_context (t);
947 struct nesting_info *i;
948 for (i = info->outer; i->context != target_context; i = i->outer)
949 continue;
950 x = lookup_field_for_decl (i, t, INSERT);
951 x = get_frame_field (info, target_context, x, &wi->gsi);
952 if (use_pointer_in_frame (t))
954 x = init_tmp_var (info, x, &wi->gsi);
955 x = build_simple_mem_ref (x);
959 if (wi->val_only)
961 if (wi->is_lhs)
962 x = save_tmp_var (info, x, &wi->gsi);
963 else
964 x = init_tmp_var (info, x, &wi->gsi);
967 *tp = x;
969 break;
971 case LABEL_DECL:
972 /* We're taking the address of a label from a parent function, but
973 this is not itself a non-local goto. Mark the label such that it
974 will not be deleted, much as we would with a label address in
975 static storage. */
976 if (decl_function_context (t) != info->context)
977 FORCED_LABEL (t) = 1;
978 break;
980 case ADDR_EXPR:
982 bool save_val_only = wi->val_only;
984 wi->val_only = false;
985 wi->is_lhs = false;
986 wi->changed = false;
987 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
988 wi->val_only = true;
990 if (wi->changed)
992 tree save_context;
994 /* If we changed anything, we might no longer be directly
995 referencing a decl. */
996 save_context = current_function_decl;
997 current_function_decl = info->context;
998 recompute_tree_invariant_for_addr_expr (t);
999 current_function_decl = save_context;
1001 /* If the callback converted the address argument in a context
1002 where we only accept variables (and min_invariant, presumably),
1003 then compute the address into a temporary. */
1004 if (save_val_only)
1005 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1006 t, &wi->gsi);
1009 break;
1011 case REALPART_EXPR:
1012 case IMAGPART_EXPR:
1013 case COMPONENT_REF:
1014 case ARRAY_REF:
1015 case ARRAY_RANGE_REF:
1016 case BIT_FIELD_REF:
1017 /* Go down this entire nest and just look at the final prefix and
1018 anything that describes the references. Otherwise, we lose track
1019 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1020 wi->val_only = true;
1021 wi->is_lhs = false;
1022 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1024 if (TREE_CODE (t) == COMPONENT_REF)
1025 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1026 NULL);
1027 else if (TREE_CODE (t) == ARRAY_REF
1028 || TREE_CODE (t) == ARRAY_RANGE_REF)
1030 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1031 wi, NULL);
1032 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1033 wi, NULL);
1034 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1035 wi, NULL);
1038 wi->val_only = false;
1039 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1040 break;
1042 case VIEW_CONVERT_EXPR:
1043 /* Just request to look at the subtrees, leaving val_only and lhs
1044 untouched. This might actually be for !val_only + lhs, in which
1045 case we don't want to force a replacement by a temporary. */
1046 *walk_subtrees = 1;
1047 break;
1049 default:
1050 if (!IS_TYPE_OR_DECL_P (t))
1052 *walk_subtrees = 1;
1053 wi->val_only = true;
1054 wi->is_lhs = false;
1056 break;
1059 return NULL_TREE;
1062 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1063 struct walk_stmt_info *);
1065 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1066 and PARM_DECLs that belong to outer functions. */
1068 static bool
1069 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1071 struct nesting_info *const info = (struct nesting_info *) wi->info;
1072 bool need_chain = false, need_stmts = false;
1073 tree clause, decl;
1074 int dummy;
1075 bitmap new_suppress;
1077 new_suppress = BITMAP_GGC_ALLOC ();
1078 bitmap_copy (new_suppress, info->suppress_expansion);
1080 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1082 switch (OMP_CLAUSE_CODE (clause))
1084 case OMP_CLAUSE_REDUCTION:
1085 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1086 need_stmts = true;
1087 goto do_decl_clause;
1089 case OMP_CLAUSE_LASTPRIVATE:
1090 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1091 need_stmts = true;
1092 goto do_decl_clause;
1094 case OMP_CLAUSE_LINEAR:
1095 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1096 need_stmts = true;
1097 wi->val_only = true;
1098 wi->is_lhs = false;
1099 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1100 &dummy, wi);
1101 goto do_decl_clause;
1103 case OMP_CLAUSE_PRIVATE:
1104 case OMP_CLAUSE_FIRSTPRIVATE:
1105 case OMP_CLAUSE_COPYPRIVATE:
1106 case OMP_CLAUSE_SHARED:
1107 do_decl_clause:
1108 decl = OMP_CLAUSE_DECL (clause);
1109 if (TREE_CODE (decl) == VAR_DECL
1110 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1111 break;
1112 if (decl_function_context (decl) != info->context)
1114 bitmap_set_bit (new_suppress, DECL_UID (decl));
1115 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1116 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1117 need_chain = true;
1119 break;
1121 case OMP_CLAUSE_SCHEDULE:
1122 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1123 break;
1124 /* FALLTHRU */
1125 case OMP_CLAUSE_FINAL:
1126 case OMP_CLAUSE_IF:
1127 case OMP_CLAUSE_NUM_THREADS:
1128 case OMP_CLAUSE_DEPEND:
1129 case OMP_CLAUSE_DEVICE:
1130 case OMP_CLAUSE_NUM_TEAMS:
1131 case OMP_CLAUSE_THREAD_LIMIT:
1132 case OMP_CLAUSE_SAFELEN:
1133 case OMP_CLAUSE_SIMDLEN:
1134 case OMP_CLAUSE_PRIORITY:
1135 case OMP_CLAUSE_GRAINSIZE:
1136 case OMP_CLAUSE_NUM_TASKS:
1137 case OMP_CLAUSE_HINT:
1138 case OMP_CLAUSE__CILK_FOR_COUNT_:
1139 wi->val_only = true;
1140 wi->is_lhs = false;
1141 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1142 &dummy, wi);
1143 break;
1145 case OMP_CLAUSE_DIST_SCHEDULE:
1146 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1148 wi->val_only = true;
1149 wi->is_lhs = false;
1150 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1151 &dummy, wi);
1153 break;
1155 case OMP_CLAUSE_MAP:
1156 case OMP_CLAUSE_TO:
1157 case OMP_CLAUSE_FROM:
1158 if (OMP_CLAUSE_SIZE (clause))
1160 wi->val_only = true;
1161 wi->is_lhs = false;
1162 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1163 &dummy, wi);
1165 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1166 goto do_decl_clause;
1167 wi->val_only = true;
1168 wi->is_lhs = false;
1169 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1170 wi, NULL);
1171 break;
1173 case OMP_CLAUSE_ALIGNED:
1174 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1176 wi->val_only = true;
1177 wi->is_lhs = false;
1178 convert_nonlocal_reference_op
1179 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1181 /* Like do_decl_clause, but don't add any suppression. */
1182 decl = OMP_CLAUSE_DECL (clause);
1183 if (TREE_CODE (decl) == VAR_DECL
1184 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1185 break;
1186 if (decl_function_context (decl) != info->context)
1188 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1189 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1190 need_chain = true;
1192 break;
1194 case OMP_CLAUSE_NOWAIT:
1195 case OMP_CLAUSE_ORDERED:
1196 case OMP_CLAUSE_DEFAULT:
1197 case OMP_CLAUSE_COPYIN:
1198 case OMP_CLAUSE_COLLAPSE:
1199 case OMP_CLAUSE_UNTIED:
1200 case OMP_CLAUSE_MERGEABLE:
1201 case OMP_CLAUSE_PROC_BIND:
1202 case OMP_CLAUSE_NOGROUP:
1203 case OMP_CLAUSE_THREADS:
1204 case OMP_CLAUSE_SIMD:
1205 case OMP_CLAUSE_DEFAULTMAP:
1206 break;
1208 default:
1209 gcc_unreachable ();
1213 info->suppress_expansion = new_suppress;
1215 if (need_stmts)
1216 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1217 switch (OMP_CLAUSE_CODE (clause))
1219 case OMP_CLAUSE_REDUCTION:
1220 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1222 tree old_context
1223 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1224 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1225 = info->context;
1226 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1227 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1228 = info->context;
1229 walk_body (convert_nonlocal_reference_stmt,
1230 convert_nonlocal_reference_op, info,
1231 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1232 walk_body (convert_nonlocal_reference_stmt,
1233 convert_nonlocal_reference_op, info,
1234 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1235 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1236 = old_context;
1237 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1238 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1239 = old_context;
1241 break;
1243 case OMP_CLAUSE_LASTPRIVATE:
1244 walk_body (convert_nonlocal_reference_stmt,
1245 convert_nonlocal_reference_op, info,
1246 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1247 break;
1249 case OMP_CLAUSE_LINEAR:
1250 walk_body (convert_nonlocal_reference_stmt,
1251 convert_nonlocal_reference_op, info,
1252 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1253 break;
1255 default:
1256 break;
1259 return need_chain;
1262 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1264 static void
1265 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1267 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1268 type = TREE_TYPE (type);
1270 if (TYPE_NAME (type)
1271 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1272 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1273 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1275 while (POINTER_TYPE_P (type)
1276 || TREE_CODE (type) == VECTOR_TYPE
1277 || TREE_CODE (type) == FUNCTION_TYPE
1278 || TREE_CODE (type) == METHOD_TYPE)
1279 type = TREE_TYPE (type);
1281 if (TREE_CODE (type) == ARRAY_TYPE)
1283 tree domain, t;
1285 note_nonlocal_vla_type (info, TREE_TYPE (type));
1286 domain = TYPE_DOMAIN (type);
1287 if (domain)
1289 t = TYPE_MIN_VALUE (domain);
1290 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1291 && decl_function_context (t) != info->context)
1292 get_nonlocal_debug_decl (info, t);
1293 t = TYPE_MAX_VALUE (domain);
1294 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1295 && decl_function_context (t) != info->context)
1296 get_nonlocal_debug_decl (info, t);
1301 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1302 in BLOCK. */
1304 static void
1305 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1307 tree var;
1309 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1310 if (TREE_CODE (var) == VAR_DECL
1311 && variably_modified_type_p (TREE_TYPE (var), NULL)
1312 && DECL_HAS_VALUE_EXPR_P (var)
1313 && decl_function_context (var) != info->context)
1314 note_nonlocal_vla_type (info, TREE_TYPE (var));
1317 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1318 PARM_DECLs that belong to outer functions. This handles statements
1319 that are not handled via the standard recursion done in
1320 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1321 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1322 operands of STMT have been handled by this function. */
1324 static tree
1325 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1326 struct walk_stmt_info *wi)
1328 struct nesting_info *info = (struct nesting_info *) wi->info;
1329 tree save_local_var_chain;
1330 bitmap save_suppress;
1331 gimple stmt = gsi_stmt (*gsi);
1333 switch (gimple_code (stmt))
1335 case GIMPLE_GOTO:
1336 /* Don't walk non-local gotos for now. */
1337 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1339 wi->val_only = true;
1340 wi->is_lhs = false;
1341 *handled_ops_p = true;
1342 return NULL_TREE;
1344 break;
1346 case GIMPLE_OMP_PARALLEL:
1347 case GIMPLE_OMP_TASK:
1348 save_suppress = info->suppress_expansion;
1349 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1350 wi))
1352 tree c, decl;
1353 decl = get_chain_decl (info);
1354 c = build_omp_clause (gimple_location (stmt),
1355 OMP_CLAUSE_FIRSTPRIVATE);
1356 OMP_CLAUSE_DECL (c) = decl;
1357 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1358 gimple_omp_taskreg_set_clauses (stmt, c);
1361 save_local_var_chain = info->new_local_var_chain;
1362 info->new_local_var_chain = NULL;
1364 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1365 info, gimple_omp_body_ptr (stmt));
1367 if (info->new_local_var_chain)
1368 declare_vars (info->new_local_var_chain,
1369 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1370 false);
1371 info->new_local_var_chain = save_local_var_chain;
1372 info->suppress_expansion = save_suppress;
1373 break;
1375 case GIMPLE_OMP_FOR:
1376 save_suppress = info->suppress_expansion;
1377 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1378 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1379 convert_nonlocal_reference_stmt,
1380 convert_nonlocal_reference_op, info);
1381 walk_body (convert_nonlocal_reference_stmt,
1382 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1383 info->suppress_expansion = save_suppress;
1384 break;
1386 case GIMPLE_OMP_SECTIONS:
1387 save_suppress = info->suppress_expansion;
1388 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1389 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1390 info, gimple_omp_body_ptr (stmt));
1391 info->suppress_expansion = save_suppress;
1392 break;
1394 case GIMPLE_OMP_SINGLE:
1395 save_suppress = info->suppress_expansion;
1396 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1397 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1398 info, gimple_omp_body_ptr (stmt));
1399 info->suppress_expansion = save_suppress;
1400 break;
1402 case GIMPLE_OMP_TARGET:
1403 if (!is_gimple_omp_offloaded (stmt))
1405 save_suppress = info->suppress_expansion;
1406 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1407 wi);
1408 info->suppress_expansion = save_suppress;
1409 walk_body (convert_nonlocal_reference_stmt,
1410 convert_nonlocal_reference_op, info,
1411 gimple_omp_body_ptr (stmt));
1412 break;
1414 save_suppress = info->suppress_expansion;
1415 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1416 wi))
1418 tree c, decl;
1419 decl = get_chain_decl (info);
1420 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1421 OMP_CLAUSE_DECL (c) = decl;
1422 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1423 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1424 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1425 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1428 save_local_var_chain = info->new_local_var_chain;
1429 info->new_local_var_chain = NULL;
1431 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1432 info, gimple_omp_body_ptr (stmt));
1434 if (info->new_local_var_chain)
1435 declare_vars (info->new_local_var_chain,
1436 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1437 false);
1438 info->new_local_var_chain = save_local_var_chain;
1439 info->suppress_expansion = save_suppress;
1440 break;
1442 case GIMPLE_OMP_TEAMS:
1443 save_suppress = info->suppress_expansion;
1444 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1445 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1446 info, gimple_omp_body_ptr (stmt));
1447 info->suppress_expansion = save_suppress;
1448 break;
1450 case GIMPLE_OMP_SECTION:
1451 case GIMPLE_OMP_MASTER:
1452 case GIMPLE_OMP_TASKGROUP:
1453 case GIMPLE_OMP_ORDERED:
1454 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1455 info, gimple_omp_body_ptr (stmt));
1456 break;
1458 case GIMPLE_BIND:
1460 gbind *bind_stmt = as_a <gbind *> (stmt);
1461 if (!optimize && gimple_bind_block (bind_stmt))
1462 note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));
1464 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1465 if (TREE_CODE (var) == NAMELIST_DECL)
1467 /* Adjust decls mentioned in NAMELIST_DECL. */
1468 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1469 tree decl;
1470 unsigned int i;
1472 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1474 if (TREE_CODE (decl) == VAR_DECL
1475 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1476 continue;
1477 if (decl_function_context (decl) != info->context)
1478 CONSTRUCTOR_ELT (decls, i)->value
1479 = get_nonlocal_debug_decl (info, decl);
1483 *handled_ops_p = false;
1484 return NULL_TREE;
1486 case GIMPLE_COND:
1487 wi->val_only = true;
1488 wi->is_lhs = false;
1489 *handled_ops_p = false;
1490 return NULL_TREE;
1492 default:
1493 /* For every other statement that we are not interested in
1494 handling here, let the walker traverse the operands. */
1495 *handled_ops_p = false;
1496 return NULL_TREE;
1499 /* We have handled all of STMT operands, no need to traverse the operands. */
1500 *handled_ops_p = true;
1501 return NULL_TREE;
1505 /* A subroutine of convert_local_reference. Create a local variable
1506 in the parent function with DECL_VALUE_EXPR set to reference the
1507 field in FRAME. This is used both for debug info and in OMP
1508 lowering. */
1510 static tree
1511 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1513 tree x, new_decl;
1515 tree *slot = &info->var_map->get_or_insert (decl);
1516 if (*slot)
1517 return *slot;
1519 /* Make sure frame_decl gets created. */
1520 (void) get_frame_type (info);
1521 x = info->frame_decl;
1522 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1524 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1525 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1526 DECL_CONTEXT (new_decl) = info->context;
1527 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1528 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1529 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1530 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1531 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1532 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1533 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1534 if ((TREE_CODE (decl) == PARM_DECL
1535 || TREE_CODE (decl) == RESULT_DECL
1536 || TREE_CODE (decl) == VAR_DECL)
1537 && DECL_BY_REFERENCE (decl))
1538 DECL_BY_REFERENCE (new_decl) = 1;
1540 SET_DECL_VALUE_EXPR (new_decl, x);
1541 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1542 *slot = new_decl;
1544 DECL_CHAIN (new_decl) = info->debug_var_chain;
1545 info->debug_var_chain = new_decl;
1547 /* Do not emit debug info twice. */
1548 DECL_IGNORED_P (decl) = 1;
1550 return new_decl;
1554 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1555 and PARM_DECLs that were referenced by inner nested functions.
1556 The rewrite will be a structure reference to the local frame variable. */
1558 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1560 static tree
1561 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1563 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1564 struct nesting_info *const info = (struct nesting_info *) wi->info;
1565 tree t = *tp, field, x;
1566 bool save_val_only;
1568 *walk_subtrees = 0;
1569 switch (TREE_CODE (t))
1571 case VAR_DECL:
1572 /* Non-automatic variables are never processed. */
1573 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1574 break;
1575 /* FALLTHRU */
1577 case PARM_DECL:
1578 if (decl_function_context (t) == info->context)
1580 /* If we copied a pointer to the frame, then the original decl
1581 is used unchanged in the parent function. */
1582 if (use_pointer_in_frame (t))
1583 break;
1585 /* No need to transform anything if no child references the
1586 variable. */
1587 field = lookup_field_for_decl (info, t, NO_INSERT);
1588 if (!field)
1589 break;
1590 wi->changed = true;
1592 x = get_local_debug_decl (info, t, field);
1593 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1594 x = get_frame_field (info, info->context, field, &wi->gsi);
1596 if (wi->val_only)
1598 if (wi->is_lhs)
1599 x = save_tmp_var (info, x, &wi->gsi);
1600 else
1601 x = init_tmp_var (info, x, &wi->gsi);
1604 *tp = x;
1606 break;
1608 case ADDR_EXPR:
1609 save_val_only = wi->val_only;
1610 wi->val_only = false;
1611 wi->is_lhs = false;
1612 wi->changed = false;
1613 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1614 wi->val_only = save_val_only;
1616 /* If we converted anything ... */
1617 if (wi->changed)
1619 tree save_context;
1621 /* Then the frame decl is now addressable. */
1622 TREE_ADDRESSABLE (info->frame_decl) = 1;
1624 save_context = current_function_decl;
1625 current_function_decl = info->context;
1626 recompute_tree_invariant_for_addr_expr (t);
1627 current_function_decl = save_context;
1629 /* If we are in a context where we only accept values, then
1630 compute the address into a temporary. */
1631 if (save_val_only)
1632 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1633 t, &wi->gsi);
1635 break;
1637 case REALPART_EXPR:
1638 case IMAGPART_EXPR:
1639 case COMPONENT_REF:
1640 case ARRAY_REF:
1641 case ARRAY_RANGE_REF:
1642 case BIT_FIELD_REF:
1643 /* Go down this entire nest and just look at the final prefix and
1644 anything that describes the references. Otherwise, we lose track
1645 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1646 save_val_only = wi->val_only;
1647 wi->val_only = true;
1648 wi->is_lhs = false;
1649 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1651 if (TREE_CODE (t) == COMPONENT_REF)
1652 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1653 NULL);
1654 else if (TREE_CODE (t) == ARRAY_REF
1655 || TREE_CODE (t) == ARRAY_RANGE_REF)
1657 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1658 NULL);
1659 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1660 NULL);
1661 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1662 NULL);
1665 wi->val_only = false;
1666 walk_tree (tp, convert_local_reference_op, wi, NULL);
1667 wi->val_only = save_val_only;
1668 break;
1670 case MEM_REF:
1671 save_val_only = wi->val_only;
1672 wi->val_only = true;
1673 wi->is_lhs = false;
1674 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1675 wi, NULL);
1676 /* We need to re-fold the MEM_REF as component references as
1677 part of a ADDR_EXPR address are not allowed. But we cannot
1678 fold here, as the chain record type is not yet finalized. */
1679 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1680 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1681 info->mem_refs->add (tp);
1682 wi->val_only = save_val_only;
1683 break;
1685 case VIEW_CONVERT_EXPR:
1686 /* Just request to look at the subtrees, leaving val_only and lhs
1687 untouched. This might actually be for !val_only + lhs, in which
1688 case we don't want to force a replacement by a temporary. */
1689 *walk_subtrees = 1;
1690 break;
1692 default:
1693 if (!IS_TYPE_OR_DECL_P (t))
1695 *walk_subtrees = 1;
1696 wi->val_only = true;
1697 wi->is_lhs = false;
1699 break;
1702 return NULL_TREE;
1705 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1706 struct walk_stmt_info *);
1708 /* Helper for convert_local_reference. Convert all the references in
1709 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1711 static bool
1712 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1714 struct nesting_info *const info = (struct nesting_info *) wi->info;
1715 bool need_frame = false, need_stmts = false;
1716 tree clause, decl;
1717 int dummy;
1718 bitmap new_suppress;
1720 new_suppress = BITMAP_GGC_ALLOC ();
1721 bitmap_copy (new_suppress, info->suppress_expansion);
1723 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1725 switch (OMP_CLAUSE_CODE (clause))
1727 case OMP_CLAUSE_REDUCTION:
1728 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1729 need_stmts = true;
1730 goto do_decl_clause;
1732 case OMP_CLAUSE_LASTPRIVATE:
1733 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1734 need_stmts = true;
1735 goto do_decl_clause;
1737 case OMP_CLAUSE_LINEAR:
1738 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1739 need_stmts = true;
1740 wi->val_only = true;
1741 wi->is_lhs = false;
1742 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1743 wi);
1744 goto do_decl_clause;
1746 case OMP_CLAUSE_PRIVATE:
1747 case OMP_CLAUSE_FIRSTPRIVATE:
1748 case OMP_CLAUSE_COPYPRIVATE:
1749 case OMP_CLAUSE_SHARED:
1750 do_decl_clause:
1751 decl = OMP_CLAUSE_DECL (clause);
1752 if (TREE_CODE (decl) == VAR_DECL
1753 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1754 break;
1755 if (decl_function_context (decl) == info->context
1756 && !use_pointer_in_frame (decl))
1758 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1759 if (field)
1761 bitmap_set_bit (new_suppress, DECL_UID (decl));
1762 OMP_CLAUSE_DECL (clause)
1763 = get_local_debug_decl (info, decl, field);
1764 need_frame = true;
1767 break;
1769 case OMP_CLAUSE_SCHEDULE:
1770 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1771 break;
1772 /* FALLTHRU */
1773 case OMP_CLAUSE_FINAL:
1774 case OMP_CLAUSE_IF:
1775 case OMP_CLAUSE_NUM_THREADS:
1776 case OMP_CLAUSE_DEPEND:
1777 case OMP_CLAUSE_DEVICE:
1778 case OMP_CLAUSE_NUM_TEAMS:
1779 case OMP_CLAUSE_THREAD_LIMIT:
1780 case OMP_CLAUSE_SAFELEN:
1781 case OMP_CLAUSE_SIMDLEN:
1782 case OMP_CLAUSE_PRIORITY:
1783 case OMP_CLAUSE_GRAINSIZE:
1784 case OMP_CLAUSE_NUM_TASKS:
1785 case OMP_CLAUSE_HINT:
1786 case OMP_CLAUSE__CILK_FOR_COUNT_:
1787 wi->val_only = true;
1788 wi->is_lhs = false;
1789 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1790 wi);
1791 break;
1793 case OMP_CLAUSE_DIST_SCHEDULE:
1794 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1796 wi->val_only = true;
1797 wi->is_lhs = false;
1798 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1799 &dummy, wi);
1801 break;
1803 case OMP_CLAUSE_MAP:
1804 case OMP_CLAUSE_TO:
1805 case OMP_CLAUSE_FROM:
1806 if (OMP_CLAUSE_SIZE (clause))
1808 wi->val_only = true;
1809 wi->is_lhs = false;
1810 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1811 &dummy, wi);
1813 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1814 goto do_decl_clause;
1815 wi->val_only = true;
1816 wi->is_lhs = false;
1817 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1818 wi, NULL);
1819 break;
1821 case OMP_CLAUSE_ALIGNED:
1822 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1824 wi->val_only = true;
1825 wi->is_lhs = false;
1826 convert_local_reference_op
1827 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1829 /* Like do_decl_clause, but don't add any suppression. */
1830 decl = OMP_CLAUSE_DECL (clause);
1831 if (TREE_CODE (decl) == VAR_DECL
1832 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1833 break;
1834 if (decl_function_context (decl) == info->context
1835 && !use_pointer_in_frame (decl))
1837 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1838 if (field)
1840 OMP_CLAUSE_DECL (clause)
1841 = get_local_debug_decl (info, decl, field);
1842 need_frame = true;
1845 break;
1847 case OMP_CLAUSE_NOWAIT:
1848 case OMP_CLAUSE_ORDERED:
1849 case OMP_CLAUSE_DEFAULT:
1850 case OMP_CLAUSE_COPYIN:
1851 case OMP_CLAUSE_COLLAPSE:
1852 case OMP_CLAUSE_UNTIED:
1853 case OMP_CLAUSE_MERGEABLE:
1854 case OMP_CLAUSE_PROC_BIND:
1855 case OMP_CLAUSE_NOGROUP:
1856 case OMP_CLAUSE_THREADS:
1857 case OMP_CLAUSE_SIMD:
1858 case OMP_CLAUSE_DEFAULTMAP:
1859 break;
1861 default:
1862 gcc_unreachable ();
1866 info->suppress_expansion = new_suppress;
1868 if (need_stmts)
1869 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1870 switch (OMP_CLAUSE_CODE (clause))
1872 case OMP_CLAUSE_REDUCTION:
1873 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1875 tree old_context
1876 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1877 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1878 = info->context;
1879 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1880 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1881 = info->context;
1882 walk_body (convert_local_reference_stmt,
1883 convert_local_reference_op, info,
1884 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1885 walk_body (convert_local_reference_stmt,
1886 convert_local_reference_op, info,
1887 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1888 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1889 = old_context;
1890 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1891 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1892 = old_context;
1894 break;
1896 case OMP_CLAUSE_LASTPRIVATE:
1897 walk_body (convert_local_reference_stmt,
1898 convert_local_reference_op, info,
1899 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1900 break;
1902 case OMP_CLAUSE_LINEAR:
1903 walk_body (convert_local_reference_stmt,
1904 convert_local_reference_op, info,
1905 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1906 break;
1908 default:
1909 break;
1912 return need_frame;
1916 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1917 and PARM_DECLs that were referenced by inner nested functions.
1918 The rewrite will be a structure reference to the local frame variable. */
1920 static tree
1921 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1922 struct walk_stmt_info *wi)
1924 struct nesting_info *info = (struct nesting_info *) wi->info;
1925 tree save_local_var_chain;
1926 bitmap save_suppress;
1927 gimple stmt = gsi_stmt (*gsi);
1929 switch (gimple_code (stmt))
1931 case GIMPLE_OMP_PARALLEL:
1932 case GIMPLE_OMP_TASK:
1933 save_suppress = info->suppress_expansion;
1934 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1935 wi))
1937 tree c;
1938 (void) get_frame_type (info);
1939 c = build_omp_clause (gimple_location (stmt),
1940 OMP_CLAUSE_SHARED);
1941 OMP_CLAUSE_DECL (c) = info->frame_decl;
1942 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1943 gimple_omp_taskreg_set_clauses (stmt, c);
1946 save_local_var_chain = info->new_local_var_chain;
1947 info->new_local_var_chain = NULL;
1949 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1950 gimple_omp_body_ptr (stmt));
1952 if (info->new_local_var_chain)
1953 declare_vars (info->new_local_var_chain,
1954 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1955 info->new_local_var_chain = save_local_var_chain;
1956 info->suppress_expansion = save_suppress;
1957 break;
1959 case GIMPLE_OMP_FOR:
1960 save_suppress = info->suppress_expansion;
1961 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1962 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1963 convert_local_reference_stmt,
1964 convert_local_reference_op, info);
1965 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1966 info, gimple_omp_body_ptr (stmt));
1967 info->suppress_expansion = save_suppress;
1968 break;
1970 case GIMPLE_OMP_SECTIONS:
1971 save_suppress = info->suppress_expansion;
1972 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1973 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1974 info, gimple_omp_body_ptr (stmt));
1975 info->suppress_expansion = save_suppress;
1976 break;
1978 case GIMPLE_OMP_SINGLE:
1979 save_suppress = info->suppress_expansion;
1980 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1981 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1982 info, gimple_omp_body_ptr (stmt));
1983 info->suppress_expansion = save_suppress;
1984 break;
1986 case GIMPLE_OMP_TARGET:
1987 if (!is_gimple_omp_offloaded (stmt))
1989 save_suppress = info->suppress_expansion;
1990 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1991 info->suppress_expansion = save_suppress;
1992 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1993 info, gimple_omp_body_ptr (stmt));
1994 break;
1996 save_suppress = info->suppress_expansion;
1997 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
1999 tree c;
2000 (void) get_frame_type (info);
2001 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2002 OMP_CLAUSE_DECL (c) = info->frame_decl;
2003 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2004 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2005 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2006 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2009 save_local_var_chain = info->new_local_var_chain;
2010 info->new_local_var_chain = NULL;
2012 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2013 gimple_omp_body_ptr (stmt));
2015 if (info->new_local_var_chain)
2016 declare_vars (info->new_local_var_chain,
2017 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2018 info->new_local_var_chain = save_local_var_chain;
2019 info->suppress_expansion = save_suppress;
2020 break;
2022 case GIMPLE_OMP_TEAMS:
2023 save_suppress = info->suppress_expansion;
2024 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2025 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2026 info, gimple_omp_body_ptr (stmt));
2027 info->suppress_expansion = save_suppress;
2028 break;
2030 case GIMPLE_OMP_SECTION:
2031 case GIMPLE_OMP_MASTER:
2032 case GIMPLE_OMP_TASKGROUP:
2033 case GIMPLE_OMP_ORDERED:
2034 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2035 info, gimple_omp_body_ptr (stmt));
2036 break;
2038 case GIMPLE_COND:
2039 wi->val_only = true;
2040 wi->is_lhs = false;
2041 *handled_ops_p = false;
2042 return NULL_TREE;
2044 case GIMPLE_ASSIGN:
2045 if (gimple_clobber_p (stmt))
2047 tree lhs = gimple_assign_lhs (stmt);
2048 if (!use_pointer_in_frame (lhs)
2049 && lookup_field_for_decl (info, lhs, NO_INSERT))
2051 gsi_replace (gsi, gimple_build_nop (), true);
2052 break;
2055 *handled_ops_p = false;
2056 return NULL_TREE;
2058 case GIMPLE_BIND:
2059 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2060 var;
2061 var = DECL_CHAIN (var))
2062 if (TREE_CODE (var) == NAMELIST_DECL)
2064 /* Adjust decls mentioned in NAMELIST_DECL. */
2065 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2066 tree decl;
2067 unsigned int i;
2069 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2071 if (TREE_CODE (decl) == VAR_DECL
2072 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2073 continue;
2074 if (decl_function_context (decl) == info->context
2075 && !use_pointer_in_frame (decl))
2077 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2078 if (field)
2080 CONSTRUCTOR_ELT (decls, i)->value
2081 = get_local_debug_decl (info, decl, field);
2087 *handled_ops_p = false;
2088 return NULL_TREE;
2090 default:
2091 /* For every other statement that we are not interested in
2092 handling here, let the walker traverse the operands. */
2093 *handled_ops_p = false;
2094 return NULL_TREE;
2097 /* Indicate that we have handled all the operands ourselves. */
2098 *handled_ops_p = true;
2099 return NULL_TREE;
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  Returns NULL_TREE always; progress
   is communicated through *HANDLED_OPS_P and by replacing the statement
   at GSI in place.  */

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple stmt = gsi_stmt (*gsi);

  /* Only GIMPLE_GOTO statements are of interest here; let the generic
     walker handle everything else.  */
  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Computed gotos (destination not a LABEL_DECL) are not rewritten.  */
  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label in the current function is an ordinary goto.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Walk up the nesting chain to the function that owns the label.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x, target_context);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label, target_context), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
   are referenced via nonlocal goto from a nested function.  The rewrite
   will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  The var_map entries consumed here were
   created by convert_nl_goto_reference in a prior pass.  */

static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));

  /* Only label statements are of interest.  */
  if (!stmt)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (stmt);

  /* Labels with no recorded non-local twin need no receiver.  */
  tree *slot = info->var_map->get (label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = wi->gsi;
  gsi_prev (&tmp_gsi);
  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
    {
      gimple stmt = gimple_build_goto (label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  /* Install the DECL_NONLOCAL receiver label just before the user label.  */
  new_label = (tree) *slot;
  stmt = gimple_build_label (new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference a trampoline generated for the occasion.
   TP is the tree being visited; DATA carries the walk_stmt_info whose
   info field is the current nesting_info.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x, target_context);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      *tp = x;
      break;

    default:
      /* Keep descending into subtrees unless we hit a type or decl,
	 which cannot contain an ADDR_EXPR of a nested function.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference a trampoline
   generated for the occasion.  This statement-level hook decides which
   operands are walked with convert_tramp_reference_op.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions behave like ordinary statements.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      {
	/* The OMP body becomes a separate function, so any temporaries
	   created for trampoline references inside it must be declared
	   in that body, not in the enclosing function.  */
	tree save_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	save_local_var_chain = info->new_local_var_chain;
	info->new_local_var_chain = NULL;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	info->new_local_var_chain = save_local_var_chain;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  For OMP constructs this also adds
   the CHAIN.* / FRAME.* decls to the construct's clause lists so the
   outlined region receives them.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* A chain already set up (e.g. an indirect call through a
	 trampoline-adjusted pointer) needs no further work.  */
      if (gimple_call_chain (stmt))
	break;
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0 records a chain to our own frame, bit 1 a chain
	     obtained from an outer frame.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* The chain pointer is firstprivate (copied in); the frame
		 itself is shared with the outlined region.  */
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* Map the chain pointer to the device, and the frame both
		 ways so non-local writes are visible after the region.  */
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  Iterates to a
   fixed point because each rewrite can itself introduce new static-chain
   uses in other functions.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}
    }
  /* Stop once the number of functions needing a static chain is stable.  */
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
/* Extended copy_body_data used when remapping VLA and debug-variable
   types: carries the nesting_info whose var_map supplies the decl
   replacements for nesting_copy_decl.  */

struct nesting_copy_body_data
{
  copy_body_data cb;		/* Base inliner callback data; must be first
				   so the struct can be cast to/from it.  */
  struct nesting_info *root;	/* Nesting info supplying var_map lookups.  */
};
2555 /* A helper subroutine for debug_var_chain type remapping. */
2557 static tree
2558 nesting_copy_decl (tree decl, copy_body_data *id)
2560 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2561 tree *slot = nid->root->var_map->get (decl);
2563 if (slot)
2564 return (tree) *slot;
2566 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2568 tree new_decl = copy_decl_no_change (decl, id);
2569 DECL_ORIGINAL_TYPE (new_decl)
2570 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2571 return new_decl;
2574 if (TREE_CODE (decl) == VAR_DECL
2575 || TREE_CODE (decl) == PARM_DECL
2576 || TREE_CODE (decl) == RESULT_DECL)
2577 return decl;
2579 return copy_decl_no_change (decl, id);
2582 /* A helper function for remap_vla_decls. See if *TP contains
2583 some remapped variables. */
2585 static tree
2586 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2588 struct nesting_info *root = (struct nesting_info *) data;
2589 tree t = *tp;
2591 if (DECL_P (t))
2593 *walk_subtrees = 0;
2594 tree *slot = root->var_map->get (t);
2596 if (slot)
2597 return *slot;
2599 return NULL;
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  The first loop only detects whether any work is needed;
   the second performs the remapping with a nesting_copy_body_data.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Recurse into subblocks first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* Cheap scan: stop at the first VLA-like variable whose value
     expression or type mentions a remapped decl.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	/* Only *ptr-style value expressions of variably modified type
	   (the gimplifier's VLA representation) are of interest.  */
	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Resume from the variable found above and remap it and any later
     qualifying variables on the chain.  */
  for (; var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting level that owns VAR; skip it if it belongs
	   to a function outside this nesting tree.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Strip matching unnamed pointer layers so the TYPE_NAME
	   comparison below looks at corresponding named types.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
2700 /* Fold the MEM_REF *E. */
2701 bool
2702 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2704 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2705 *ref_p = fold (*ref_p);
2706 return true;
/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  Operates on a single nesting level
   ROOT; finalize_nesting_tree applies it to the whole tree.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it.  */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &DECL_CHAIN (*adjust))
	gcc_assert (DECL_CHAIN (*adjust));
      *adjust = DECL_CHAIN (*adjust);

      DECL_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Store either the parameter's address or its value into the
	     frame, depending on how nested functions access it.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  gcc_assert (DECL_STATIC_CHAIN (i->context));
	  arg3 = build_addr (root->frame_decl, context);

	  arg2 = build_addr (i->context, context);

	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  /* __builtin_init_trampoline (&frame.tramp, nested_fn, &frame).  */
	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Find the first debug decl with a variably modified type, if any.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost context the type depends on.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Strip matching unnamed pointer layers before the
		   TYPE_NAME comparison.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
2934 static void
2935 finalize_nesting_tree (struct nesting_info *root)
2937 struct nesting_info *n;
2938 FOR_EACH_NEST_INFO (n, root)
2939 finalize_nesting_tree_1 (n);
2942 /* Unnest the nodes and pass them to cgraph. */
2944 static void
2945 unnest_nesting_tree_1 (struct nesting_info *root)
2947 struct cgraph_node *node = cgraph_node::get (root->context);
2949 /* For nested functions update the cgraph to reflect unnesting.
2950 We also delay finalizing of these functions up to this point. */
2951 if (node->origin)
2953 node->unnest ();
2954 cgraph_node::finalize_function (root->context, true);
2958 static void
2959 unnest_nesting_tree (struct nesting_info *root)
2961 struct nesting_info *n;
2962 FOR_EACH_NEST_INFO (n, root)
2963 unnest_nesting_tree_1 (n);
2966 /* Free the data structures allocated during this pass. */
2968 static void
2969 free_nesting_tree (struct nesting_info *root)
2971 struct nesting_info *node, *next;
2973 node = iter_nestinfo_start (root);
2976 next = iter_nestinfo_next (node);
2977 delete node->var_map;
2978 delete node->field_map;
2979 delete node->mem_refs;
2980 free (node);
2981 node = next;
2983 while (node);
2986 /* Gimplify a function and all its nested functions. */
2987 static void
2988 gimplify_all_functions (struct cgraph_node *root)
2990 struct cgraph_node *iter;
2991 if (!gimple_body (root->decl))
2992 gimplify_function_tree (root->decl);
2993 for (iter = root->nested; iter; iter = iter->next_nested)
2994 gimplify_all_functions (iter);
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  The
   phases run in a fixed order: gimplify everything, rewrite non-local
   then local references, handle non-local gotos, fix up calls and
   trampolines, finalize frames, and finally unnest into the cgraph.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  gimplify_all_functions (cgn);

  dump_file = dump_begin (TDI_nested, &dump_flags);
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  /* One obstack serves all the nesting_info bitmaps for this pass.  */
  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Rewrite uses of outer-function entities inside nested functions,
     then uses of local entities that escape into inner functions.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      dump_file = NULL;
    }
}