libgomp: Use pthread mutexes in the nvptx plugin.
[official-gcc.git] / gcc / tree-nested.c
blob60066fccf800e0e46f05df6a5ccf9330d0b4c5cb
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "hash-set.h"
25 #include "machmode.h"
26 #include "vec.h"
27 #include "double-int.h"
28 #include "input.h"
29 #include "alias.h"
30 #include "symtab.h"
31 #include "wide-int.h"
32 #include "inchash.h"
33 #include "tree.h"
34 #include "fold-const.h"
35 #include "stringpool.h"
36 #include "stor-layout.h"
37 #include "tm_p.h"
38 #include "hard-reg-set.h"
39 #include "input.h"
40 #include "function.h"
41 #include "tree-dump.h"
42 #include "tree-inline.h"
43 #include "predict.h"
44 #include "basic-block.h"
45 #include "tree-ssa-alias.h"
46 #include "internal-fn.h"
47 #include "gimple-expr.h"
48 #include "is-a.h"
49 #include "gimple.h"
50 #include "gimplify.h"
51 #include "gimple-iterator.h"
52 #include "gimple-walk.h"
53 #include "tree-iterator.h"
54 #include "bitmap.h"
55 #include "hash-map.h"
56 #include "plugin-api.h"
57 #include "ipa-ref.h"
58 #include "cgraph.h"
59 #include "tree-cfg.h"
60 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
61 #include "langhooks.h"
62 #include "gimple-low.h"
65 /* The object of this pass is to lower the representation of a set of nested
66 functions in order to expose all of the gory details of the various
67 nonlocal references. We want to do this sooner rather than later, in
68 order to give us more freedom in emitting all of the functions in question.
70 Back in olden times, when gcc was young, we developed an insanely
71 complicated scheme whereby variables which were referenced nonlocally
72 were forced to live in the stack of the declaring function, and then
73 the nested functions magically discovered where these variables were
74 placed. In order for this scheme to function properly, it required
75 that the outer function be partially expanded, then we switch to
76 compiling the inner function, and once done with those we switch back
77 to compiling the outer function. Such delicate ordering requirements
78 makes it difficult to do whole translation unit optimizations
79 involving such functions.
81 The implementation here is much more direct. Everything that can be
82 referenced by an inner function is a member of an explicitly created
83 structure herein called the "nonlocal frame struct". The incoming
84 static chain for a nested function is a pointer to this struct in
85 the parent. In this way, we settle on known offsets from a known
86 base, and so are decoupled from the logic that places objects in the
87 function's stack frame. More importantly, we don't have to wait for
88 that to happen -- since the compilation of the inner function is no
89 longer tied to a real stack frame, the nonlocal frame struct can be
90 allocated anywhere. Which means that the outer function is now
91 inlinable.
93 Theory of operation here is very simple. Iterate over all the
94 statements in all the functions (depth first) several times,
95 allocating structures and fields on demand. In general we want to
96 examine inner functions first, so that we can avoid making changes
97 to outer functions which are unnecessary.
99 The order of the passes matters a bit, in that later passes will be
100 skipped if it is discovered that the functions don't actually interact
101 at all. That is, they're nested in the lexical sense but could have
102 been written as independent functions without change. */
105 struct nesting_info
/* Links forming the nesting tree: the enclosing function's info, the
   first function nested directly inside this one, and the next sibling
   nested at the same level.  */
107   struct nesting_info *outer;
108   struct nesting_info *inner;
109   struct nesting_info *next;
/* field_map: non-locally referenced DECL -> its FIELD_DECL in this
   context's frame struct.  var_map: DECL -> local replacement decl
   (also used for trampoline fields).  mem_refs: addresses of rewritten
   memory-reference trees.  suppress_expansion: DECL_UIDs whose
   expansion is suppressed (used by the OMP-clause handling).  */
111   hash_map<tree, tree> *field_map;
112   hash_map<tree, tree> *var_map;
113   hash_set<tree *> *mem_refs;
114   bitmap suppress_expansion;
/* The FUNCTION_DECL this info describes, plus decls created during
   lowering: new locals to register, debug replacements, the non-local
   frame RECORD_TYPE and its VAR_DECL, the static-chain field/decl, and
   the non-local-goto save-area field.  */
116   tree context;
117   tree new_local_var_chain;
118   tree debug_var_chain;
119   tree frame_type;
120   tree frame_decl;
121   tree chain_field;
122   tree chain_decl;
123   tree nl_goto_field;
/* Flags: whether any PARM_DECL was remapped into the frame, whether a
   trampoline field was created, and which static chains were added.  */
125   bool any_parm_remapped;
126   bool any_tramp_created;
127   char static_chain_added;
131 /* Iterate over the nesting tree, starting with ROOT, depth first.  */
/* Descend to the innermost (deepest-first) node so that iteration
   visits inner functions before their enclosing functions.  */
133 static inline struct nesting_info *
134 iter_nestinfo_start (struct nesting_info *root)
136   while (root->inner)
137     root = root->inner;
138   return root;
/* Successor in the depth-first order: start into the next sibling's
   subtree, otherwise move up to the enclosing function.  */
141 static inline struct nesting_info *
142 iter_nestinfo_next (struct nesting_info *node)
144   if (node->next)
145     return iter_nestinfo_start (node->next);
146   return node->outer;
149 #define FOR_EACH_NEST_INFO(I, ROOT) \
150   for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
152 /* Obstack used for the bitmaps in the struct above.  */
153 static struct bitmap_obstack nesting_info_bitmap_obstack;
156 /* We're working in so many different function contexts simultaneously,
157    that create_tmp_var is dangerous.  Prevent mishap.  */
158 #define create_tmp_var cant_use_create_tmp_var_here_dummy
160 /* Like create_tmp_var, except record the variable for registration at
161    the given nesting level.  */
163 static tree
164 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
166   tree tmp_var;
168   /* If the type is of variable size or a type which must be created by the
169      frontend, something is wrong.  Note that we explicitly allow
170      incomplete types here, since we create them ourselves here.  */
171   gcc_assert (!TREE_ADDRESSABLE (type));
172   gcc_assert (!TYPE_SIZE_UNIT (type)
173 	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
175   tmp_var = create_tmp_var_raw (type, prefix);
/* Attribute the temporary to INFO's function and thread it onto the
   list of new locals to be registered for that function later.  */
176   DECL_CONTEXT (tmp_var) = info->context;
177   DECL_CHAIN (tmp_var) = info->new_local_var_chain;
178   DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
179   if (TREE_CODE (type) == COMPLEX_TYPE
180       || TREE_CODE (type) == VECTOR_TYPE)
181     DECL_GIMPLE_REG_P (tmp_var) = 1;
183   info->new_local_var_chain = tmp_var;
185   return tmp_var;
188 /* Take the address of EXP to be used within function CONTEXT.
189    Mark it for addressability as necessary.  */
191 tree
192 build_addr (tree exp, tree context)
194   tree base = exp;
195   tree save_context;
196   tree retval;
/* Strip component references to find the underlying decl, which must
   be marked addressable before its address is taken.  */
198   while (handled_component_p (base))
199     base = TREE_OPERAND (base, 0);
201   if (DECL_P (base))
202     TREE_ADDRESSABLE (base) = 1;
204   /* Building the ADDR_EXPR will compute a set of properties for
205      that ADDR_EXPR.  Those properties are unfortunately context
206      specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
208      Temporarily set CURRENT_FUNCTION_DECL to the desired context,
209      build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
210      way the properties are for the ADDR_EXPR are computed properly.  */
211   save_context = current_function_decl;
212   current_function_decl = context;
213   retval = build_fold_addr_expr (exp);
214   current_function_decl = save_context;
215   return retval;
218 /* Insert FIELD into TYPE, sorted by alignment requirements.  */
220 void
221 insert_field_into_struct (tree type, tree field)
223   tree *p;
225   DECL_CONTEXT (field) = type;
/* Keep TYPE_FIELDS ordered by decreasing alignment: insert FIELD
   before the first existing field with alignment <= FIELD's, which
   minimizes padding in the frame struct.  */
227   for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
228     if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
229       break;
231   DECL_CHAIN (field) = *p;
232   *p = field;
234   /* Set correct alignment for frame struct type.  */
235   if (TYPE_ALIGN (type) < DECL_ALIGN (field))
236     TYPE_ALIGN (type) = DECL_ALIGN (field);
239 /* Build or return the RECORD_TYPE that describes the frame state that is
240    shared between INFO->CONTEXT and its nested functions.  This record will
241    not be complete until finalize_nesting_tree; up until that point we'll
242    be adding fields as necessary.
244    We also build the DECL that represents this frame in the function.  */
246 static tree
247 get_frame_type (struct nesting_info *info)
249   tree type = info->frame_type;
/* Lazily create the record the first time it is needed; later calls
   return the cached INFO->frame_type.  */
250   if (!type)
252       char *name;
254       type = make_node (RECORD_TYPE);
/* Name the record "FRAME.<function-name>" for dumps and debug info.  */
256       name = concat ("FRAME.",
257 		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
258 		     NULL);
259       TYPE_NAME (type) = get_identifier (name);
260       free (name);
262       info->frame_type = type;
263       info->frame_decl = create_tmp_var_for (info, type, "FRAME");
264       DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
266       /* ??? Always make it addressable for now, since it is meant to
267 	 be pointed to by the static chain pointer.  This pessimizes
268 	 when it turns out that no static chains are needed because
269 	 the nested functions referencing non-local variables are not
270 	 reachable, but the true pessimization is to create the non-
271 	 local frame structure in the first place.  */
272       TREE_ADDRESSABLE (info->frame_decl) = 1;
274   return type;
277 /* Return true if DECL should be referenced by pointer in the non-local
278    frame structure.  */
280 static bool
281 use_pointer_in_frame (tree decl)
283   if (TREE_CODE (decl) == PARM_DECL)
285       /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
286 	 sized decls, and inefficient to copy large aggregates.  Don't bother
287 	 moving anything but scalar variables.  */
288       return AGGREGATE_TYPE_P (TREE_TYPE (decl));
290   else
292       /* Variable sized types make things "interesting" in the frame.  */
/* Non-PARM decls go by pointer only when their size is not a
   compile-time constant (e.g. VLAs).  */
293       return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
297 /* Given DECL, a non-locally accessed variable, find or create a field
298    in the non-local frame structure for the given nesting context.  */
300 static tree
301 lookup_field_for_decl (struct nesting_info *info, tree decl,
302 		       enum insert_option insert)
/* With NO_INSERT, this is a pure query: return the existing field or
   NULL_TREE without modifying the frame struct.  */
304   if (insert == NO_INSERT)
306       tree *slot = info->field_map->get (decl);
307       return slot ? *slot : NULL_TREE;
310   tree *slot = &info->field_map->get_or_insert (decl);
311   if (!*slot)
313       tree field = make_node (FIELD_DECL);
314       DECL_NAME (field) = DECL_NAME (decl);
/* Decls referenced by pointer get a pointer-typed field; others are
   copied into the frame with the decl's own type and attributes.  */
316       if (use_pointer_in_frame (decl))
318 	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
319 	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
320 	  DECL_NONADDRESSABLE_P (field) = 1;
322       else
324 	  TREE_TYPE (field) = TREE_TYPE (decl);
325 	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
326 	  DECL_ALIGN (field) = DECL_ALIGN (decl);
327 	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
328 	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
329 	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
330 	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
333       insert_field_into_struct (get_frame_type (info), field);
334       *slot = field;
/* Remember that a parameter was remapped: its incoming value must be
   copied into the frame at function entry.  */
336       if (TREE_CODE (decl) == PARM_DECL)
337 	info->any_parm_remapped = true;
340   return *slot;
343 /* Build or return the variable that holds the static chain within
344    INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */
346 static tree
347 get_chain_decl (struct nesting_info *info)
349   tree decl = info->chain_decl;
351   if (!decl)
353       tree type;
/* The chain points at the enclosing function's frame struct.  */
355       type = get_frame_type (info->outer);
356       type = build_pointer_type (type);
358       /* Note that this variable is *not* entered into any BIND_EXPR;
359 	 the construction of this variable is handled specially in
360 	 expand_function_start and initialize_inlined_parameters.
361 	 Note also that it's represented as a parameter.  This is more
362 	 close to the truth, since the initial value does come from
363 	 the caller.  */
364       decl = build_decl (DECL_SOURCE_LOCATION (info->context),
365 			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
366       DECL_ARTIFICIAL (decl) = 1;
367       DECL_IGNORED_P (decl) = 1;
368       TREE_USED (decl) = 1;
369       DECL_CONTEXT (decl) = info->context;
370       DECL_ARG_TYPE (decl) = type;
372       /* Tell tree-inline.c that we never write to this variable, so
373 	 it can copy-prop the replacement value immediately.  */
374       TREE_READONLY (decl) = 1;
376       info->chain_decl = decl;
/* Dump only on the transition from "no static chain" to "has one".  */
378       if (dump_file
379 	  && (dump_flags & TDF_DETAILS)
380 	  && !DECL_STATIC_CHAIN (info->context))
381 	fprintf (dump_file, "Setting static-chain for %s\n",
382 		 lang_hooks.decl_printable_name (info->context, 2));
384       DECL_STATIC_CHAIN (info->context) = 1;
386   return decl;
389 /* Build or return the field within the non-local frame state that holds
390    the static chain for INFO->CONTEXT.  This is the way to walk back up
391    multiple nesting levels.  */
393 static tree
394 get_chain_field (struct nesting_info *info)
396   tree field = info->chain_field;
398   if (!field)
/* Lazily add a "__chain" pointer-to-outer-frame field to this
   context's own frame struct.  */
400       tree type = build_pointer_type (get_frame_type (info->outer));
402       field = make_node (FIELD_DECL);
403       DECL_NAME (field) = get_identifier ("__chain");
404       TREE_TYPE (field) = type;
405       DECL_ALIGN (field) = TYPE_ALIGN (type);
406       DECL_NONADDRESSABLE_P (field) = 1;
408       insert_field_into_struct (get_frame_type (info), field);
410       info->chain_field = field;
/* As in get_chain_decl: note the static-chain requirement, dumping
   only on first transition.  */
412       if (dump_file
413 	  && (dump_flags & TDF_DETAILS)
414 	  && !DECL_STATIC_CHAIN (info->context))
415 	fprintf (dump_file, "Setting static-chain for %s\n",
416 		 lang_hooks.decl_printable_name (info->context, 2));
418       DECL_STATIC_CHAIN (info->context) = 1;
420   return field;
423 /* Initialize a new temporary with the GIMPLE_CALL STMT.  */
425 static tree
426 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
427 			gcall *call)
429   tree t;
/* Create a temporary of the call's return type, make it the call's
   LHS, and insert the call before GSI (inheriting its location).  */
431   t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
432   gimple_call_set_lhs (call, t);
433   if (! gsi_end_p (*gsi))
434     gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
435   gsi_insert_before (gsi, call, GSI_SAME_STMT);
437   return t;
441 /* Copy EXP into a temporary.  Allocate the temporary in the context of
442    INFO and insert the initialization statement before GSI.  */
444 static tree
445 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
447   tree t;
448   gimple stmt;
450   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
451   stmt = gimple_build_assign (t, exp);
452   if (! gsi_end_p (*gsi))
453     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
/* "without_update" variant: caller is responsible for any operand
   re-scanning of the inserted assignment.  */
454   gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
456   return t;
460 /* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
462 static tree
463 gsi_gimplify_val (struct nesting_info *info, tree exp,
464 		  gimple_stmt_iterator *gsi)
/* Already a GIMPLE value: nothing to do.  Otherwise materialize EXP
   into a fresh temporary before GSI.  */
466   if (is_gimple_val (exp))
467     return exp;
468   else
469     return init_tmp_var (info, exp, gsi);
472 /* Similarly, but copy from the temporary and insert the statement
473    after the iterator.  */
475 static tree
476 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
478   tree t;
479   gimple stmt;
/* Note the assignment direction: EXP = t, i.e. the temporary is the
   source and is stored back into EXP after the current statement —
   used for rewriting left-hand sides.  */
481   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
482   stmt = gimple_build_assign (exp, t);
483   if (! gsi_end_p (*gsi))
484     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
485   gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
487   return t;
490 /* Build or return the type used to represent a nested function trampoline.  */
492 static GTY(()) tree trampoline_type;
494 static tree
495 get_trampoline_type (struct nesting_info *info)
497   unsigned align, size;
498   tree t;
/* The type is target-dependent but identical for every trampoline, so
   it is built once and cached in the GC root above.  */
500   if (trampoline_type)
501     return trampoline_type;
503   align = TRAMPOLINE_ALIGNMENT;
504   size = TRAMPOLINE_SIZE;
506   /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
507      then allocate extra space so that we can do dynamic alignment.  */
508   if (align > STACK_BOUNDARY)
510       size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
511       align = STACK_BOUNDARY;
/* The trampoline is a struct containing a single char array "__data"
   of SIZE bytes aligned to ALIGN.  */
514   t = build_index_type (size_int (size - 1));
515   t = build_array_type (char_type_node, t);
516   t = build_decl (DECL_SOURCE_LOCATION (info->context),
517 		  FIELD_DECL, get_identifier ("__data"), t);
518   DECL_ALIGN (t) = align;
519   DECL_USER_ALIGN (t) = 1;
521   trampoline_type = make_node (RECORD_TYPE);
522   TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
523   TYPE_FIELDS (trampoline_type) = t;
524   layout_type (trampoline_type);
525   DECL_CONTEXT (t) = trampoline_type;
527   return trampoline_type;
530 /* Given DECL, a nested function, find or create a field in the non-local
531    frame structure for a trampoline for this function.  */
533 static tree
534 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
535 		       enum insert_option insert)
/* Same query/insert protocol as lookup_field_for_decl, but keyed in
   var_map and creating a trampoline-typed field.  */
537   if (insert == NO_INSERT)
539       tree *slot = info->var_map->get (decl);
540       return slot ? *slot : NULL_TREE;
543   tree *slot = &info->var_map->get_or_insert (decl);
544   if (!*slot)
546       tree field = make_node (FIELD_DECL);
547       DECL_NAME (field) = DECL_NAME (decl);
548       TREE_TYPE (field) = get_trampoline_type (info);
/* Trampolines are executed in place, so their address is taken.  */
549       TREE_ADDRESSABLE (field) = 1;
551       insert_field_into_struct (get_frame_type (info), field);
552       *slot = field;
554       info->any_tramp_created = true;
557   return *slot;
560 /* Build or return the field within the non-local frame state that holds
561    the non-local goto "jmp_buf".  The buffer itself is maintained by the
562    rtl middle-end as dynamic stack space is allocated.  */
564 static tree
565 get_nl_goto_field (struct nesting_info *info)
567   tree field = info->nl_goto_field;
568   if (!field)
570       unsigned size;
571       tree type;
573       /* For __builtin_nonlocal_goto, we need N words.  The first is the
574 	 frame pointer, the rest is for the target's stack pointer save
575 	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
576 	 not the best interface, but it'll do for now.  */
577       if (Pmode == ptr_mode)
578 	type = ptr_type_node;
579       else
580 	type = lang_hooks.types.type_for_mode (Pmode, 1);
/* size = save-area words + 1 extra word for the frame pointer.  */
582       size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
583       size = size / GET_MODE_SIZE (Pmode);
584       size = size + 1;
586       type = build_array_type
587 	(type, build_index_type (size_int (size)));
589       field = make_node (FIELD_DECL);
590       DECL_NAME (field) = get_identifier ("__nl_goto_buf");
591       TREE_TYPE (field) = type;
592       DECL_ALIGN (field) = TYPE_ALIGN (type);
593       TREE_ADDRESSABLE (field) = 1;
595       insert_field_into_struct (get_frame_type (info), field);
597       info->nl_goto_field = field;
600   return field;
603 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */
605 static void
606 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
607 	   struct nesting_info *info, gimple_seq *pseq)
609   struct walk_stmt_info wi;
/* Fresh walk state: INFO is passed to the callbacks via wi.info, and
   we start in val_only mode (rvalue context).  */
611   memset (&wi, 0, sizeof (wi));
612   wi.info = info;
613   wi.val_only = true;
614   walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
618 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
620 static inline void
621 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
622 	       struct nesting_info *info)
/* The walk may replace the body sequence, so re-install it.  */
624   gimple_seq body = gimple_body (info->context);
625   walk_body (callback_stmt, callback_op, info, &body);
626   gimple_set_body (info->context, body);
629 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */
631 static void
632 walk_gimple_omp_for (gomp_for *for_stmt,
633 		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
634 		     struct nesting_info *info)
636   struct walk_stmt_info wi;
637   gimple_seq seq;
638   tree t;
639   size_t i;
641   walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
/* Statements materialized while walking the loop-control operands are
   collected through wi.gsi into SEQ and appended to the pre-body.  */
643   seq = NULL;
644   memset (&wi, 0, sizeof (wi));
645   wi.info = info;
646   wi.gsi = gsi_last (seq);
648   for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
/* Index is an lvalue context (val_only = false); initial/final bounds
   and the increment's operands are walked as values.  */
650       wi.val_only = false;
651       walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
652 		 &wi, NULL);
653       wi.val_only = true;
654       wi.is_lhs = false;
655       walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
656 		 &wi, NULL);
658       wi.val_only = true;
659       wi.is_lhs = false;
660       walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
661 		 &wi, NULL);
663       t = gimple_omp_for_incr (for_stmt, i);
664       gcc_assert (BINARY_CLASS_P (t));
665       wi.val_only = false;
666       walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
667       wi.val_only = true;
668       wi.is_lhs = false;
669       walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
672   seq = gsi_seq (wi.gsi);
673   if (!gimple_seq_empty_p (seq))
/* Give the new statements the loop's location and prepend-merge them
   into the OMP for's pre-body so they execute before the loop.  */
675       gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
676       annotate_all_with_location (seq, gimple_location (for_stmt));
677       gimple_seq_add_seq (&pre_body, seq);
678       gimple_omp_for_set_pre_body (for_stmt, pre_body);
682 /* Similarly for ROOT and all functions nested underneath, depth first.  */
684 static void
685 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
686 		    struct nesting_info *root)
688   struct nesting_info *n;
/* FOR_EACH_NEST_INFO visits inner functions before outer ones.  */
689   FOR_EACH_NEST_INFO (n, root)
690     walk_function (callback_stmt, callback_op, n);
694 /* We have to check for a fairly pathological case.  The operands of function
695    nested function are to be interpreted in the context of the enclosing
696    function.  So if any are variably-sized, they will get remapped when the
697    enclosing function is inlined.  But that remapping would also have to be
698    done in the types of the PARM_DECLs of the nested function, meaning the
699    argument types of that function will disagree with the arguments in the
700    calls to that function.  So we'd either have to make a copy of the nested
701    function corresponding to each time the enclosing function was inlined or
702    add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
703    function.  The former is not practical.  The latter would still require
704    detecting this case to know when to add the conversions.  So, for now at
705    least, we don't inline such an enclosing function.
707    We have to do that check recursively, so here return indicating whether
708    FNDECL has such a nested function.  ORIG_FN is the function we were
709    trying to inline to use for checking whether any argument is variably
710    modified by anything in it.
712    It would be better to do this in tree-inline.c so that we could give
713    the appropriate warning for why a function can't be inlined, but that's
714    too late since the nesting structure has already been flattened and
715    adding a flag just to record this fact seems a waste of a flag.  */
717 static bool
718 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
720   struct cgraph_node *cgn = cgraph_node::get (fndecl);
721   tree arg;
/* Scan each directly-nested function: any variably-modified parameter
   type, or a positive recursive check, makes FNDECL uninlinable.  */
723   for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
725       for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
726 	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
727 	  return true;
729       if (check_for_nested_with_variably_modified (cgn->decl,
730 						   orig_fndecl))
731 	return true;
734   return false;
737 /* Construct our local datastructure describing the function nesting
738    tree rooted by CGN.  */
740 static struct nesting_info *
741 create_nesting_tree (struct cgraph_node *cgn)
743   struct nesting_info *info = XCNEW (struct nesting_info);
744   info->field_map = new hash_map<tree, tree>;
745   info->var_map = new hash_map<tree, tree>;
746   info->mem_refs = new hash_set<tree *>;
747   info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
748   info->context = cgn->decl;
/* Recursively build children, pushing each onto the head of the inner
   list (so siblings end up in reverse source order).  */
750   for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
752       struct nesting_info *sub = create_nesting_tree (cgn);
753       sub->outer = info;
754       sub->next = info->inner;
755       info->inner = sub;
758   /* See discussion at check_for_nested_with_variably_modified for a
759      discussion of why this has to be here.  */
760   if (check_for_nested_with_variably_modified (info->context, info->context))
761     DECL_UNINLINABLE (info->context) = true;
763   return info;
766 /* Return an expression computing the static chain for TARGET_CONTEXT
767    from INFO->CONTEXT.  Insert any necessary computations before TSI.  */
769 static tree
770 get_static_chain (struct nesting_info *info, tree target_context,
771 		  gimple_stmt_iterator *gsi)
773   struct nesting_info *i;
774   tree x;
/* Direct parent: the chain is just the address of our own frame.  */
776   if (info->context == target_context)
778       x = build_addr (info->frame_decl, target_context);
780   else
/* Otherwise start from our incoming chain and follow one "__chain"
   field per nesting level until TARGET_CONTEXT's frame is reached,
   materializing each hop into a temporary.  */
782       x = get_chain_decl (info);
784       for (i = info->outer; i->context != target_context; i = i->outer)
786 	  tree field = get_chain_field (i);
788 	  x = build_simple_mem_ref (x);
789 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
790 	  x = init_tmp_var (info, x, gsi);
794   return x;
798 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
799    frame as seen from INFO->CONTEXT.  Insert any necessary computations
800    before GSI.  */
802 static tree
803 get_frame_field (struct nesting_info *info, tree target_context,
804 		 tree field, gimple_stmt_iterator *gsi)
806   struct nesting_info *i;
807   tree x;
809   if (info->context == target_context)
811       /* Make sure frame_decl gets created.  */
812       (void) get_frame_type (info);
813       x = info->frame_decl;
815   else
/* Walk up the static chain as in get_static_chain; the inner FIELD
   shadows the parameter here and names each level's "__chain".  */
817       x = get_chain_decl (info);
819       for (i = info->outer; i->context != target_context; i = i->outer)
821 	  tree field = get_chain_field (i);
823 	  x = build_simple_mem_ref (x);
824 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
825 	  x = init_tmp_var (info, x, gsi);
828       x = build_simple_mem_ref (x);
/* Finally select the requested FIELD from the target frame.  */
831   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
832   return x;
835 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
837 /* A subroutine of convert_nonlocal_reference_op.  Create a local variable
838    in the nested function with DECL_VALUE_EXPR set to reference the true
839    variable in the parent function.  This is used both for debug info
840    and in OMP lowering.  */
842 static tree
843 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
845   tree target_context;
846   struct nesting_info *i;
847   tree x, field, new_decl;
849   tree *slot = &info->var_map->get_or_insert (decl);
/* One replacement per decl per context: return the cached one.  */
851   if (*slot)
852     return *slot;
854   target_context = decl_function_context (decl);
856   /* A copy of the code in get_frame_field, but without the temporaries.  */
857   if (info->context == target_context)
859       /* Make sure frame_decl gets created.  */
860       (void) get_frame_type (info);
861       x = info->frame_decl;
862       i = info;
864   else
866       x = get_chain_decl (info);
867       for (i = info->outer; i->context != target_context; i = i->outer)
869 	  field = get_chain_field (i);
870 	  x = build_simple_mem_ref (x);
871 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
873       x = build_simple_mem_ref (x);
/* X now denotes the target frame; pick DECL's field, adding one extra
   indirection for decls stored by pointer.  */
876   field = lookup_field_for_decl (i, decl, INSERT);
877   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
878   if (use_pointer_in_frame (decl))
879     x = build_simple_mem_ref (x);
881   /* ??? We should be remapping types as well, surely.  */
/* The replacement decl mirrors DECL's attributes and carries the frame
   access expression as its DECL_VALUE_EXPR.  */
882   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
883 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
884   DECL_CONTEXT (new_decl) = info->context;
885   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
886   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
887   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
888   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
889   TREE_READONLY (new_decl) = TREE_READONLY (decl);
890   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
891   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
892   if ((TREE_CODE (decl) == PARM_DECL
893        || TREE_CODE (decl) == RESULT_DECL
894        || TREE_CODE (decl) == VAR_DECL)
895       && DECL_BY_REFERENCE (decl))
896     DECL_BY_REFERENCE (new_decl) = 1;
898   SET_DECL_VALUE_EXPR (new_decl, x);
899   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
901   *slot = new_decl;
902   DECL_CHAIN (new_decl) = info->debug_var_chain;
903   info->debug_var_chain = new_decl;
/* At -O0, variably-modified types referenced across contexts need
   their size expressions remapped too.  */
905   if (!optimize
906       && info->context != target_context
907       && variably_modified_type_p (TREE_TYPE (decl), NULL))
908     note_nonlocal_vla_type (info, TREE_TYPE (decl));
910   return new_decl;
914 /* Callback for walk_gimple_stmt, rewrite all references to VAR
915    and PARM_DECLs that belong to outer functions.
917    The rewrite will involve some number of structure accesses back up
918    the static chain.  E.g. for a variable FOO up one nesting level it'll
919    be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
920    indirections apply to decls for which use_pointer_in_frame is true.  */
922 static tree
923 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
925   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
926   struct nesting_info *const info = (struct nesting_info *) wi->info;
927   tree t = *tp;
/* Default: handle each node explicitly; only re-enable recursion for
   the cases below that request it.  */
929   *walk_subtrees = 0;
930   switch (TREE_CODE (t))
932     case VAR_DECL:
933       /* Non-automatic variables are never processed.  */
934       if (TREE_STATIC (t) || DECL_EXTERNAL (t))
935 	break;
936       /* FALLTHRU */
938     case PARM_DECL:
939       if (decl_function_context (t) != info->context)
941 	  tree x;
942 	  wi->changed = true;
/* Replace the decl with its local debug decl; unless expansion is
   suppressed for it, further rewrite to an explicit frame access.  */
944 	  x = get_nonlocal_debug_decl (info, t);
945 	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
947 	      tree target_context = decl_function_context (t);
948 	      struct nesting_info *i;
949 	      for (i = info->outer; i->context != target_context; i = i->outer)
950 		continue;
951 	      x = lookup_field_for_decl (i, t, INSERT);
952 	      x = get_frame_field (info, target_context, x, &wi->gsi);
953 	      if (use_pointer_in_frame (t))
955 		  x = init_tmp_var (info, x, &wi->gsi);
956 		  x = build_simple_mem_ref (x);
/* In a value-only context, spill through a temporary: save_tmp_var
   for stores, init_tmp_var for loads.  */
960 	      if (wi->val_only)
962 		  if (wi->is_lhs)
963 		    x = save_tmp_var (info, x, &wi->gsi);
964 		  else
965 		    x = init_tmp_var (info, x, &wi->gsi);
968 	  *tp = x;
970       break;
972     case LABEL_DECL:
973       /* We're taking the address of a label from a parent function, but
974 	 this is not itself a non-local goto.  Mark the label such that it
975 	 will not be deleted, much as we would with a label address in
976 	 static storage.  */
977       if (decl_function_context (t) != info->context)
978         FORCED_LABEL (t) = 1;
979       break;
981     case ADDR_EXPR:
983 	bool save_val_only = wi->val_only;
/* Walk the address operand in lvalue mode, then restore value mode.  */
985 	wi->val_only = false;
986 	wi->is_lhs = false;
987 	wi->changed = false;
988 	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
989 	wi->val_only = true;
991 	if (wi->changed)
993 	    tree save_context;
995 	    /* If we changed anything, we might no longer be directly
996 	       referencing a decl.  */
997 	    save_context = current_function_decl;
998 	    current_function_decl = info->context;
999 	    recompute_tree_invariant_for_addr_expr (t);
1000 	    current_function_decl = save_context;
1002 	    /* If the callback converted the address argument in a context
1003 	       where we only accept variables (and min_invariant, presumably),
1004 	       then compute the address into a temporary.  */
1005 	    if (save_val_only)
1006 	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1007 				      t, &wi->gsi);
1010       break;
1012     case REALPART_EXPR:
1013     case IMAGPART_EXPR:
1014     case COMPONENT_REF:
1015     case ARRAY_REF:
1016     case ARRAY_RANGE_REF:
1017     case BIT_FIELD_REF:
1018       /* Go down this entire nest and just look at the final prefix and
1019 	 anything that describes the references.  Otherwise, we lose track
1020 	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
1021       wi->val_only = true;
1022       wi->is_lhs = false;
1023       for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
/* Walk the index/offset operands of each component as values; the
   innermost base (after the loop) is walked in lvalue mode.  */
1025 	  if (TREE_CODE (t) == COMPONENT_REF)
1026 	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1027 		       NULL);
1028 	  else if (TREE_CODE (t) == ARRAY_REF
1029 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
1031 	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1032 			 wi, NULL);
1033 	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1034 			 wi, NULL);
1035 	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1036 			 wi, NULL);
1039       wi->val_only = false;
1040       walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1041       break;
1043     case VIEW_CONVERT_EXPR:
1044       /* Just request to look at the subtrees, leaving val_only and lhs
1045 	 untouched.  This might actually be for !val_only + lhs, in which
1046 	 case we don't want to force a replacement by a temporary.  */
1047       *walk_subtrees = 1;
1048       break;
1050     default:
1051       if (!IS_TYPE_OR_DECL_P (t))
1053 	  *walk_subtrees = 1;
1054 	  wi->val_only = true;
1055 	  wi->is_lhs = false;
1057       break;
1060   return NULL_TREE;
1063 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1064 					     struct walk_stmt_info *);
1066 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1067 and PARM_DECLs that belong to outer functions. */
/* Walk the OMP clause chain at *PCLAUSES for a construct inside
   INFO->context, rewriting any decl or expression that belongs to an
   outer function so it is accessed through the static chain.
   Returns true if the construct ends up needing the chain decl
   (i.e. some remapped clause other than PRIVATE was found).  */
1069 static bool
1070 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1072 struct nesting_info *const info = (struct nesting_info *) wi->info;
1073 bool need_chain = false, need_stmts = false;
1074 tree clause, decl;
1075 int dummy;
1076 bitmap new_suppress;
/* Build the updated suppression bitmap on a copy; it is installed
   wholesale at the end so the caller can save/restore the old one.  */
1078 new_suppress = BITMAP_GGC_ALLOC ();
1079 bitmap_copy (new_suppress, info->suppress_expansion);
1081 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1083 switch (OMP_CLAUSE_CODE (clause))
1085 case OMP_CLAUSE_REDUCTION:
1086 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1087 need_stmts = true;
1088 goto do_decl_clause;
1090 case OMP_CLAUSE_LASTPRIVATE:
1091 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1092 need_stmts = true;
1093 goto do_decl_clause;
1095 case OMP_CLAUSE_LINEAR:
1096 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1097 need_stmts = true;
/* The linear step is an arbitrary expression: convert it as an rvalue
   before falling into the common decl handling below.  */
1098 wi->val_only = true;
1099 wi->is_lhs = false;
1100 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1101 &dummy, wi);
1102 goto do_decl_clause;
1104 case OMP_CLAUSE_PRIVATE:
1105 case OMP_CLAUSE_FIRSTPRIVATE:
1106 case OMP_CLAUSE_COPYPRIVATE:
1107 case OMP_CLAUSE_SHARED:
/* Common handling for clauses that name a decl: if the decl belongs
   to an outer function, substitute the nonlocal debug decl and
   suppress further expansion of it inside this construct.  */
1108 do_decl_clause:
1109 decl = OMP_CLAUSE_DECL (clause);
1110 if (TREE_CODE (decl) == VAR_DECL
1111 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1112 break;
1113 if (decl_function_context (decl) != info->context)
1115 bitmap_set_bit (new_suppress, DECL_UID (decl));
1116 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1117 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1118 need_chain = true;
1120 break;
1122 case OMP_CLAUSE_SCHEDULE:
1123 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1124 break;
1125 /* FALLTHRU */
/* Clauses whose single operand is an expression to convert as an
   rvalue.  */
1126 case OMP_CLAUSE_FINAL:
1127 case OMP_CLAUSE_IF:
1128 case OMP_CLAUSE_NUM_THREADS:
1129 case OMP_CLAUSE_DEPEND:
1130 case OMP_CLAUSE_DEVICE:
1131 case OMP_CLAUSE_NUM_TEAMS:
1132 case OMP_CLAUSE_THREAD_LIMIT:
1133 case OMP_CLAUSE_SAFELEN:
1134 case OMP_CLAUSE__CILK_FOR_COUNT_:
1135 wi->val_only = true;
1136 wi->is_lhs = false;
1137 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1138 &dummy, wi);
1139 break;
1141 case OMP_CLAUSE_DIST_SCHEDULE:
1142 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1144 wi->val_only = true;
1145 wi->is_lhs = false;
1146 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1147 &dummy, wi);
1149 break;
1151 case OMP_CLAUSE_MAP:
1152 case OMP_CLAUSE_TO:
1153 case OMP_CLAUSE_FROM:
/* Convert the size expression first, then the mapped entity: a bare
   decl goes through the common decl path, anything else (e.g. an
   array section) is walked as a tree.  */
1154 if (OMP_CLAUSE_SIZE (clause))
1156 wi->val_only = true;
1157 wi->is_lhs = false;
1158 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1159 &dummy, wi);
1161 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1162 goto do_decl_clause;
1163 wi->val_only = true;
1164 wi->is_lhs = false;
1165 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1166 wi, NULL);
1167 break;
1169 case OMP_CLAUSE_ALIGNED:
1170 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1172 wi->val_only = true;
1173 wi->is_lhs = false;
1174 convert_nonlocal_reference_op
1175 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1177 /* Like do_decl_clause, but don't add any suppression. */
1178 decl = OMP_CLAUSE_DECL (clause);
1179 if (TREE_CODE (decl) == VAR_DECL
1180 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1181 break;
1182 if (decl_function_context (decl) != info->context)
1184 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1185 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1186 need_chain = true;
1188 break;
/* Clauses with no operands that need conversion.  */
1190 case OMP_CLAUSE_NOWAIT:
1191 case OMP_CLAUSE_ORDERED:
1192 case OMP_CLAUSE_DEFAULT:
1193 case OMP_CLAUSE_COPYIN:
1194 case OMP_CLAUSE_COLLAPSE:
1195 case OMP_CLAUSE_UNTIED:
1196 case OMP_CLAUSE_MERGEABLE:
1197 case OMP_CLAUSE_PROC_BIND:
1198 break;
1200 default:
1201 gcc_unreachable ();
1205 info->suppress_expansion = new_suppress;
/* Second pass: walk the GIMPLE sequences attached to reduction,
   lastprivate and linear clauses.  For reductions the placeholder is
   temporarily given this function's context so its references are not
   themselves treated as nonlocal.  */
1207 if (need_stmts)
1208 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1209 switch (OMP_CLAUSE_CODE (clause))
1211 case OMP_CLAUSE_REDUCTION:
1212 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1214 tree old_context
1215 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1216 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1217 = info->context;
1218 walk_body (convert_nonlocal_reference_stmt,
1219 convert_nonlocal_reference_op, info,
1220 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1221 walk_body (convert_nonlocal_reference_stmt,
1222 convert_nonlocal_reference_op, info,
1223 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1224 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1225 = old_context;
1227 break;
1229 case OMP_CLAUSE_LASTPRIVATE:
1230 walk_body (convert_nonlocal_reference_stmt,
1231 convert_nonlocal_reference_op, info,
1232 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1233 break;
1235 case OMP_CLAUSE_LINEAR:
1236 walk_body (convert_nonlocal_reference_stmt,
1237 convert_nonlocal_reference_op, info,
1238 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1239 break;
1241 default:
1242 break;
1245 return need_chain;
1248 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1250 static void
1251 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1253 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1254 type = TREE_TYPE (type);
1256 if (TYPE_NAME (type)
1257 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1258 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1259 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1261 while (POINTER_TYPE_P (type)
1262 || TREE_CODE (type) == VECTOR_TYPE
1263 || TREE_CODE (type) == FUNCTION_TYPE
1264 || TREE_CODE (type) == METHOD_TYPE)
1265 type = TREE_TYPE (type);
1267 if (TREE_CODE (type) == ARRAY_TYPE)
1269 tree domain, t;
1271 note_nonlocal_vla_type (info, TREE_TYPE (type));
1272 domain = TYPE_DOMAIN (type);
1273 if (domain)
1275 t = TYPE_MIN_VALUE (domain);
1276 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1277 && decl_function_context (t) != info->context)
1278 get_nonlocal_debug_decl (info, t);
1279 t = TYPE_MAX_VALUE (domain);
1280 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1281 && decl_function_context (t) != info->context)
1282 get_nonlocal_debug_decl (info, t);
1287 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1288 in BLOCK. */
1290 static void
1291 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1293 tree var;
1295 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1296 if (TREE_CODE (var) == VAR_DECL
1297 && variably_modified_type_p (TREE_TYPE (var), NULL)
1298 && DECL_HAS_VALUE_EXPR_P (var)
1299 && decl_function_context (var) != info->context)
1300 note_nonlocal_vla_type (info, TREE_TYPE (var));
1303 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1304 PARM_DECLs that belong to outer functions. This handles statements
1305 that are not handled via the standard recursion done in
1306 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1307 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1308 operands of STMT have been handled by this function. */
1310 static tree
1311 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1312 struct walk_stmt_info *wi)
1314 struct nesting_info *info = (struct nesting_info *) wi->info;
1315 tree save_local_var_chain;
1316 bitmap save_suppress;
1317 gimple stmt = gsi_stmt (*gsi);
1319 switch (gimple_code (stmt))
1321 case GIMPLE_GOTO:
1322 /* Don't walk non-local gotos for now. */
1323 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1325 wi->val_only = true;
1326 wi->is_lhs = false;
1327 *handled_ops_p = true;
1328 return NULL_TREE;
1330 break;
1332 case GIMPLE_OMP_PARALLEL:
1333 case GIMPLE_OMP_TASK:
1334 save_suppress = info->suppress_expansion;
1335 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1336 wi))
/* Some clause referenced the static chain: pass the chain decl into
   the region via an added firstprivate clause.  */
1338 tree c, decl;
1339 decl = get_chain_decl (info);
1340 c = build_omp_clause (gimple_location (stmt),
1341 OMP_CLAUSE_FIRSTPRIVATE);
1342 OMP_CLAUSE_DECL (c) = decl;
1343 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1344 gimple_omp_taskreg_set_clauses (stmt, c);
/* Walk the region body with a fresh local-var chain; any temporaries
   created are declared at the head of the region body, not in the
   enclosing function.  */
1347 save_local_var_chain = info->new_local_var_chain;
1348 info->new_local_var_chain = NULL;
1350 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1351 info, gimple_omp_body_ptr (stmt));
1353 if (info->new_local_var_chain)
1354 declare_vars (info->new_local_var_chain,
1355 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1356 false);
1357 info->new_local_var_chain = save_local_var_chain;
1358 info->suppress_expansion = save_suppress;
1359 break;
1361 case GIMPLE_OMP_FOR:
1362 save_suppress = info->suppress_expansion;
1363 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1364 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1365 convert_nonlocal_reference_stmt,
1366 convert_nonlocal_reference_op, info);
1367 walk_body (convert_nonlocal_reference_stmt,
1368 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1369 info->suppress_expansion = save_suppress;
1370 break;
1372 case GIMPLE_OMP_SECTIONS:
1373 save_suppress = info->suppress_expansion;
1374 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1375 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1376 info, gimple_omp_body_ptr (stmt));
1377 info->suppress_expansion = save_suppress;
1378 break;
1380 case GIMPLE_OMP_SINGLE:
1381 save_suppress = info->suppress_expansion;
1382 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1383 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1384 info, gimple_omp_body_ptr (stmt));
1385 info->suppress_expansion = save_suppress;
1386 break;
1388 case GIMPLE_OMP_TARGET:
/* A non-offloaded target region is handled like the other simple
   OMP constructs above.  */
1389 if (!is_gimple_omp_offloaded (stmt))
1391 save_suppress = info->suppress_expansion;
1392 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1393 wi);
1394 info->suppress_expansion = save_suppress;
1395 walk_body (convert_nonlocal_reference_stmt,
1396 convert_nonlocal_reference_op, info,
1397 gimple_omp_body_ptr (stmt));
1398 break;
/* Offloaded region: if the chain is needed, map it to the device
   explicitly (it cannot be passed implicitly like firstprivate).  */
1400 save_suppress = info->suppress_expansion;
1401 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1402 wi))
1404 tree c, decl;
1405 decl = get_chain_decl (info);
1406 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1407 OMP_CLAUSE_DECL (c) = decl;
1408 OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TO;
1409 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1410 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1411 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1414 save_local_var_chain = info->new_local_var_chain;
1415 info->new_local_var_chain = NULL;
1417 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1418 info, gimple_omp_body_ptr (stmt));
1420 if (info->new_local_var_chain)
1421 declare_vars (info->new_local_var_chain,
1422 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1423 false);
1424 info->new_local_var_chain = save_local_var_chain;
1425 info->suppress_expansion = save_suppress;
1426 break;
1428 case GIMPLE_OMP_TEAMS:
1429 save_suppress = info->suppress_expansion;
1430 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1431 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1432 info, gimple_omp_body_ptr (stmt));
1433 info->suppress_expansion = save_suppress;
1434 break;
1436 case GIMPLE_OMP_SECTION:
1437 case GIMPLE_OMP_MASTER:
1438 case GIMPLE_OMP_TASKGROUP:
1439 case GIMPLE_OMP_ORDERED:
1440 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1441 info, gimple_omp_body_ptr (stmt));
1442 break;
1444 case GIMPLE_BIND:
/* Note nonlocal VLA bounds (for debug info at -O0) and remap decls
   referenced by NAMELIST_DECLs; the bind body itself is walked by
   the generic walker (*handled_ops_p left false).  */
1446 gbind *bind_stmt = as_a <gbind *> (stmt);
1447 if (!optimize && gimple_bind_block (bind_stmt))
1448 note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));
1450 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1451 if (TREE_CODE (var) == NAMELIST_DECL)
1453 /* Adjust decls mentioned in NAMELIST_DECL. */
1454 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1455 tree decl;
1456 unsigned int i;
1458 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1460 if (TREE_CODE (decl) == VAR_DECL
1461 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1462 continue;
1463 if (decl_function_context (decl) != info->context)
1464 CONSTRUCTOR_ELT (decls, i)->value
1465 = get_nonlocal_debug_decl (info, decl);
1469 *handled_ops_p = false;
1470 return NULL_TREE;
1472 case GIMPLE_COND:
1473 wi->val_only = true;
1474 wi->is_lhs = false;
1475 *handled_ops_p = false;
1476 return NULL_TREE;
1478 default:
1479 /* For every other statement that we are not interested in
1480 handling here, let the walker traverse the operands. */
1481 *handled_ops_p = false;
1482 return NULL_TREE;
1485 /* We have handled all of STMT operands, no need to traverse the operands. */
1486 *handled_ops_p = true;
1487 return NULL_TREE;
1491 /* A subroutine of convert_local_reference. Create a local variable
1492 in the parent function with DECL_VALUE_EXPR set to reference the
1493 field in FRAME. This is used both for debug info and in OMP
1494 lowering. */
1496 static tree
1497 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1499 tree x, new_decl;
1501 tree *slot = &info->var_map->get_or_insert (decl);
1502 if (*slot)
1503 return *slot;
1505 /* Make sure frame_decl gets created. */
1506 (void) get_frame_type (info);
1507 x = info->frame_decl;
1508 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1510 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1511 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1512 DECL_CONTEXT (new_decl) = info->context;
1513 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1514 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1515 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1516 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1517 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1518 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1519 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1520 if ((TREE_CODE (decl) == PARM_DECL
1521 || TREE_CODE (decl) == RESULT_DECL
1522 || TREE_CODE (decl) == VAR_DECL)
1523 && DECL_BY_REFERENCE (decl))
1524 DECL_BY_REFERENCE (new_decl) = 1;
1526 SET_DECL_VALUE_EXPR (new_decl, x);
1527 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1528 *slot = new_decl;
1530 DECL_CHAIN (new_decl) = info->debug_var_chain;
1531 info->debug_var_chain = new_decl;
1533 /* Do not emit debug info twice. */
1534 DECL_IGNORED_P (decl) = 1;
1536 return new_decl;
1540 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1541 and PARM_DECLs that were referenced by inner nested functions.
1542 The rewrite will be a structure reference to the local frame variable. */
1544 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
/* Callback for walk_tree: rewrite one operand *TP inside the parent
   function so that decls moved into the frame record are accessed
   through it.  WI->val_only/WI->is_lhs describe how the enclosing
   context uses the operand.  */
1546 static tree
1547 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1549 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1550 struct nesting_info *const info = (struct nesting_info *) wi->info;
1551 tree t = *tp, field, x;
1552 bool save_val_only;
1554 *walk_subtrees = 0;
1555 switch (TREE_CODE (t))
1557 case VAR_DECL:
1558 /* Non-automatic variables are never processed. */
1559 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1560 break;
1561 /* FALLTHRU */
1563 case PARM_DECL:
1564 if (decl_function_context (t) == info->context)
1566 /* If we copied a pointer to the frame, then the original decl
1567 is used unchanged in the parent function. */
1568 if (use_pointer_in_frame (t))
1569 break;
1571 /* No need to transform anything if no child references the
1572 variable. */
1573 field = lookup_field_for_decl (info, t, NO_INSERT);
1574 if (!field)
1575 break;
1576 wi->changed = true;
/* Replace the decl with the frame field access — or with the debug
   decl when expansion is suppressed by an enclosing OMP clause.  */
1578 x = get_local_debug_decl (info, t, field);
1579 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1580 x = get_frame_field (info, info->context, field, &wi->gsi);
/* In a value-only context the COMPONENT_REF must be flattened into a
   temporary: load for rvalues, stored-to temporary for lvalues.  */
1582 if (wi->val_only)
1584 if (wi->is_lhs)
1585 x = save_tmp_var (info, x, &wi->gsi);
1586 else
1587 x = init_tmp_var (info, x, &wi->gsi);
1590 *tp = x;
1592 break;
1594 case ADDR_EXPR:
1595 save_val_only = wi->val_only;
1596 wi->val_only = false;
1597 wi->is_lhs = false;
1598 wi->changed = false;
1599 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1600 wi->val_only = save_val_only;
1602 /* If we converted anything ... */
1603 if (wi->changed)
1605 tree save_context;
1607 /* Then the frame decl is now addressable. */
1608 TREE_ADDRESSABLE (info->frame_decl) = 1;
/* Recompute invariance with the proper function context in place.  */
1610 save_context = current_function_decl;
1611 current_function_decl = info->context;
1612 recompute_tree_invariant_for_addr_expr (t);
1613 current_function_decl = save_context;
1615 /* If we are in a context where we only accept values, then
1616 compute the address into a temporary. */
1617 if (save_val_only)
1618 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1619 t, &wi->gsi);
1621 break;
1623 case REALPART_EXPR:
1624 case IMAGPART_EXPR:
1625 case COMPONENT_REF:
1626 case ARRAY_REF:
1627 case ARRAY_RANGE_REF:
1628 case BIT_FIELD_REF:
1629 /* Go down this entire nest and just look at the final prefix and
1630 anything that describes the references. Otherwise, we lose track
1631 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1632 save_val_only = wi->val_only;
1633 wi->val_only = true;
1634 wi->is_lhs = false;
1635 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1637 if (TREE_CODE (t) == COMPONENT_REF)
1638 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1639 NULL);
1640 else if (TREE_CODE (t) == ARRAY_REF
1641 || TREE_CODE (t) == ARRAY_RANGE_REF)
1643 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1644 NULL);
1645 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1646 NULL);
1647 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1648 NULL);
/* TP now points at the base object: convert it as an addressable
   reference, not a value.  */
1651 wi->val_only = false;
1652 walk_tree (tp, convert_local_reference_op, wi, NULL);
1653 wi->val_only = save_val_only;
1654 break;
1656 case MEM_REF:
1657 save_val_only = wi->val_only;
1658 wi->val_only = true;
1659 wi->is_lhs = false;
1660 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1661 wi, NULL);
1662 /* We need to re-fold the MEM_REF as component references as
1663 part of a ADDR_EXPR address are not allowed. But we cannot
1664 fold here, as the chain record type is not yet finalized. */
1665 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1666 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1667 info->mem_refs->add (tp);
1668 wi->val_only = save_val_only;
1669 break;
1671 case VIEW_CONVERT_EXPR:
1672 /* Just request to look at the subtrees, leaving val_only and lhs
1673 untouched. This might actually be for !val_only + lhs, in which
1674 case we don't want to force a replacement by a temporary. */
1675 *walk_subtrees = 1;
1676 break;
1678 default:
1679 if (!IS_TYPE_OR_DECL_P (t))
1681 *walk_subtrees = 1;
1682 wi->val_only = true;
1683 wi->is_lhs = false;
1685 break;
1688 return NULL_TREE;
1691 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1692 struct walk_stmt_info *);
1694 /* Helper for convert_local_reference. Convert all the references in
1695 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
/* Walk the OMP clause chain at *PCLAUSES in the parent function,
   remapping any decl that has been moved into the frame record onto
   its local debug decl.  Returns true if the construct ends up
   needing the frame decl shared/mapped into it.  */
1697 static bool
1698 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1700 struct nesting_info *const info = (struct nesting_info *) wi->info;
1701 bool need_frame = false, need_stmts = false;
1702 tree clause, decl;
1703 int dummy;
1704 bitmap new_suppress;
/* Build the updated suppression bitmap on a copy; installed wholesale
   at the end so the caller can save/restore the old one.  */
1706 new_suppress = BITMAP_GGC_ALLOC ();
1707 bitmap_copy (new_suppress, info->suppress_expansion);
1709 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1711 switch (OMP_CLAUSE_CODE (clause))
1713 case OMP_CLAUSE_REDUCTION:
1714 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1715 need_stmts = true;
1716 goto do_decl_clause;
1718 case OMP_CLAUSE_LASTPRIVATE:
1719 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1720 need_stmts = true;
1721 goto do_decl_clause;
1723 case OMP_CLAUSE_LINEAR:
1724 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1725 need_stmts = true;
1726 wi->val_only = true;
1727 wi->is_lhs = false;
1728 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1729 wi);
1730 goto do_decl_clause;
1732 case OMP_CLAUSE_PRIVATE:
1733 case OMP_CLAUSE_FIRSTPRIVATE:
1734 case OMP_CLAUSE_COPYPRIVATE:
1735 case OMP_CLAUSE_SHARED:
/* Common handling for clauses that name a decl: if the decl lives in
   this function's frame (and is not accessed via pointer), use the
   local debug decl and suppress its expansion inside the construct.  */
1736 do_decl_clause:
1737 decl = OMP_CLAUSE_DECL (clause);
1738 if (TREE_CODE (decl) == VAR_DECL
1739 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1740 break;
1741 if (decl_function_context (decl) == info->context
1742 && !use_pointer_in_frame (decl))
1744 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1745 if (field)
1747 bitmap_set_bit (new_suppress, DECL_UID (decl));
1748 OMP_CLAUSE_DECL (clause)
1749 = get_local_debug_decl (info, decl, field);
1750 need_frame = true;
1753 break;
1755 case OMP_CLAUSE_SCHEDULE:
1756 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1757 break;
1758 /* FALLTHRU */
/* Clauses whose single operand is an expression to convert as an
   rvalue.  */
1759 case OMP_CLAUSE_FINAL:
1760 case OMP_CLAUSE_IF:
1761 case OMP_CLAUSE_NUM_THREADS:
1762 case OMP_CLAUSE_DEPEND:
1763 case OMP_CLAUSE_DEVICE:
1764 case OMP_CLAUSE_NUM_TEAMS:
1765 case OMP_CLAUSE_THREAD_LIMIT:
1766 case OMP_CLAUSE_SAFELEN:
1767 case OMP_CLAUSE__CILK_FOR_COUNT_:
1768 wi->val_only = true;
1769 wi->is_lhs = false;
1770 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1771 wi);
1772 break;
1774 case OMP_CLAUSE_DIST_SCHEDULE:
1775 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1777 wi->val_only = true;
1778 wi->is_lhs = false;
1779 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1780 &dummy, wi);
1782 break;
1784 case OMP_CLAUSE_MAP:
1785 case OMP_CLAUSE_TO:
1786 case OMP_CLAUSE_FROM:
/* Convert the size expression first, then the mapped entity: a bare
   decl goes through the common decl path, anything else (e.g. an
   array section) is walked as a tree.  */
1787 if (OMP_CLAUSE_SIZE (clause))
1789 wi->val_only = true;
1790 wi->is_lhs = false;
1791 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1792 &dummy, wi);
1794 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1795 goto do_decl_clause;
1796 wi->val_only = true;
1797 wi->is_lhs = false;
1798 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1799 wi, NULL);
1800 break;
1802 case OMP_CLAUSE_ALIGNED:
1803 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1805 wi->val_only = true;
1806 wi->is_lhs = false;
1807 convert_local_reference_op
1808 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1810 /* Like do_decl_clause, but don't add any suppression. */
1811 decl = OMP_CLAUSE_DECL (clause);
1812 if (TREE_CODE (decl) == VAR_DECL
1813 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1814 break;
1815 if (decl_function_context (decl) == info->context
1816 && !use_pointer_in_frame (decl))
1818 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1819 if (field)
1821 OMP_CLAUSE_DECL (clause)
1822 = get_local_debug_decl (info, decl, field);
1823 need_frame = true;
1826 break;
/* Clauses with no operands that need conversion.  */
1828 case OMP_CLAUSE_NOWAIT:
1829 case OMP_CLAUSE_ORDERED:
1830 case OMP_CLAUSE_DEFAULT:
1831 case OMP_CLAUSE_COPYIN:
1832 case OMP_CLAUSE_COLLAPSE:
1833 case OMP_CLAUSE_UNTIED:
1834 case OMP_CLAUSE_MERGEABLE:
1835 case OMP_CLAUSE_PROC_BIND:
1836 break;
1838 default:
1839 gcc_unreachable ();
1843 info->suppress_expansion = new_suppress;
/* Second pass: walk the GIMPLE sequences attached to reduction,
   lastprivate and linear clauses, with the reduction placeholder
   temporarily given this function's context.  */
1845 if (need_stmts)
1846 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1847 switch (OMP_CLAUSE_CODE (clause))
1849 case OMP_CLAUSE_REDUCTION:
1850 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1852 tree old_context
1853 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1854 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1855 = info->context;
1856 walk_body (convert_local_reference_stmt,
1857 convert_local_reference_op, info,
1858 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1859 walk_body (convert_local_reference_stmt,
1860 convert_local_reference_op, info,
1861 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1862 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1863 = old_context;
1865 break;
1867 case OMP_CLAUSE_LASTPRIVATE:
1868 walk_body (convert_local_reference_stmt,
1869 convert_local_reference_op, info,
1870 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1871 break;
1873 case OMP_CLAUSE_LINEAR:
1874 walk_body (convert_local_reference_stmt,
1875 convert_local_reference_op, info,
1876 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1877 break;
1879 default:
1880 break;
1883 return need_frame;
1887 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1888 and PARM_DECLs that were referenced by inner nested functions.
1889 The rewrite will be a structure reference to the local frame variable. */
1891 static tree
1892 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1893 struct walk_stmt_info *wi)
1895 struct nesting_info *info = (struct nesting_info *) wi->info;
1896 tree save_local_var_chain;
1897 bitmap save_suppress;
1898 gimple stmt = gsi_stmt (*gsi);
1900 switch (gimple_code (stmt))
1902 case GIMPLE_OMP_PARALLEL:
1903 case GIMPLE_OMP_TASK:
1904 save_suppress = info->suppress_expansion;
1905 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1906 wi))
/* The region references frame-resident variables: share the whole
   frame object with the region.  */
1908 tree c;
1909 (void) get_frame_type (info);
1910 c = build_omp_clause (gimple_location (stmt),
1911 OMP_CLAUSE_SHARED);
1912 OMP_CLAUSE_DECL (c) = info->frame_decl;
1913 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1914 gimple_omp_taskreg_set_clauses (stmt, c);
/* Walk the region body with a fresh local-var chain; any temporaries
   created are declared at the head of the region body.  */
1917 save_local_var_chain = info->new_local_var_chain;
1918 info->new_local_var_chain = NULL;
1920 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1921 gimple_omp_body_ptr (stmt));
1923 if (info->new_local_var_chain)
1924 declare_vars (info->new_local_var_chain,
1925 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1926 info->new_local_var_chain = save_local_var_chain;
1927 info->suppress_expansion = save_suppress;
1928 break;
1930 case GIMPLE_OMP_FOR:
1931 save_suppress = info->suppress_expansion;
1932 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1933 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1934 convert_local_reference_stmt,
1935 convert_local_reference_op, info);
1936 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1937 info, gimple_omp_body_ptr (stmt));
1938 info->suppress_expansion = save_suppress;
1939 break;
1941 case GIMPLE_OMP_SECTIONS:
1942 save_suppress = info->suppress_expansion;
1943 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1944 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1945 info, gimple_omp_body_ptr (stmt));
1946 info->suppress_expansion = save_suppress;
1947 break;
1949 case GIMPLE_OMP_SINGLE:
1950 save_suppress = info->suppress_expansion;
1951 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1952 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1953 info, gimple_omp_body_ptr (stmt));
1954 info->suppress_expansion = save_suppress;
1955 break;
1957 case GIMPLE_OMP_TARGET:
/* A non-offloaded target region is handled like the other simple
   OMP constructs above.  */
1958 if (!is_gimple_omp_offloaded (stmt))
1960 save_suppress = info->suppress_expansion;
1961 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1962 info->suppress_expansion = save_suppress;
1963 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1964 info, gimple_omp_body_ptr (stmt));
1965 break;
/* Offloaded region: map the whole frame to and from the device.  */
1967 save_suppress = info->suppress_expansion;
1968 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
1970 tree c;
1971 (void) get_frame_type (info);
1972 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1973 OMP_CLAUSE_DECL (c) = info->frame_decl;
1974 OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TOFROM;
1975 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
1976 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1977 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1980 save_local_var_chain = info->new_local_var_chain;
1981 info->new_local_var_chain = NULL;
1983 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1984 gimple_omp_body_ptr (stmt));
1986 if (info->new_local_var_chain)
1987 declare_vars (info->new_local_var_chain,
1988 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1989 info->new_local_var_chain = save_local_var_chain;
1990 info->suppress_expansion = save_suppress;
1991 break;
1993 case GIMPLE_OMP_TEAMS:
1994 save_suppress = info->suppress_expansion;
1995 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1996 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1997 info, gimple_omp_body_ptr (stmt));
1998 info->suppress_expansion = save_suppress;
1999 break;
2001 case GIMPLE_OMP_SECTION:
2002 case GIMPLE_OMP_MASTER:
2003 case GIMPLE_OMP_TASKGROUP:
2004 case GIMPLE_OMP_ORDERED:
2005 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2006 info, gimple_omp_body_ptr (stmt));
2007 break;
2009 case GIMPLE_COND:
2010 wi->val_only = true;
2011 wi->is_lhs = false;
2012 *handled_ops_p = false;
2013 return NULL_TREE;
2015 case GIMPLE_ASSIGN:
/* A clobber of a variable that was moved into the frame would be
   rewritten into a clobber of the frame field; drop it instead, as
   the frame object stays live.  */
2016 if (gimple_clobber_p (stmt))
2018 tree lhs = gimple_assign_lhs (stmt);
2019 if (!use_pointer_in_frame (lhs)
2020 && lookup_field_for_decl (info, lhs, NO_INSERT))
2022 gsi_replace (gsi, gimple_build_nop (), true);
2023 break;
2026 *handled_ops_p = false;
2027 return NULL_TREE;
2029 case GIMPLE_BIND:
/* Fix up decls referenced from NAMELIST_DECLs; the bind body itself
   is walked by the generic walker (*handled_ops_p left false).  */
2030 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2031 var;
2032 var = DECL_CHAIN (var))
2033 if (TREE_CODE (var) == NAMELIST_DECL)
2035 /* Adjust decls mentioned in NAMELIST_DECL. */
2036 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2037 tree decl;
2038 unsigned int i;
2040 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2042 if (TREE_CODE (decl) == VAR_DECL
2043 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2044 continue;
2045 if (decl_function_context (decl) == info->context
2046 && !use_pointer_in_frame (decl))
2048 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2049 if (field)
2051 CONSTRUCTOR_ELT (decls, i)->value
2052 = get_local_debug_decl (info, decl, field);
2058 *handled_ops_p = false;
2059 return NULL_TREE;
2061 default:
2062 /* For every other statement that we are not interested in
2063 handling here, let the walker traverse the operands. */
2064 *handled_ops_p = false;
2065 return NULL_TREE;
2068 /* Indicate that we have handled all the operands ourselves. */
2069 *handled_ops_p = true;
2070 return NULL_TREE;
2074 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2075 that reference labels from outer functions. The rewrite will be a
2076 call to __builtin_nonlocal_goto. */
2078 static tree
2079 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2080 struct walk_stmt_info *wi)
2082 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2083 tree label, new_label, target_context, x, field;
2084 gcall *call;
2085 gimple stmt = gsi_stmt (*gsi);
/* Only GIMPLE_GOTOs to a LABEL_DECL defined in an outer function are
   rewritten; everything else is left to the generic walker.  */
2087 if (gimple_code (stmt) != GIMPLE_GOTO)
2089 *handled_ops_p = false;
2090 return NULL_TREE;
2093 label = gimple_goto_dest (stmt);
2094 if (TREE_CODE (label) != LABEL_DECL)
2096 *handled_ops_p = false;
2097 return NULL_TREE;
2100 target_context = decl_function_context (label);
2101 if (target_context == info->context)
2103 *handled_ops_p = false;
2104 return NULL_TREE;
/* Find the nesting_info of the outer function that defines LABEL.  */
2107 for (i = info->outer; target_context != i->context; i = i->outer)
2108 continue;
2110 /* The original user label may also be use for a normal goto, therefore
2111 we must create a new label that will actually receive the abnormal
2112 control transfer. This new label will be marked LABEL_NONLOCAL; this
2113 mark will trigger proper behavior in the cfg, as well as cause the
2114 (hairy target-specific) non-local goto receiver code to be generated
2115 when we expand rtl. Enter this association into var_map so that we
2116 can insert the new label into the IL during a second pass. */
2117 tree *slot = &i->var_map->get_or_insert (label);
2118 if (*slot == NULL)
2120 new_label = create_artificial_label (UNKNOWN_LOCATION);
2121 DECL_NONLOCAL (new_label) = 1;
2122 *slot = new_label;
2124 else
2125 new_label = *slot;
2127 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2128 field = get_nl_goto_field (i);
2129 x = get_frame_field (info, target_context, field, gsi);
2130 x = build_addr (x, target_context);
2131 x = gsi_gimplify_val (info, x, gsi);
2132 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2133 2, build_addr (new_label, target_context), x);
2134 gsi_replace (gsi, call, false);
2136 /* We have handled all of STMT's operands, no need to keep going. */
2137 *handled_ops_p = true;
2138 return NULL_TREE;
/* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
   are referenced via nonlocal goto from a nested function.  The rewrite
   will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  */

static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));

  /* Only label statements are of interest.  */
  if (!stmt)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (stmt);

  /* var_map was populated by convert_nl_goto_reference; a miss means this
     label is not the target of any non-local goto.  */
  tree *slot = info->var_map->get (label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = wi->gsi;
  gsi_prev (&tmp_gsi);
  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
    {
      gimple stmt = gimple_build_goto (label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  /* Install the DECL_NONLOCAL receiver label just before the user label.  */
  new_label = (tree) *slot;
  stmt = gimple_build_label (new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x, target_context);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      *tp = x;
      break;

    default:
      /* Keep walking subexpressions, but never descend into types or
	 declarations themselves.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions are handled like any other
	 statement; only offloaded ones get the taskreg treatment.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      {
	/* The OMP body becomes a separate function later; collect any
	   new temporaries in a fresh chain so they are declared inside
	   the region, then restore the outer chain.  */
	tree save_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	save_local_var_chain = info->new_local_var_chain;
	info->new_local_var_chain = NULL;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	info->new_local_var_chain = save_local_var_chain;
      }
      break;

    default:
      /* For every other statement, let the walker traverse operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* A chain already present means the call was handled earlier.  */
      if (gimple_call_chain (stmt))
	break;
      /* Indirect calls have no known callee to chain to.  */
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0: chain points to our own frame; bit 1: it came from
	     an outer function.  Used below to add OMP clauses.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Walk the region body with a clean static_chain_added, then add
	 data-sharing clauses for any FRAME/CHAIN objects the body now
	 references.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* The chain pointer is copied in; the frame is shared.  */
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions need only a body walk.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* Offloaded regions map FRAME/CHAIN objects to the device via
	 map clauses rather than data-sharing clauses.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_MAP_KIND (c)
		= i ? OMP_CLAUSE_MAP_TO : OMP_CLAUSE_MAP_TOFROM;
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  /* Iterate to a fixed point: stop when the number of functions needing
     a static chain no longer changes between passes.  */
  iter_count = 0;
  do
    {
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
/* Callback data used while remapping debug_var_chain types: extends the
   tree-inline copy_body_data with the nesting_info whose var_map drives
   the remapping.  */

struct nesting_copy_body_data
{
  /* Must stay the first member: callbacks receive a copy_body_data *
     and cast it back to nesting_copy_body_data * (see nesting_copy_decl).  */
  copy_body_data cb;
  struct nesting_info *root;
};
2527 /* A helper subroutine for debug_var_chain type remapping. */
2529 static tree
2530 nesting_copy_decl (tree decl, copy_body_data *id)
2532 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2533 tree *slot = nid->root->var_map->get (decl);
2535 if (slot)
2536 return (tree) *slot;
2538 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2540 tree new_decl = copy_decl_no_change (decl, id);
2541 DECL_ORIGINAL_TYPE (new_decl)
2542 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2543 return new_decl;
2546 if (TREE_CODE (decl) == VAR_DECL
2547 || TREE_CODE (decl) == PARM_DECL
2548 || TREE_CODE (decl) == RESULT_DECL)
2549 return decl;
2551 return copy_decl_no_change (decl, id);
2554 /* A helper function for remap_vla_decls. See if *TP contains
2555 some remapped variables. */
2557 static tree
2558 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2560 struct nesting_info *root = (struct nesting_info *) data;
2561 tree t = *tp;
2563 if (DECL_P (t))
2565 *walk_subtrees = 0;
2566 tree *slot = root->var_map->get (t);
2568 if (slot)
2569 return *slot;
2571 return NULL;
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Recurse into nested lexical blocks first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: cheap scan for a VLA (a VAR_DECL with a *ptr value
     expression and variably modified type) whose base pointer or type
     mentions a remapped variable.  Bail out early if there is none.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: starting from the first affected decl found above,
     remap types and value expressions in place.  */
  for (; var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting_info for the function that owns VAR; skip
	   decls from functions outside this nesting tree.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	/* Remap the type, then strip unnamed pointer layers in parallel so
	   we can restore a shared TYPE_DECL name on the remapped type.  */
	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
2672 /* Fold the MEM_REF *E. */
2673 bool
2674 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2676 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2677 *ref_p = fold (*ref_p);
2678 return true;
/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it.  */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &DECL_CHAIN (*adjust))
	gcc_assert (DECL_CHAIN (*adjust));
      *adjust = DECL_CHAIN (*adjust);

      DECL_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  /* FRAME.field = x;  */
	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  gcc_assert (DECL_STATIC_CHAIN (i->context));
	  arg3 = build_addr (root->frame_decl, context);

	  arg2 = build_addr (i->context, context);

	  /* __builtin_init_trampoline (&FRAME.tramp_field, fn, &FRAME).  */
	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      /* Prepend the initializations to the existing function body.  */
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost context the type depends on;
		   skip decls whose type varies with none of them.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		/* Remap the type, then strip unnamed pointer layers in
		   parallel so a shared TYPE_DECL name can be remapped.  */
		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
2906 static void
2907 finalize_nesting_tree (struct nesting_info *root)
2909 struct nesting_info *n;
2910 FOR_EACH_NEST_INFO (n, root)
2911 finalize_nesting_tree_1 (n);
2914 /* Unnest the nodes and pass them to cgraph. */
2916 static void
2917 unnest_nesting_tree_1 (struct nesting_info *root)
2919 struct cgraph_node *node = cgraph_node::get (root->context);
2921 /* For nested functions update the cgraph to reflect unnesting.
2922 We also delay finalizing of these functions up to this point. */
2923 if (node->origin)
2925 node->unnest ();
2926 cgraph_node::finalize_function (root->context, true);
2930 static void
2931 unnest_nesting_tree (struct nesting_info *root)
2933 struct nesting_info *n;
2934 FOR_EACH_NEST_INFO (n, root)
2935 unnest_nesting_tree_1 (n);
2938 /* Free the data structures allocated during this pass. */
2940 static void
2941 free_nesting_tree (struct nesting_info *root)
2943 struct nesting_info *node, *next;
2945 node = iter_nestinfo_start (root);
2948 next = iter_nestinfo_next (node);
2949 delete node->var_map;
2950 delete node->field_map;
2951 delete node->mem_refs;
2952 free (node);
2953 node = next;
2955 while (node);
2958 /* Gimplify a function and all its nested functions. */
2959 static void
2960 gimplify_all_functions (struct cgraph_node *root)
2962 struct cgraph_node *iter;
2963 if (!gimple_body (root->decl))
2964 gimplify_function_tree (root->decl);
2965 for (iter = root->nested; iter; iter = iter->next_nested)
2966 gimplify_all_functions (iter);
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  gimplify_all_functions (cgn);

  dump_file = dump_begin (TDI_nested, &dump_flags);
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Pass ordering matters: upward (nonlocal) references are resolved
     before downward (local) ones, non-local gotos are rewritten before
     their receiver labels are installed, and only then are static
     chains threaded through the calls and the trees finalized.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      dump_file = NULL;
    }
}
3016 #include "gt-tree-nested.h"