/* Nested function decomposition for GIMPLE.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "bitmap.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"  /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL.  */
#include "langhooks.h"
#include "gimple-low.h"
#include "gomp-constants.h"

/* The object of this pass is to lower the representation of a set of nested
   functions in order to expose all of the gory details of the various
   nonlocal references.  We want to do this sooner rather than later, in
   order to give us more freedom in emitting all of the functions in question.

   Back in olden times, when gcc was young, we developed an insanely
   complicated scheme whereby variables which were referenced nonlocally
   were forced to live in the stack of the declaring function, and then
   the nested functions magically discovered where these variables were
   placed.  In order for this scheme to function properly, it required
   that the outer function be partially expanded, then we switch to
   compiling the inner function, and once done with those we switch back
   to compiling the outer function.  Such delicate ordering requirements
   make it difficult to do whole translation unit optimizations
   involving such functions.

   The implementation here is much more direct.  Everything that can be
   referenced by an inner function is a member of an explicitly created
   structure herein called the "nonlocal frame struct".  The incoming
   static chain for a nested function is a pointer to this struct in
   the parent.  In this way, we settle on known offsets from a known
   base, and so are decoupled from the logic that places objects in the
   function's stack frame.  More importantly, we don't have to wait for
   that to happen -- since the compilation of the inner function is no
   longer tied to a real stack frame, the nonlocal frame struct can be
   allocated anywhere.  Which means that the outer function is now
   inlinable.

   Theory of operation here is very simple.  Iterate over all the
   statements in all the functions (depth first) several times,
   allocating structures and fields on demand.  In general we want to
   examine inner functions first, so that we can avoid making changes
   to outer functions which are unnecessary.

   The order of the passes matters a bit, in that later passes will be
   skipped if it is discovered that the functions don't actually interact
   at all.  That is, they're nested in the lexical sense but could have
   been written as independent functions without change.  */
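
/* As a conceptual sketch (not literal output of this pass), a nested
   function such as

       int outer (int n)
       {
         int x = n;
         int inner (void) { return x + 1; }
         return inner ();
       }

   is lowered so that X becomes a field of a "FRAME.outer" struct allocated
   in OUTER, and INNER receives the address of that struct as its static
   chain; the nonlocal read of X then becomes CHAIN->x.  The FRAME and
   CHAIN names mirror the ones created by the code below, but the exact
   shape shown here is only illustrative.  */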

struct nesting_info
{
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  hash_map<tree, tree> *field_map;
  hash_map<tree, tree> *var_map;
  hash_set<tree *> *mem_refs;
  bitmap suppress_expansion;

  tree context;
  tree new_local_var_chain;
  tree debug_var_chain;
  tree frame_type;
  tree frame_decl;
  tree chain_field;
  tree chain_decl;
  tree nl_goto_field;

  bool any_parm_remapped;
  bool any_tramp_created;
  char static_chain_added;
};

/* Iterate over the nesting tree, starting with ROOT, depth first.  */

static inline struct nesting_info *
iter_nestinfo_start (struct nesting_info *root)
{
  while (root->inner)
    root = root->inner;
  return root;
}

static inline struct nesting_info *
iter_nestinfo_next (struct nesting_info *node)
{
  if (node->next)
    return iter_nestinfo_start (node->next);
  return node->outer;
}

#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;

/* We're working in so many different function contexts simultaneously,
   that create_tmp_var is dangerous.  Prevent mishap.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy

/* Like create_tmp_var, except record the variable for registration at
   the given nesting level.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
              || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}

/* Take the address of EXP to be used within function CONTEXT.
   Mark it for addressability as necessary.  */

tree
build_addr (tree exp, tree context)
{
  tree base = exp;
  tree save_context;
  tree retval;

  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  if (DECL_P (base))
    TREE_ADDRESSABLE (base) = 1;

  /* Building the ADDR_EXPR will compute a set of properties for
     that ADDR_EXPR.  Those properties are unfortunately context
     specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.

     Temporarily set CURRENT_FUNCTION_DECL to the desired context,
     build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
     way the properties for the ADDR_EXPR are computed properly.  */
  save_context = current_function_decl;
  current_function_decl = context;
  retval = build_fold_addr_expr (exp);
  current_function_decl = save_context;
  return retval;
}

/* Insert FIELD into TYPE, sorted by alignment requirements.  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  DECL_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    TYPE_ALIGN (type) = DECL_ALIGN (field);
}

/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      name = concat ("FRAME.",
                     IDENTIFIER_POINTER (DECL_NAME (info->context)),
                     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
         be pointed to by the static chain pointer.  This pessimizes
         when it turns out that no static chains are needed because
         the nested functions referencing non-local variables are not
         reachable, but the true pessimization is to create the non-
         local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}

/* Return true if DECL should be referenced by pointer in the non-local
   frame structure.  */

static bool
use_pointer_in_frame (tree decl)
{
  if (TREE_CODE (decl) == PARM_DECL)
    {
      /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
         sized decls, and inefficient to copy large aggregates.  Don't bother
         moving anything but scalar variables.  */
      return AGGREGATE_TYPE_P (TREE_TYPE (decl));
    }
  else
    {
      /* Variable sized types make things "interesting" in the frame.  */
      return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
    }
}

/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
                       enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
        {
          TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
          DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
          DECL_NONADDRESSABLE_P (field) = 1;
        }
      else
        {
          TREE_TYPE (field) = TREE_TYPE (decl);
          DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
          DECL_ALIGN (field) = DECL_ALIGN (decl);
          DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
          TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
          DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
          TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
        }

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      if (TREE_CODE (decl) == PARM_DECL)
        info->any_parm_remapped = true;
    }

  return *slot;
}

/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
         the construction of this variable is handled specially in
         expand_function_start and initialize_inlined_parameters.
         Note also that it's represented as a parameter.  This is
         closer to the truth, since the initial value does come from
         the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
                         PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
         it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      if (dump_file
          && (dump_flags & TDF_DETAILS)
          && !DECL_STATIC_CHAIN (info->context))
        fprintf (dump_file, "Setting static-chain for %s\n",
                 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}

/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      if (dump_file
          && (dump_flags & TDF_DETAILS)
          && !DECL_STATIC_CHAIN (info->context))
        fprintf (dump_file, "Setting static-chain for %s\n",
                 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}

/* Initialize a new temporary with the GIMPLE_CALL STMT.  */

static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
                        gcall *call)
{
  tree t;

  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}

/* Copy EXP into a temporary.  Allocate the temporary in the context of
   INFO and insert the initialization statement before GSI.  */

static tree
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (t, exp);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}

/* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */

static tree
gsi_gimplify_val (struct nesting_info *info, tree exp,
                  gimple_stmt_iterator *gsi)
{
  if (is_gimple_val (exp))
    return exp;
  else
    return init_tmp_var (info, exp, gsi);
}

/* Similarly, but copy from the temporary and insert the statement
   after the iterator.  */

static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (exp, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}

/* Build or return the type used to represent a nested function trampoline.  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
                  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
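
/* A numeric illustration of the padding above, using hypothetical target
   values rather than any particular port: with TRAMPOLINE_SIZE == 24,
   TRAMPOLINE_ALIGNMENT == 128 bits and STACK_BOUNDARY == 64 bits, the
   extra space is (128/8 - 1) & -(64/8) == 15 & -8 == 8 bytes, so __data
   becomes 32 bytes, enough to carve out a 16-byte-aligned 24-byte block
   at run time starting from an 8-byte-aligned address.  */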

/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
                       enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->var_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->var_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);
      TREE_TYPE (field) = get_trampoline_type (info);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      info->any_tramp_created = true;
    }

  return *slot;
}

/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
         frame pointer, the rest is for the target's stack pointer save
         area.  The number of words is controlled by STACK_SAVEAREA_MODE;
         not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
        type = ptr_type_node;
      else
        type = lang_hooks.types.type_for_mode (Pmode, 1);

      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
        (type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
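
/* Illustrative arithmetic only; the actual numbers are target dependent.
   If STACK_SAVEAREA_MODE (SAVE_NONLOCAL) spans two pointer-sized words,
   the SIZE computation above yields 2 + 1 == 3, i.e. __nl_goto_buf holds
   the frame pointer plus a two-word stack pointer save area.  */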

/* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */

static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
           struct nesting_info *info, gimple_seq *pseq)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.val_only = true;
  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
}

/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */

static inline void
walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
               struct nesting_info *info)
{
  gimple_seq body = gimple_body (info->context);
  walk_body (callback_stmt, callback_op, info, &body);
  gimple_set_body (info->context, body);
}

/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
                     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
                     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
                 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
                 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
                 &wi, NULL);

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}

/* Similarly for ROOT and all functions nested underneath, depth first.  */

static void
walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
                    struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    walk_function (callback_stmt, callback_op, n);
}

/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here return indicating whether
   FNDECL has such a nested function.  ORIG_FN is the function we were
   trying to inline to use for checking whether any argument is variably
   modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
        if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
          return true;

      if (check_for_nested_with_variably_modified (cgn->decl,
                                                   orig_fndecl))
        return true;
    }

  return false;
}
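
/* A minimal, hypothetical source-level instance of the pathological case
   described above check_for_nested_with_variably_modified: the nested
   function's parameter type depends on a variable of the enclosing
   function, so inlining the enclosing function would have to remap it.

       void outer (int n)
       {
         typedef int row[n];
         void inner (row r) { r[0] = 0; }
         ...
       }
*/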

/* Construct our local datastructure describing the function nesting
   tree rooted by CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See the discussion at check_for_nested_with_variably_modified for
     why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}

/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
                  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      x = build_addr (info->frame_decl, target_context);
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
        {
          tree field = get_chain_field (i);

          x = build_simple_mem_ref (x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
          x = init_tmp_var (info, x, gsi);
        }
    }

  return x;
}
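
/* A sketch of what the loop above emits (the temporary names are made up):
   to reach the frame two nesting levels up from INFO->CONTEXT it emits
   roughly "tmp1 = CHAIN->__chain;", three levels up additionally
   "tmp2 = tmp1->__chain;", i.e. one load of the __chain field, copied
   into a temporary, per intervening level.  */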

/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
                 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
        {
          tree field = get_chain_field (i);

          x = build_simple_mem_ref (x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
          x = init_tmp_var (info, x, gsi);
        }

      x = build_simple_mem_ref (x);
    }

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}

static void note_nonlocal_vla_type (struct nesting_info *info, tree type);

/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;
      for (i = info->outer; i->context != target_context; i = i->outer)
        {
          field = get_chain_field (i);
          x = build_simple_mem_ref (x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
        }
      x = build_simple_mem_ref (x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
                         VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}

/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
        break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
        {
          tree x;
          wi->changed = true;

          x = get_nonlocal_debug_decl (info, t);
          if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
            {
              tree target_context = decl_function_context (t);
              struct nesting_info *i;
              for (i = info->outer; i->context != target_context; i = i->outer)
                continue;
              x = lookup_field_for_decl (i, t, INSERT);
              x = get_frame_field (info, target_context, x, &wi->gsi);
              if (use_pointer_in_frame (t))
                {
                  x = init_tmp_var (info, x, &wi->gsi);
                  x = build_simple_mem_ref (x);
                }
            }

          if (wi->val_only)
            {
              if (wi->is_lhs)
                x = save_tmp_var (info, x, &wi->gsi);
              else
                x = init_tmp_var (info, x, &wi->gsi);
            }

          *tp = x;
        }
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
         this is not itself a non-local goto.  Mark the label such that it
         will not be deleted, much as we would with a label address in
         static storage.  */
      if (decl_function_context (t) != info->context)
        FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
        bool save_val_only = wi->val_only;

        wi->val_only = false;
        wi->is_lhs = false;
        wi->changed = false;
        walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
        wi->val_only = true;

        if (wi->changed)
          {
            tree save_context;

            /* If we changed anything, we might no longer be directly
               referencing a decl.  */
            save_context = current_function_decl;
            current_function_decl = info->context;
            recompute_tree_invariant_for_addr_expr (t);
            current_function_decl = save_context;

            /* If the callback converted the address argument in a context
               where we only accept variables (and min_invariant, presumably),
               then compute the address into a temporary.  */
            if (save_val_only)
              *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
                                      t, &wi->gsi);
          }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
         anything that describes the references.  Otherwise, we lose track
         of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
        {
          if (TREE_CODE (t) == COMPONENT_REF)
            walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
                       NULL);
          else if (TREE_CODE (t) == ARRAY_REF
                   || TREE_CODE (t) == ARRAY_RANGE_REF)
            {
              walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
                         wi, NULL);
              walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
                         wi, NULL);
              walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
                         wi, NULL);
            }
        }
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
         untouched.  This might actually be for !val_only + lhs, in which
         case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
        {
          *walk_subtrees = 1;
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;
    }

  return NULL_TREE;
}

static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
                                             struct walk_stmt_info *);

/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_LASTPRIVATE:
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_LINEAR:
          if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
            need_stmts = true;
          wi->val_only = true;
          wi->is_lhs = false;
          convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
                                         &dummy, wi);
          goto do_decl_clause;

        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_SHARED:
        do_decl_clause:
          decl = OMP_CLAUSE_DECL (clause);
          if (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
            break;
          if (decl_function_context (decl) != info->context)
            {
              bitmap_set_bit (new_suppress, DECL_UID (decl));
              OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
              if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
                need_chain = true;
            }
          break;

        case OMP_CLAUSE_SCHEDULE:
          if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
            break;
          /* FALLTHRU */
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE__CILK_FOR_COUNT_:
          wi->val_only = true;
          wi->is_lhs = false;
          convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
                                         &dummy, wi);
          break;

        case OMP_CLAUSE_DIST_SCHEDULE:
          if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
                                             &dummy, wi);
            }
          break;

        case OMP_CLAUSE_MAP:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
          if (OMP_CLAUSE_SIZE (clause))
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
                                             &dummy, wi);
            }
          if (DECL_P (OMP_CLAUSE_DECL (clause)))
            goto do_decl_clause;
          wi->val_only = true;
          wi->is_lhs = false;
          walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
                     wi, NULL);
          break;

        case OMP_CLAUSE_ALIGNED:
          if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_nonlocal_reference_op
                (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
            }
          /* Like do_decl_clause, but don't add any suppression.  */
          decl = OMP_CLAUSE_DECL (clause);
          if (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
            break;
          if (decl_function_context (decl) != info->context)
            {
              OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
              if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
                need_chain = true;
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
          break;

        default:
          gcc_unreachable ();
        }
    }

  info->suppress_expansion = new_suppress;

  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            {
              tree old_context
                = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = info->context;
              walk_body (convert_nonlocal_reference_stmt,
                         convert_nonlocal_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
              walk_body (convert_nonlocal_reference_stmt,
                         convert_nonlocal_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = old_context;
            }
          break;

        case OMP_CLAUSE_LASTPRIVATE:
          walk_body (convert_nonlocal_reference_stmt,
                     convert_nonlocal_reference_op, info,
                     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
          break;

        case OMP_CLAUSE_LINEAR:
          walk_body (convert_nonlocal_reference_stmt,
                     convert_nonlocal_reference_op, info,
                     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
          break;

        default:
          break;
        }

  return need_chain;
}

/* Create nonlocal debug decls for nonlocal VLA array bounds.  */

static void
note_nonlocal_vla_type (struct nesting_info *info, tree type)
{
  while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
    type = TREE_TYPE (type);

  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));

  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == VECTOR_TYPE
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == METHOD_TYPE)
    type = TREE_TYPE (type);

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree domain, t;

      note_nonlocal_vla_type (info, TREE_TYPE (type));
      domain = TYPE_DOMAIN (type);
      if (domain)
        {
          t = TYPE_MIN_VALUE (domain);
          if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
              && decl_function_context (t) != info->context)
            get_nonlocal_debug_decl (info, t);
          t = TYPE_MAX_VALUE (domain);
          if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
              && decl_function_context (t) != info->context)
            get_nonlocal_debug_decl (info, t);
        }
    }
}

/* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
   in BLOCK.  */

static void
note_nonlocal_block_vlas (struct nesting_info *info, tree block)
{
  tree var;

  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL
        && variably_modified_type_p (TREE_TYPE (var), NULL)
        && DECL_HAS_VALUE_EXPR_P (var)
        && decl_function_context (var) != info->context)
      note_nonlocal_vla_type (info, TREE_TYPE (var));
}

/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                                 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
        {
          wi->val_only = true;
          wi->is_lhs = false;
          *handled_ops_p = true;
          return NULL_TREE;
        }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
                                        wi))
        {
          tree c, decl;
          decl = get_chain_decl (info);
          c = build_omp_clause (gimple_location (stmt),
                                OMP_CLAUSE_FIRSTPRIVATE);
          OMP_CLAUSE_DECL (c) = decl;
          OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
          gimple_omp_taskreg_set_clauses (stmt, c);
        }

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
        declare_vars (info->new_local_var_chain,
                      gimple_seq_first_stmt (gimple_omp_body (stmt)),
                      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
                           convert_nonlocal_reference_stmt,
                           convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
                 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
        {
          save_suppress = info->suppress_expansion;
          convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
                                        wi);
          info->suppress_expansion = save_suppress;
          walk_body (convert_nonlocal_reference_stmt,
                     convert_nonlocal_reference_op, info,
                     gimple_omp_body_ptr (stmt));
          break;
        }
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
                                        wi))
        {
          tree c, decl;
          decl = get_chain_decl (info);
          c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
          OMP_CLAUSE_DECL (c) = decl;
          OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
          OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
          OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
          gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
        }

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
        declare_vars (info->new_local_var_chain,
                      gimple_seq_first_stmt (gimple_omp_body (stmt)),
                      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
        gbind *bind_stmt = as_a <gbind *> (stmt);
        if (!optimize && gimple_bind_block (bind_stmt))
          note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));

        for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
          if (TREE_CODE (var) == NAMELIST_DECL)
            {
              /* Adjust decls mentioned in NAMELIST_DECL.  */
              tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
              tree decl;
              unsigned int i;

              FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
                {
                  if (TREE_CODE (decl) == VAR_DECL
                      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
                    continue;
                  if (decl_function_context (decl) != info->context)
                    CONSTRUCTOR_ELT (decls, i)->value
                      = get_nonlocal_debug_decl (info, decl);
                }
            }

        *handled_ops_p = false;
        return NULL_TREE;
      }
    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
         handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}

/* A subroutine of convert_local_reference.  Create a local variable
   in the parent function with DECL_VALUE_EXPR set to reference the
   field in FRAME.  This is used both for debug info and in OMP
   lowering.  */

static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);
  if (*slot)
    return *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
                         VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}

/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
        break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
        {
          /* If we copied a pointer to the frame, then the original decl
             is used unchanged in the parent function.  */
          if (use_pointer_in_frame (t))
            break;

          /* No need to transform anything if no child references the
             variable.  */
          field = lookup_field_for_decl (info, t, NO_INSERT);
          if (!field)
            break;
          wi->changed = true;

          x = get_local_debug_decl (info, t, field);
          if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
            x = get_frame_field (info, info->context, field, &wi->gsi);

          if (wi->val_only)
            {
              if (wi->is_lhs)
                x = save_tmp_var (info, x, &wi->gsi);
              else
                x = init_tmp_var (info, x, &wi->gsi);
            }

          *tp = x;
        }
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
        {
          tree save_context;

          /* Then the frame decl is now addressable.  */
          TREE_ADDRESSABLE (info->frame_decl) = 1;

          save_context = current_function_decl;
          current_function_decl = info->context;
          recompute_tree_invariant_for_addr_expr (t);
          current_function_decl = save_context;

          /* If we are in a context where we only accept values, then
             compute the address into a temporary.  */
          if (save_val_only)
            *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
                                    t, &wi->gsi);
        }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
         anything that describes the references.  Otherwise, we lose track
         of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
        {
          if (TREE_CODE (t) == COMPONENT_REF)
            walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
                       NULL);
          else if (TREE_CODE (t) == ARRAY_REF
                   || TREE_CODE (t) == ARRAY_RANGE_REF)
            {
              walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
                         NULL);
              walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
                         NULL);
              walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
                         NULL);
            }
        }
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
                 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
         part of a ADDR_EXPR address are not allowed.  But we cannot
         fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
          && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
        info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
         untouched.  This might actually be for !val_only + lhs, in which
         case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
        {
          *walk_subtrees = 1;
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;
    }

  return NULL_TREE;
}

static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
                                          struct walk_stmt_info *);

/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_LASTPRIVATE:
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_LINEAR:
          if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
            need_stmts = true;
          wi->val_only = true;
          wi->is_lhs = false;
          convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
                                      wi);
          goto do_decl_clause;

        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_SHARED:
        do_decl_clause:
          decl = OMP_CLAUSE_DECL (clause);
          if (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
            break;
          if (decl_function_context (decl) == info->context
              && !use_pointer_in_frame (decl))
            {
              tree field = lookup_field_for_decl (info, decl, NO_INSERT);
              if (field)
                {
                  bitmap_set_bit (new_suppress, DECL_UID (decl));
                  OMP_CLAUSE_DECL (clause)
                    = get_local_debug_decl (info, decl, field);
                  need_frame = true;
                }
            }
          break;

        case OMP_CLAUSE_SCHEDULE:
          if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
            break;
          /* FALLTHRU */
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE__CILK_FOR_COUNT_:
          wi->val_only = true;
          wi->is_lhs = false;
          convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
                                      wi);
          break;

        case OMP_CLAUSE_DIST_SCHEDULE:
          if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
                                          &dummy, wi);
            }
          break;

        case OMP_CLAUSE_MAP:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
          if (OMP_CLAUSE_SIZE (clause))
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
                                          &dummy, wi);
            }
          if (DECL_P (OMP_CLAUSE_DECL (clause)))
            goto do_decl_clause;
          wi->val_only = true;
          wi->is_lhs = false;
          walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
                     wi, NULL);
          break;

        case OMP_CLAUSE_ALIGNED:
          if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_local_reference_op
                (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
            }
          /* Like do_decl_clause, but don't add any suppression.  */
          decl = OMP_CLAUSE_DECL (clause);
          if (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
            break;
          if (decl_function_context (decl) == info->context
              && !use_pointer_in_frame (decl))
            {
              tree field = lookup_field_for_decl (info, decl, NO_INSERT);
              if (field)
                {
                  OMP_CLAUSE_DECL (clause)
                    = get_local_debug_decl (info, decl, field);
                  need_frame = true;
                }
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
          break;

        default:
          gcc_unreachable ();
        }
    }

  info->suppress_expansion = new_suppress;

  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            {
              tree old_context
                = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = info->context;
              walk_body (convert_local_reference_stmt,
                         convert_local_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
              walk_body (convert_local_reference_stmt,
                         convert_local_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = old_context;
            }
          break;

        case OMP_CLAUSE_LASTPRIVATE:
          walk_body (convert_local_reference_stmt,
                     convert_local_reference_op, info,
                     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
          break;

        case OMP_CLAUSE_LINEAR:
          walk_body (convert_local_reference_stmt,
                     convert_local_reference_op, info,
                     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
          break;

        default:
          break;
        }

  return need_frame;
}
1907 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR_DECLs
1908 and PARM_DECLs that were referenced by inner nested functions.
1909 The rewrite will be a structure reference to the local frame variable. */
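/* For illustration only (a sketch, not code emitted verbatim by this pass):
   if the enclosing function contains

     int x;
     x = 1;

   and some nested function also refers to 'x', then after conversion the
   enclosing body accesses the frame member instead, roughly

     FRAME.x = 1;

   where FRAME denotes the local instance of the nonlocal frame struct and
   the field is found via lookup_field_for_decl.  */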
1911 static tree
1912 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1913 struct walk_stmt_info *wi)
1915 struct nesting_info *info = (struct nesting_info *) wi->info;
1916 tree save_local_var_chain;
1917 bitmap save_suppress;
1918 gimple stmt = gsi_stmt (*gsi);
1920 switch (gimple_code (stmt))
1922 case GIMPLE_OMP_PARALLEL:
1923 case GIMPLE_OMP_TASK:
1924 save_suppress = info->suppress_expansion;
1925 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1926 wi))
1928 tree c;
1929 (void) get_frame_type (info);
1930 c = build_omp_clause (gimple_location (stmt),
1931 OMP_CLAUSE_SHARED);
1932 OMP_CLAUSE_DECL (c) = info->frame_decl;
1933 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1934 gimple_omp_taskreg_set_clauses (stmt, c);
1937 save_local_var_chain = info->new_local_var_chain;
1938 info->new_local_var_chain = NULL;
1940 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1941 gimple_omp_body_ptr (stmt));
1943 if (info->new_local_var_chain)
1944 declare_vars (info->new_local_var_chain,
1945 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1946 info->new_local_var_chain = save_local_var_chain;
1947 info->suppress_expansion = save_suppress;
1948 break;
1950 case GIMPLE_OMP_FOR:
1951 save_suppress = info->suppress_expansion;
1952 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1953 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1954 convert_local_reference_stmt,
1955 convert_local_reference_op, info);
1956 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1957 info, gimple_omp_body_ptr (stmt));
1958 info->suppress_expansion = save_suppress;
1959 break;
1961 case GIMPLE_OMP_SECTIONS:
1962 save_suppress = info->suppress_expansion;
1963 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1964 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1965 info, gimple_omp_body_ptr (stmt));
1966 info->suppress_expansion = save_suppress;
1967 break;
1969 case GIMPLE_OMP_SINGLE:
1970 save_suppress = info->suppress_expansion;
1971 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1972 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1973 info, gimple_omp_body_ptr (stmt));
1974 info->suppress_expansion = save_suppress;
1975 break;
1977 case GIMPLE_OMP_TARGET:
1978 if (!is_gimple_omp_offloaded (stmt))
1980 save_suppress = info->suppress_expansion;
1981 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1982 info->suppress_expansion = save_suppress;
1983 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1984 info, gimple_omp_body_ptr (stmt));
1985 break;
1987 save_suppress = info->suppress_expansion;
1988 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
1990 tree c;
1991 (void) get_frame_type (info);
1992 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1993 OMP_CLAUSE_DECL (c) = info->frame_decl;
1994 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
1995 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
1996 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1997 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2000 save_local_var_chain = info->new_local_var_chain;
2001 info->new_local_var_chain = NULL;
2003 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2004 gimple_omp_body_ptr (stmt));
2006 if (info->new_local_var_chain)
2007 declare_vars (info->new_local_var_chain,
2008 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2009 info->new_local_var_chain = save_local_var_chain;
2010 info->suppress_expansion = save_suppress;
2011 break;
2013 case GIMPLE_OMP_TEAMS:
2014 save_suppress = info->suppress_expansion;
2015 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2016 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2017 info, gimple_omp_body_ptr (stmt));
2018 info->suppress_expansion = save_suppress;
2019 break;
2021 case GIMPLE_OMP_SECTION:
2022 case GIMPLE_OMP_MASTER:
2023 case GIMPLE_OMP_TASKGROUP:
2024 case GIMPLE_OMP_ORDERED:
2025 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2026 info, gimple_omp_body_ptr (stmt));
2027 break;
2029 case GIMPLE_COND:
2030 wi->val_only = true;
2031 wi->is_lhs = false;
2032 *handled_ops_p = false;
2033 return NULL_TREE;
2035 case GIMPLE_ASSIGN:
2036 if (gimple_clobber_p (stmt))
2038 tree lhs = gimple_assign_lhs (stmt);
2039 if (!use_pointer_in_frame (lhs)
2040 && lookup_field_for_decl (info, lhs, NO_INSERT))
2042 gsi_replace (gsi, gimple_build_nop (), true);
2043 break;
2046 *handled_ops_p = false;
2047 return NULL_TREE;
2049 case GIMPLE_BIND:
2050 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2051 var;
2052 var = DECL_CHAIN (var))
2053 if (TREE_CODE (var) == NAMELIST_DECL)
2055 /* Adjust decls mentioned in NAMELIST_DECL. */
2056 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2057 tree decl;
2058 unsigned int i;
2060 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2062 if (TREE_CODE (decl) == VAR_DECL
2063 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2064 continue;
2065 if (decl_function_context (decl) == info->context
2066 && !use_pointer_in_frame (decl))
2068 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2069 if (field)
2071 CONSTRUCTOR_ELT (decls, i)->value
2072 = get_local_debug_decl (info, decl, field);
2078 *handled_ops_p = false;
2079 return NULL_TREE;
2081 default:
2082 /* For every other statement that we are not interested in
2083 handling here, let the walker traverse the operands. */
2084 *handled_ops_p = false;
2085 return NULL_TREE;
2088 /* Indicate that we have handled all the operands ourselves. */
2089 *handled_ops_p = true;
2090 return NULL_TREE;
2094 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2095 that reference labels from outer functions. The rewrite will be a
2096 call to __builtin_nonlocal_goto. */
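/* Sketch of the rewrite (names illustrative): a goto in a nested function
   whose target label lives in an enclosing function,

     goto <outer_label>;

   becomes, roughly,

     __builtin_nonlocal_goto (&<new_nonlocal_label>, &chain->nl_goto_field);

   where <new_nonlocal_label> is a fresh DECL_NONLOCAL label that
   convert_nl_goto_receiver later installs next to the original one.  */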
2098 static tree
2099 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2100 struct walk_stmt_info *wi)
2102 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2103 tree label, new_label, target_context, x, field;
2104 gcall *call;
2105 gimple stmt = gsi_stmt (*gsi);
2107 if (gimple_code (stmt) != GIMPLE_GOTO)
2109 *handled_ops_p = false;
2110 return NULL_TREE;
2113 label = gimple_goto_dest (stmt);
2114 if (TREE_CODE (label) != LABEL_DECL)
2116 *handled_ops_p = false;
2117 return NULL_TREE;
2120 target_context = decl_function_context (label);
2121 if (target_context == info->context)
2123 *handled_ops_p = false;
2124 return NULL_TREE;
2127 for (i = info->outer; target_context != i->context; i = i->outer)
2128 continue;
2130 /* The original user label may also be used for a normal goto, therefore
2131 we must create a new label that will actually receive the abnormal
2132 control transfer. This new label will be marked LABEL_NONLOCAL; this
2133 mark will trigger proper behavior in the cfg, as well as cause the
2134 (hairy target-specific) non-local goto receiver code to be generated
2135 when we expand rtl. Enter this association into var_map so that we
2136 can insert the new label into the IL during a second pass. */
2137 tree *slot = &i->var_map->get_or_insert (label);
2138 if (*slot == NULL)
2140 new_label = create_artificial_label (UNKNOWN_LOCATION);
2141 DECL_NONLOCAL (new_label) = 1;
2142 *slot = new_label;
2144 else
2145 new_label = *slot;
2147 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2148 field = get_nl_goto_field (i);
2149 x = get_frame_field (info, target_context, field, gsi);
2150 x = build_addr (x, target_context);
2151 x = gsi_gimplify_val (info, x, gsi);
2152 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2153 2, build_addr (new_label, target_context), x);
2154 gsi_replace (gsi, call, false);
2156 /* We have handled all of STMT's operands, no need to keep going. */
2157 *handled_ops_p = true;
2158 return NULL_TREE;
2162 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2163 are referenced via nonlocal goto from a nested function. The rewrite
2164 will involve installing a newly generated DECL_NONLOCAL label, and
2165 (potentially) a branch around the rtl gunk that is assumed to be
2166 attached to such a label. */
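/* Roughly, for a label L that is the target of a nonlocal goto, the
   receiver rewrite turns

     L:
       ...

   into

     goto L;        (inserted only if the previous statement may fall through)
     L.nonlocal:    (the fresh DECL_NONLOCAL label from convert_nl_goto_reference)
     L:
       ...

   so ordinary gotos to L keep working while abnormal transfers arrive at
   the nonlocal label.  (Label names here are illustrative.)  */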
2168 static tree
2169 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2170 struct walk_stmt_info *wi)
2172 struct nesting_info *const info = (struct nesting_info *) wi->info;
2173 tree label, new_label;
2174 gimple_stmt_iterator tmp_gsi;
2175 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2177 if (!stmt)
2179 *handled_ops_p = false;
2180 return NULL_TREE;
2183 label = gimple_label_label (stmt);
2185 tree *slot = info->var_map->get (label);
2186 if (!slot)
2188 *handled_ops_p = false;
2189 return NULL_TREE;
2192 /* If there's any possibility that the previous statement falls through,
2193 then we must branch around the new non-local label. */
2194 tmp_gsi = wi->gsi;
2195 gsi_prev (&tmp_gsi);
2196 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2198 gimple stmt = gimple_build_goto (label);
2199 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2202 new_label = (tree) *slot;
2203 stmt = gimple_build_label (new_label);
2204 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2206 *handled_ops_p = true;
2207 return NULL_TREE;
2211 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2212 of nested functions that require the use of trampolines. The rewrite
2213 will involve a reference to a trampoline generated for the occasion. */
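/* Illustrative case (hypothetical call site): if the address of a nested
   function escapes in the enclosing function, e.g.

     qsort (a, n, sizeof *a, nested_cmp);

   the bare address cannot carry the static chain.  It is therefore
   replaced by the (adjusted) address of a trampoline stored in the frame
   struct; see the "Build" comment in the ADDR_EXPR case below for the
   exact statements generated.  */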
2215 static tree
2216 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2218 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2219 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2220 tree t = *tp, decl, target_context, x, builtin;
2221 gcall *call;
2223 *walk_subtrees = 0;
2224 switch (TREE_CODE (t))
2226 case ADDR_EXPR:
2227 /* Build
2228 T.1 = &CHAIN->tramp;
2229 T.2 = __builtin_adjust_trampoline (T.1);
2230 T.3 = (func_type)T.2;
2233 decl = TREE_OPERAND (t, 0);
2234 if (TREE_CODE (decl) != FUNCTION_DECL)
2235 break;
2237 /* Only need to process nested functions. */
2238 target_context = decl_function_context (decl);
2239 if (!target_context)
2240 break;
2242 /* If the nested function doesn't use a static chain, then
2243 it doesn't need a trampoline. */
2244 if (!DECL_STATIC_CHAIN (decl))
2245 break;
2247 /* If we don't want a trampoline, then don't build one. */
2248 if (TREE_NO_TRAMPOLINE (t))
2249 break;
2251 /* Lookup the immediate parent of the callee, as that's where
2252 we need to insert the trampoline. */
2253 for (i = info; i->context != target_context; i = i->outer)
2254 continue;
2255 x = lookup_tramp_for_decl (i, decl, INSERT);
2257 /* Compute the address of the field holding the trampoline. */
2258 x = get_frame_field (info, target_context, x, &wi->gsi);
2259 x = build_addr (x, target_context);
2260 x = gsi_gimplify_val (info, x, &wi->gsi);
2262 /* Do machine-specific ugliness. Normally this will involve
2263 computing extra alignment, but it can really be anything. */
2264 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2265 call = gimple_build_call (builtin, 1, x);
2266 x = init_tmp_var_with_call (info, &wi->gsi, call);
2268 /* Cast back to the proper function type. */
2269 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2270 x = init_tmp_var (info, x, &wi->gsi);
2272 *tp = x;
2273 break;
2275 default:
2276 if (!IS_TYPE_OR_DECL_P (t))
2277 *walk_subtrees = 1;
2278 break;
2281 return NULL_TREE;
2285 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2286 to addresses of nested functions that require the use of
2287 trampolines. The rewrite will involve a reference to a trampoline
2288 generated for the occasion. */
2290 static tree
2291 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2292 struct walk_stmt_info *wi)
2294 struct nesting_info *info = (struct nesting_info *) wi->info;
2295 gimple stmt = gsi_stmt (*gsi);
2297 switch (gimple_code (stmt))
2299 case GIMPLE_CALL:
2301 /* Only walk call arguments, lest we generate trampolines for
2302 direct calls. */
2303 unsigned long i, nargs = gimple_call_num_args (stmt);
2304 for (i = 0; i < nargs; i++)
2305 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2306 wi, NULL);
2307 break;
2310 case GIMPLE_OMP_TARGET:
2311 if (!is_gimple_omp_offloaded (stmt))
2313 *handled_ops_p = false;
2314 return NULL_TREE;
2316 /* FALLTHRU */
2317 case GIMPLE_OMP_PARALLEL:
2318 case GIMPLE_OMP_TASK:
2320 tree save_local_var_chain = info->new_local_var_chain;
2321 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2322 info->new_local_var_chain = NULL;
2323 char save_static_chain_added = info->static_chain_added;
2324 info->static_chain_added = 0;
2325 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2326 info, gimple_omp_body_ptr (stmt));
2327 if (info->new_local_var_chain)
2328 declare_vars (info->new_local_var_chain,
2329 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2330 false);
2331 for (int i = 0; i < 2; i++)
2333 tree c, decl;
2334 if ((info->static_chain_added & (1 << i)) == 0)
2335 continue;
2336 decl = i ? get_chain_decl (info) : info->frame_decl;
2337 /* Don't add CHAIN.* or FRAME.* twice. */
2338 for (c = gimple_omp_taskreg_clauses (stmt);
2340 c = OMP_CLAUSE_CHAIN (c))
2341 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2342 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2343 && OMP_CLAUSE_DECL (c) == decl)
2344 break;
2345 if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2347 c = build_omp_clause (gimple_location (stmt),
2348 i ? OMP_CLAUSE_FIRSTPRIVATE
2349 : OMP_CLAUSE_SHARED);
2350 OMP_CLAUSE_DECL (c) = decl;
2351 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2352 gimple_omp_taskreg_set_clauses (stmt, c);
2354 else if (c == NULL)
2356 c = build_omp_clause (gimple_location (stmt),
2357 OMP_CLAUSE_MAP);
2358 OMP_CLAUSE_DECL (c) = decl;
2359 OMP_CLAUSE_SET_MAP_KIND (c,
2360 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2361 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2362 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2363 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2367 info->new_local_var_chain = save_local_var_chain;
2368 info->static_chain_added |= save_static_chain_added;
2370 break;
2372 default:
2373 *handled_ops_p = false;
2374 return NULL_TREE;
2377 *handled_ops_p = true;
2378 return NULL_TREE;
2383 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2384 that reference nested functions to make sure that the static chain
2385 is set up properly for the call. */
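/* Sketch: a direct call to a nested function that uses its static chain,

     nested_fn (arg);

   is given an explicit static chain operand, appearing in GIMPLE dumps
   roughly as

     nested_fn (arg); [static-chain: &FRAME]

   with get_static_chain walking outward through the chain fields when the
   callee's parent is not the current function.  */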
2387 static tree
2388 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2389 struct walk_stmt_info *wi)
2391 struct nesting_info *const info = (struct nesting_info *) wi->info;
2392 tree decl, target_context;
2393 char save_static_chain_added;
2394 int i;
2395 gimple stmt = gsi_stmt (*gsi);
2397 switch (gimple_code (stmt))
2399 case GIMPLE_CALL:
2400 if (gimple_call_chain (stmt))
2401 break;
2402 decl = gimple_call_fndecl (stmt);
2403 if (!decl)
2404 break;
2405 target_context = decl_function_context (decl);
2406 if (target_context && DECL_STATIC_CHAIN (decl))
2408 gimple_call_set_chain (as_a <gcall *> (stmt),
2409 get_static_chain (info, target_context,
2410 &wi->gsi));
2411 info->static_chain_added |= (1 << (info->context != target_context));
2413 break;
2415 case GIMPLE_OMP_PARALLEL:
2416 case GIMPLE_OMP_TASK:
2417 save_static_chain_added = info->static_chain_added;
2418 info->static_chain_added = 0;
2419 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2420 for (i = 0; i < 2; i++)
2422 tree c, decl;
2423 if ((info->static_chain_added & (1 << i)) == 0)
2424 continue;
2425 decl = i ? get_chain_decl (info) : info->frame_decl;
2426 /* Don't add CHAIN.* or FRAME.* twice. */
2427 for (c = gimple_omp_taskreg_clauses (stmt);
2429 c = OMP_CLAUSE_CHAIN (c))
2430 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2431 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2432 && OMP_CLAUSE_DECL (c) == decl)
2433 break;
2434 if (c == NULL)
2436 c = build_omp_clause (gimple_location (stmt),
2437 i ? OMP_CLAUSE_FIRSTPRIVATE
2438 : OMP_CLAUSE_SHARED);
2439 OMP_CLAUSE_DECL (c) = decl;
2440 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2441 gimple_omp_taskreg_set_clauses (stmt, c);
2444 info->static_chain_added |= save_static_chain_added;
2445 break;
2447 case GIMPLE_OMP_TARGET:
2448 if (!is_gimple_omp_offloaded (stmt))
2450 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2451 break;
2453 save_static_chain_added = info->static_chain_added;
2454 info->static_chain_added = 0;
2455 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2456 for (i = 0; i < 2; i++)
2458 tree c, decl;
2459 if ((info->static_chain_added & (1 << i)) == 0)
2460 continue;
2461 decl = i ? get_chain_decl (info) : info->frame_decl;
2462 /* Don't add CHAIN.* or FRAME.* twice. */
2463 for (c = gimple_omp_target_clauses (stmt);
2465 c = OMP_CLAUSE_CHAIN (c))
2466 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2467 && OMP_CLAUSE_DECL (c) == decl)
2468 break;
2469 if (c == NULL)
2471 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2472 OMP_CLAUSE_DECL (c) = decl;
2473 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2474 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2475 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2476 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2480 info->static_chain_added |= save_static_chain_added;
2481 break;
2483 case GIMPLE_OMP_FOR:
2484 walk_body (convert_gimple_call, NULL, info,
2485 gimple_omp_for_pre_body_ptr (stmt));
2486 /* FALLTHRU */
2487 case GIMPLE_OMP_SECTIONS:
2488 case GIMPLE_OMP_SECTION:
2489 case GIMPLE_OMP_SINGLE:
2490 case GIMPLE_OMP_TEAMS:
2491 case GIMPLE_OMP_MASTER:
2492 case GIMPLE_OMP_TASKGROUP:
2493 case GIMPLE_OMP_ORDERED:
2494 case GIMPLE_OMP_CRITICAL:
2495 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2496 break;
2498 default:
2499 /* Keep looking for other operands. */
2500 *handled_ops_p = false;
2501 return NULL_TREE;
2504 *handled_ops_p = true;
2505 return NULL_TREE;
2508 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2509 call expressions. At the same time, determine if a nested function
2510 actually uses its static chain; if not, remember that. */
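/* Illustrative scenario for the fixed-point loop below (hypothetical):

     void a () { void b () { ... uses a's locals ... } void c () { ... calls b ... } }

   If 'c' was optimistically guessed not to need its static chain, the
   call to 'b' still has to pass a pointer to a's frame, so rewriting it
   (via get_static_chain) makes 'c' a static-chain user after all.  Such
   promotions change the chain count, so the walk is repeated until the
   count stops changing.  */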
2512 static void
2513 convert_all_function_calls (struct nesting_info *root)
2515 unsigned int chain_count = 0, old_chain_count, iter_count;
2516 struct nesting_info *n;
2518 /* First, optimistically clear static_chain for all decls that haven't
2519 used the static chain already for variable access. But always create
2520 it if not optimizing. This makes it possible to reconstruct the static
2521 nesting tree at run time and thus to resolve up-level references from
2522 within the debugger. */
2523 FOR_EACH_NEST_INFO (n, root)
2525 tree decl = n->context;
2526 if (!optimize)
2528 if (n->inner)
2529 (void) get_frame_type (n);
2530 if (n->outer)
2531 (void) get_chain_decl (n);
2533 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2535 DECL_STATIC_CHAIN (decl) = 0;
2536 if (dump_file && (dump_flags & TDF_DETAILS))
2537 fprintf (dump_file, "Guessing no static-chain for %s\n",
2538 lang_hooks.decl_printable_name (decl, 2));
2540 else
2541 DECL_STATIC_CHAIN (decl) = 1;
2542 chain_count += DECL_STATIC_CHAIN (decl);
2545 /* Walk the functions and perform transformations. Note that these
2546 transformations can induce new uses of the static chain, which in turn
2547 require re-examining all users of the decl. */
2548 /* ??? It would make sense to try to use the call graph to speed this up,
2549 but the call graph hasn't really been built yet. Even if it had been, we
2550 would still need to iterate in this loop since address-of references
2551 wouldn't show up in the callgraph anyway. */
2552 iter_count = 0;
2555 old_chain_count = chain_count;
2556 chain_count = 0;
2557 iter_count++;
2559 if (dump_file && (dump_flags & TDF_DETAILS))
2560 fputc ('\n', dump_file);
2562 FOR_EACH_NEST_INFO (n, root)
2564 tree decl = n->context;
2565 walk_function (convert_tramp_reference_stmt,
2566 convert_tramp_reference_op, n);
2567 walk_function (convert_gimple_call, NULL, n);
2568 chain_count += DECL_STATIC_CHAIN (decl);
2571 while (chain_count != old_chain_count);
2573 if (dump_file && (dump_flags & TDF_DETAILS))
2574 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2575 iter_count);
2578 struct nesting_copy_body_data
2580 copy_body_data cb;
2581 struct nesting_info *root;
2584 /* A helper subroutine for debug_var_chain type remapping. */
2586 static tree
2587 nesting_copy_decl (tree decl, copy_body_data *id)
2589 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2590 tree *slot = nid->root->var_map->get (decl);
2592 if (slot)
2593 return (tree) *slot;
2595 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2597 tree new_decl = copy_decl_no_change (decl, id);
2598 DECL_ORIGINAL_TYPE (new_decl)
2599 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2600 return new_decl;
2603 if (TREE_CODE (decl) == VAR_DECL
2604 || TREE_CODE (decl) == PARM_DECL
2605 || TREE_CODE (decl) == RESULT_DECL)
2606 return decl;
2608 return copy_decl_no_change (decl, id);
2611 /* A helper function for remap_vla_decls. See if *TP contains
2612 some remapped variables. */
2614 static tree
2615 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2617 struct nesting_info *root = (struct nesting_info *) data;
2618 tree t = *tp;
2620 if (DECL_P (t))
2622 *walk_subtrees = 0;
2623 tree *slot = root->var_map->get (t);
2625 if (slot)
2626 return *slot;
2628 return NULL;
2631 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2632 involved. */
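/* Illustrative case (hypothetical): a variable-length object in the outer
   function whose bound is also used by a nested function, e.g.

     void outer (int n) { char buf[n]; void inner () { ... n ... } ... }

   After gimplification 'buf' typically has a DECL_VALUE_EXPR that is an
   INDIRECT_REF of a temporary pointer, and a variably modified type
   mentioning 'n'.  Once 'n' has been replaced by a frame-struct
   reference, that type and value expression must be remapped to the debug
   substitutes recorded in var_map, which is what the walk below does.  */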
2634 static void
2635 remap_vla_decls (tree block, struct nesting_info *root)
2637 tree var, subblock, val, type;
2638 struct nesting_copy_body_data id;
2640 for (subblock = BLOCK_SUBBLOCKS (block);
2641 subblock;
2642 subblock = BLOCK_CHAIN (subblock))
2643 remap_vla_decls (subblock, root);
2645 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2646 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2648 val = DECL_VALUE_EXPR (var);
2649 type = TREE_TYPE (var);
2651 if (!(TREE_CODE (val) == INDIRECT_REF
2652 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2653 && variably_modified_type_p (type, NULL)))
2654 continue;
2656 if (root->var_map->get (TREE_OPERAND (val, 0))
2657 || walk_tree (&type, contains_remapped_vars, root, NULL))
2658 break;
2661 if (var == NULL_TREE)
2662 return;
2664 memset (&id, 0, sizeof (id));
2665 id.cb.copy_decl = nesting_copy_decl;
2666 id.cb.decl_map = new hash_map<tree, tree>;
2667 id.root = root;
2669 for (; var; var = DECL_CHAIN (var))
2670 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2672 struct nesting_info *i;
2673 tree newt, context;
2675 val = DECL_VALUE_EXPR (var);
2676 type = TREE_TYPE (var);
2678 if (!(TREE_CODE (val) == INDIRECT_REF
2679 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2680 && variably_modified_type_p (type, NULL)))
2681 continue;
2683 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2684 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2685 continue;
2687 context = decl_function_context (var);
2688 for (i = root; i; i = i->outer)
2689 if (i->context == context)
2690 break;
2692 if (i == NULL)
2693 continue;
2695 /* Fully expand value expressions. This avoids having debug variables
2696 that are only referenced from value expressions and could be swept during GC. */
2697 if (slot)
2699 tree t = (tree) *slot;
2700 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2701 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2704 id.cb.src_fn = i->context;
2705 id.cb.dst_fn = i->context;
2706 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2708 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2709 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2711 newt = TREE_TYPE (newt);
2712 type = TREE_TYPE (type);
2714 if (TYPE_NAME (newt)
2715 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2716 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2717 && newt != type
2718 && TYPE_NAME (newt) == TYPE_NAME (type))
2719 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2721 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2722 if (val != DECL_VALUE_EXPR (var))
2723 SET_DECL_VALUE_EXPR (var, val);
2726 delete id.cb.decl_map;
2729 /* Fold the MEM_REF *E. */
2730 bool
2731 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2733 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2734 *ref_p = fold (*ref_p);
2735 return true;
2738 /* Do "everything else" to clean up or complete state collected by the
2739 various walking passes -- lay out the types and decls, generate code
2740 to initialize the frame decl, store critical expressions in the
2741 struct function for rtl to find. */
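/* Sketch (illustrative names) of the initialization statements emitted at
   the start of the function body by the code below:

     FRAME.p = p;                  copy of a remapped parameter (or its address)
     FRAME.<chain-field> = CHAIN;  if a chain_field was created
     __builtin_init_trampoline (&FRAME.<tramp-field>, &<nested-fn>, &FRAME);
                                   one call per trampoline created

   followed by registering chain_decl and the nonlocal goto save area with
   the struct function.  */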
2743 static void
2744 finalize_nesting_tree_1 (struct nesting_info *root)
2746 gimple_seq stmt_list;
2747 gimple stmt;
2748 tree context = root->context;
2749 struct function *sf;
2751 stmt_list = NULL;
2753 /* If we created a non-local frame type or decl, we need to lay them
2754 out at this time. */
2755 if (root->frame_type)
2757 /* In some cases the frame type will trigger the -Wpadded warning.
2758 This is not helpful; suppress it. */
2759 int save_warn_padded = warn_padded;
2760 tree *adjust;
2762 warn_padded = 0;
2763 layout_type (root->frame_type);
2764 warn_padded = save_warn_padded;
2765 layout_decl (root->frame_decl, 0);
2767 /* Remove root->frame_decl from root->new_local_var_chain, so
2768 that we can declare it also in the lexical blocks, which
2769 helps ensure virtual regs that end up appearing in its RTL
2770 expression get substituted in instantiate_virtual_regs(). */
2771 for (adjust = &root->new_local_var_chain;
2772 *adjust != root->frame_decl;
2773 adjust = &DECL_CHAIN (*adjust))
2774 gcc_assert (DECL_CHAIN (*adjust));
2775 *adjust = DECL_CHAIN (*adjust);
2777 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2778 declare_vars (root->frame_decl,
2779 gimple_seq_first_stmt (gimple_body (context)), true);
2782 /* If any parameters were referenced non-locally, then we need to
2783 insert a copy. Likewise, if any variables were referenced by
2784 pointer, we need to initialize the address. */
2785 if (root->any_parm_remapped)
2787 tree p;
2788 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2790 tree field, x, y;
2792 field = lookup_field_for_decl (root, p, NO_INSERT);
2793 if (!field)
2794 continue;
2796 if (use_pointer_in_frame (p))
2797 x = build_addr (p, context);
2798 else
2799 x = p;
2801 /* If the assignment is from a non-register, the stmt is
2802 not valid gimple. Make it so by using a temporary instead. */
2803 if (!is_gimple_reg (x)
2804 && is_gimple_reg_type (TREE_TYPE (x)))
2806 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2807 x = init_tmp_var (root, x, &gsi);
2810 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2811 root->frame_decl, field, NULL_TREE);
2812 stmt = gimple_build_assign (y, x);
2813 gimple_seq_add_stmt (&stmt_list, stmt);
2817 /* If a chain_field was created, then it needs to be initialized
2818 from chain_decl. */
2819 if (root->chain_field)
2821 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2822 root->frame_decl, root->chain_field, NULL_TREE);
2823 stmt = gimple_build_assign (x, get_chain_decl (root));
2824 gimple_seq_add_stmt (&stmt_list, stmt);
2827 /* If trampolines were created, then we need to initialize them. */
2828 if (root->any_tramp_created)
2830 struct nesting_info *i;
2831 for (i = root->inner; i ; i = i->next)
2833 tree arg1, arg2, arg3, x, field;
2835 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2836 if (!field)
2837 continue;
2839 gcc_assert (DECL_STATIC_CHAIN (i->context));
2840 arg3 = build_addr (root->frame_decl, context);
2842 arg2 = build_addr (i->context, context);
2844 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2845 root->frame_decl, field, NULL_TREE);
2846 arg1 = build_addr (x, context);
2848 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2849 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2850 gimple_seq_add_stmt (&stmt_list, stmt);
2854 /* If we created initialization statements, insert them. */
2855 if (stmt_list)
2857 gbind *bind;
2858 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2859 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
2860 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2861 gimple_bind_set_body (bind, stmt_list);
2864 /* If a chain_decl was created, then it needs to be registered with
2865 struct function so that it gets initialized from the static chain
2866 register at the beginning of the function. */
2867 sf = DECL_STRUCT_FUNCTION (root->context);
2868 sf->static_chain_decl = root->chain_decl;
2870 /* Similarly for the non-local goto save area. */
2871 if (root->nl_goto_field)
2873 sf->nonlocal_goto_save_area
2874 = get_frame_field (root, context, root->nl_goto_field, NULL);
2875 sf->has_nonlocal_label = 1;
2878 /* Make sure all new local variables get inserted into the
2879 proper BIND_EXPR. */
2880 if (root->new_local_var_chain)
2881 declare_vars (root->new_local_var_chain,
2882 gimple_seq_first_stmt (gimple_body (root->context)),
2883 false);
2885 if (root->debug_var_chain)
2887 tree debug_var;
2888 gbind *scope;
2890 remap_vla_decls (DECL_INITIAL (root->context), root);
2892 for (debug_var = root->debug_var_chain; debug_var;
2893 debug_var = DECL_CHAIN (debug_var))
2894 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2895 break;
2897 /* If there are any debug decls with variable length types,
2898 remap those types using other debug_var_chain variables. */
2899 if (debug_var)
2901 struct nesting_copy_body_data id;
2903 memset (&id, 0, sizeof (id));
2904 id.cb.copy_decl = nesting_copy_decl;
2905 id.cb.decl_map = new hash_map<tree, tree>;
2906 id.root = root;
2908 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2909 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2911 tree type = TREE_TYPE (debug_var);
2912 tree newt, t = type;
2913 struct nesting_info *i;
2915 for (i = root; i; i = i->outer)
2916 if (variably_modified_type_p (type, i->context))
2917 break;
2919 if (i == NULL)
2920 continue;
2922 id.cb.src_fn = i->context;
2923 id.cb.dst_fn = i->context;
2924 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2926 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2927 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2929 newt = TREE_TYPE (newt);
2930 t = TREE_TYPE (t);
2932 if (TYPE_NAME (newt)
2933 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2934 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2935 && newt != t
2936 && TYPE_NAME (newt) == TYPE_NAME (t))
2937 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2940 delete id.cb.decl_map;
2943 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
2944 if (gimple_bind_block (scope))
2945 declare_vars (root->debug_var_chain, scope, true);
2946 else
2947 BLOCK_VARS (DECL_INITIAL (root->context))
2948 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2949 root->debug_var_chain);
2952 /* Fold the rewritten MEM_REF trees. */
2953 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
2955 /* Dump the translated tree function. */
2956 if (dump_file)
2958 fputs ("\n\n", dump_file);
2959 dump_function_to_file (root->context, dump_file, dump_flags);
2963 static void
2964 finalize_nesting_tree (struct nesting_info *root)
2966 struct nesting_info *n;
2967 FOR_EACH_NEST_INFO (n, root)
2968 finalize_nesting_tree_1 (n);
2971 /* Unnest the nodes and pass them to cgraph. */
2973 static void
2974 unnest_nesting_tree_1 (struct nesting_info *root)
2976 struct cgraph_node *node = cgraph_node::get (root->context);
2978 /* For nested functions, update the cgraph to reflect unnesting.
2979 We also delay finalizing of these functions up to this point. */
2980 if (node->origin)
2982 node->unnest ();
2983 cgraph_node::finalize_function (root->context, true);
2987 static void
2988 unnest_nesting_tree (struct nesting_info *root)
2990 struct nesting_info *n;
2991 FOR_EACH_NEST_INFO (n, root)
2992 unnest_nesting_tree_1 (n);
2995 /* Free the data structures allocated during this pass. */
2997 static void
2998 free_nesting_tree (struct nesting_info *root)
3000 struct nesting_info *node, *next;
3002 node = iter_nestinfo_start (root);
3005 next = iter_nestinfo_next (node);
3006 delete node->var_map;
3007 delete node->field_map;
3008 delete node->mem_refs;
3009 free (node);
3010 node = next;
3012 while (node);
3015 /* Gimplify a function and all its nested functions. */
3016 static void
3017 gimplify_all_functions (struct cgraph_node *root)
3019 struct cgraph_node *iter;
3020 if (!gimple_body (root->decl))
3021 gimplify_function_tree (root->decl);
3022 for (iter = root->nested; iter; iter = iter->next_nested)
3023 gimplify_all_functions (iter);
3026 /* Main entry point for this pass. Process FNDECL and all of its nested
3027 subroutines and turn them into something less tightly bound. */
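/* A minimal user-level example (GNU C nested functions) of what gets
   decomposed here, for orientation only:

     int outer (int n)
     {
       int acc = 0;
       void add (int i) { acc += i; }
       add (n);
       return acc;
     }

   After lowering, 'acc' lives in outer's frame struct, 'add' takes a
   static chain pointing at that frame, and the two functions can be
   compiled independently.  */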
3029 void
3030 lower_nested_functions (tree fndecl)
3032 struct cgraph_node *cgn;
3033 struct nesting_info *root;
3035 /* If there are no nested functions, there's nothing to do. */
3036 cgn = cgraph_node::get (fndecl);
3037 if (!cgn->nested)
3038 return;
3040 gimplify_all_functions (cgn);
3042 dump_file = dump_begin (TDI_nested, &dump_flags);
3043 if (dump_file)
3044 fprintf (dump_file, "\n;; Function %s\n\n",
3045 lang_hooks.decl_printable_name (fndecl, 2));
3047 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3048 root = create_nesting_tree (cgn);
3050 walk_all_functions (convert_nonlocal_reference_stmt,
3051 convert_nonlocal_reference_op,
3052 root);
3053 walk_all_functions (convert_local_reference_stmt,
3054 convert_local_reference_op,
3055 root);
3056 walk_all_functions (convert_nl_goto_reference, NULL, root);
3057 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3059 convert_all_function_calls (root);
3060 finalize_nesting_tree (root);
3061 unnest_nesting_tree (root);
3063 free_nesting_tree (root);
3064 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3066 if (dump_file)
3068 dump_end (TDI_nested, dump_file);
3069 dump_file = NULL;
3073 #include "gt-tree-nested.h"