Move array-type va_list handling to build_va_arg
[official-gcc.git] / gcc / tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "hash-set.h"
25 #include "machmode.h"
26 #include "vec.h"
27 #include "double-int.h"
28 #include "input.h"
29 #include "alias.h"
30 #include "symtab.h"
31 #include "wide-int.h"
32 #include "inchash.h"
33 #include "tree.h"
34 #include "fold-const.h"
35 #include "stringpool.h"
36 #include "stor-layout.h"
37 #include "tm_p.h"
38 #include "hard-reg-set.h"
39 #include "function.h"
40 #include "tree-dump.h"
41 #include "tree-inline.h"
42 #include "predict.h"
43 #include "basic-block.h"
44 #include "tree-ssa-alias.h"
45 #include "internal-fn.h"
46 #include "gimple-expr.h"
47 #include "is-a.h"
48 #include "gimple.h"
49 #include "gimplify.h"
50 #include "gimple-iterator.h"
51 #include "gimple-walk.h"
52 #include "tree-iterator.h"
53 #include "bitmap.h"
54 #include "hash-map.h"
55 #include "plugin-api.h"
56 #include "ipa-ref.h"
57 #include "cgraph.h"
58 #include "tree-cfg.h"
59 #include "hashtab.h"
60 #include "rtl.h"
61 #include "flags.h"
62 #include "statistics.h"
63 #include "real.h"
64 #include "fixed-value.h"
65 #include "insn-config.h"
66 #include "expmed.h"
67 #include "dojump.h"
68 #include "explow.h"
69 #include "calls.h"
70 #include "emit-rtl.h"
71 #include "varasm.h"
72 #include "stmt.h"
73 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
74 #include "langhooks.h"
75 #include "gimple-low.h"
76 #include "gomp-constants.h"
79 /* The object of this pass is to lower the representation of a set of nested
80 functions in order to expose all of the gory details of the various
81 nonlocal references. We want to do this sooner rather than later, in
82 order to give us more freedom in emitting all of the functions in question.
84 Back in olden times, when gcc was young, we developed an insanely
85 complicated scheme whereby variables which were referenced nonlocally
86 were forced to live in the stack of the declaring function, and then
87 the nested functions magically discovered where these variables were
88 placed. In order for this scheme to function properly, it required
89 that the outer function be partially expanded, then we switch to
90 compiling the inner function, and once done with those we switch back
91 to compiling the outer function. Such delicate ordering requirements
92 make it difficult to do whole translation unit optimizations
93 involving such functions.
95 The implementation here is much more direct. Everything that can be
96 referenced by an inner function is a member of an explicitly created
97 structure herein called the "nonlocal frame struct". The incoming
98 static chain for a nested function is a pointer to this struct in
99 the parent. In this way, we settle on known offsets from a known
100 base, and so are decoupled from the logic that places objects in the
101 function's stack frame. More importantly, we don't have to wait for
102 that to happen -- since the compilation of the inner function is no
103 longer tied to a real stack frame, the nonlocal frame struct can be
104 allocated anywhere. Which means that the outer function is now
105 inlinable.
107 Theory of operation here is very simple. Iterate over all the
108 statements in all the functions (depth first) several times,
109 allocating structures and fields on demand. In general we want to
110 examine inner functions first, so that we can avoid making unnecessary
111 changes to outer functions.
113 The order of the passes matters a bit, in that later passes will be
114 skipped if it is discovered that the functions don't actually interact
115 at all. That is, they're nested in the lexical sense but could have
116 been written as independent functions without change. */
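/* As an illustrative sketch (not part of this pass), a source-level
   nested function such as

       int foo (int n)
       {
         int bar (void) { return n + 1; }
         return bar ();
       }

   is conceptually lowered into something like the following, where the
   struct, field and parameter names are only stand-ins for the FRAME.*
   type and the CHAIN.* argument created by this file:

       struct FRAME_foo { int n; };

       static int bar (struct FRAME_foo *chain)
       {
         return chain->n + 1;
       }

       int foo (int n)
       {
         struct FRAME_foo frame;
         frame.n = n;
         return bar (&frame);
       }
*/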
119 struct nesting_info
121 struct nesting_info *outer;
122 struct nesting_info *inner;
123 struct nesting_info *next;
125 hash_map<tree, tree> *field_map;
126 hash_map<tree, tree> *var_map;
127 hash_set<tree *> *mem_refs;
128 bitmap suppress_expansion;
130 tree context;
131 tree new_local_var_chain;
132 tree debug_var_chain;
133 tree frame_type;
134 tree frame_decl;
135 tree chain_field;
136 tree chain_decl;
137 tree nl_goto_field;
139 bool any_parm_remapped;
140 bool any_tramp_created;
141 char static_chain_added;
145 /* Iterate over the nesting tree, starting with ROOT, depth first. */
147 static inline struct nesting_info *
148 iter_nestinfo_start (struct nesting_info *root)
150 while (root->inner)
151 root = root->inner;
152 return root;
155 static inline struct nesting_info *
156 iter_nestinfo_next (struct nesting_info *node)
158 if (node->next)
159 return iter_nestinfo_start (node->next);
160 return node->outer;
163 #define FOR_EACH_NEST_INFO(I, ROOT) \
164 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
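/* Illustrative example of the traversal order: for a nesting tree

       A
       +-- B
       |   +-- D
       +-- C

   where A->inner happens to point at C (the most recently created
   child, see create_nesting_tree below) and C->next points at B,
   FOR_EACH_NEST_INFO visits C, D, B, A.  Whatever the sibling order,
   every function is visited before the function enclosing it.  */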
166 /* Obstack used for the bitmaps in the struct above. */
167 static struct bitmap_obstack nesting_info_bitmap_obstack;
170 /* We're working in so many different function contexts simultaneously
171 that create_tmp_var is dangerous. Prevent mishap. */
172 #define create_tmp_var cant_use_create_tmp_var_here_dummy
174 /* Like create_tmp_var, except record the variable for registration at
175 the given nesting level. */
177 static tree
178 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
180 tree tmp_var;
182 /* If the type is of variable size or a type which must be created by the
183 frontend, something is wrong. Note that we explicitly allow
184 incomplete types here, since we create them ourselves. */
185 gcc_assert (!TREE_ADDRESSABLE (type));
186 gcc_assert (!TYPE_SIZE_UNIT (type)
187 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
189 tmp_var = create_tmp_var_raw (type, prefix);
190 DECL_CONTEXT (tmp_var) = info->context;
191 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
192 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
193 if (TREE_CODE (type) == COMPLEX_TYPE
194 || TREE_CODE (type) == VECTOR_TYPE)
195 DECL_GIMPLE_REG_P (tmp_var) = 1;
197 info->new_local_var_chain = tmp_var;
199 return tmp_var;
202 /* Take the address of EXP to be used within function CONTEXT.
203 Mark it for addressability as necessary. */
205 tree
206 build_addr (tree exp, tree context)
208 tree base = exp;
209 tree save_context;
210 tree retval;
212 while (handled_component_p (base))
213 base = TREE_OPERAND (base, 0);
215 if (DECL_P (base))
216 TREE_ADDRESSABLE (base) = 1;
218 /* Building the ADDR_EXPR will compute a set of properties for
219 that ADDR_EXPR. Those properties are unfortunately context
220 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
222 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
223 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
224 way the properties for the ADDR_EXPR are computed properly. */
225 save_context = current_function_decl;
226 current_function_decl = context;
227 retval = build_fold_addr_expr (exp);
228 current_function_decl = save_context;
229 return retval;
232 /* Insert FIELD into TYPE, sorted by alignment requirements. */
234 void
235 insert_field_into_struct (tree type, tree field)
237 tree *p;
239 DECL_CONTEXT (field) = type;
241 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
242 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
243 break;
245 DECL_CHAIN (field) = *p;
246 *p = field;
248 /* Set correct alignment for frame struct type. */
249 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
250 TYPE_ALIGN (type) = DECL_ALIGN (field);
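/* Illustrative example: inserting fields whose DECL_ALIGNs are 4, 8, 2
   and 8 (in that order) into an empty frame type leaves the field
   chain in the order 8, 8, 4, 2.  Keeping the fields in non-increasing
   alignment order tends to minimize the padding needed when the frame
   type is laid out.  */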
253 /* Build or return the RECORD_TYPE that describes the frame state that is
254 shared between INFO->CONTEXT and its nested functions. This record will
255 not be complete until finalize_nesting_tree; up until that point we'll
256 be adding fields as necessary.
258 We also build the DECL that represents this frame in the function. */
260 static tree
261 get_frame_type (struct nesting_info *info)
263 tree type = info->frame_type;
264 if (!type)
266 char *name;
268 type = make_node (RECORD_TYPE);
270 name = concat ("FRAME.",
271 IDENTIFIER_POINTER (DECL_NAME (info->context)),
272 NULL);
273 TYPE_NAME (type) = get_identifier (name);
274 free (name);
276 info->frame_type = type;
277 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
278 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
280 /* ??? Always make it addressable for now, since it is meant to
281 be pointed to by the static chain pointer. This pessimizes
282 when it turns out that no static chains are needed because
283 the nested functions referencing non-local variables are not
284 reachable, but the true pessimization is to create the non-
285 local frame structure in the first place. */
286 TREE_ADDRESSABLE (info->frame_decl) = 1;
288 return type;
291 /* Return true if DECL should be referenced by pointer in the non-local
292 frame structure. */
294 static bool
295 use_pointer_in_frame (tree decl)
297 if (TREE_CODE (decl) == PARM_DECL)
299 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
300 sized decls, and inefficient to copy large aggregates. Don't bother
301 moving anything but scalar variables. */
302 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
304 else
306 /* Variable sized types make things "interesting" in the frame. */
307 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
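/* Illustrative examples of the rule above (hypothetical declarations):

       int i;            PARM_DECL, scalar        -> copied into the frame
       struct big s;     PARM_DECL, aggregate     -> pointer in the frame
       char buf[n];      local VLA, variable size -> pointer in the frame
       double d;         local, constant size     -> placed in the frame  */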
311 /* Given DECL, a non-locally accessed variable, find or create a field
312 in the non-local frame structure for the given nesting context. */
314 static tree
315 lookup_field_for_decl (struct nesting_info *info, tree decl,
316 enum insert_option insert)
318 if (insert == NO_INSERT)
320 tree *slot = info->field_map->get (decl);
321 return slot ? *slot : NULL_TREE;
324 tree *slot = &info->field_map->get_or_insert (decl);
325 if (!*slot)
327 tree field = make_node (FIELD_DECL);
328 DECL_NAME (field) = DECL_NAME (decl);
330 if (use_pointer_in_frame (decl))
332 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
333 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
334 DECL_NONADDRESSABLE_P (field) = 1;
336 else
338 TREE_TYPE (field) = TREE_TYPE (decl);
339 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
340 DECL_ALIGN (field) = DECL_ALIGN (decl);
341 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
342 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
343 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
344 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
347 insert_field_into_struct (get_frame_type (info), field);
348 *slot = field;
350 if (TREE_CODE (decl) == PARM_DECL)
351 info->any_parm_remapped = true;
354 return *slot;
357 /* Build or return the variable that holds the static chain within
358 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
360 static tree
361 get_chain_decl (struct nesting_info *info)
363 tree decl = info->chain_decl;
365 if (!decl)
367 tree type;
369 type = get_frame_type (info->outer);
370 type = build_pointer_type (type);
372 /* Note that this variable is *not* entered into any BIND_EXPR;
373 the construction of this variable is handled specially in
374 expand_function_start and initialize_inlined_parameters.
375 Note also that it's represented as a parameter. This is closer
376 to the truth, since the initial value does come from
377 the caller. */
378 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
379 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
380 DECL_ARTIFICIAL (decl) = 1;
381 DECL_IGNORED_P (decl) = 1;
382 TREE_USED (decl) = 1;
383 DECL_CONTEXT (decl) = info->context;
384 DECL_ARG_TYPE (decl) = type;
386 /* Tell tree-inline.c that we never write to this variable, so
387 it can copy-prop the replacement value immediately. */
388 TREE_READONLY (decl) = 1;
390 info->chain_decl = decl;
392 if (dump_file
393 && (dump_flags & TDF_DETAILS)
394 && !DECL_STATIC_CHAIN (info->context))
395 fprintf (dump_file, "Setting static-chain for %s\n",
396 lang_hooks.decl_printable_name (info->context, 2));
398 DECL_STATIC_CHAIN (info->context) = 1;
400 return decl;
403 /* Build or return the field within the non-local frame state that holds
404 the static chain for INFO->CONTEXT. This is the way to walk back up
405 multiple nesting levels. */
407 static tree
408 get_chain_field (struct nesting_info *info)
410 tree field = info->chain_field;
412 if (!field)
414 tree type = build_pointer_type (get_frame_type (info->outer));
416 field = make_node (FIELD_DECL);
417 DECL_NAME (field) = get_identifier ("__chain");
418 TREE_TYPE (field) = type;
419 DECL_ALIGN (field) = TYPE_ALIGN (type);
420 DECL_NONADDRESSABLE_P (field) = 1;
422 insert_field_into_struct (get_frame_type (info), field);
424 info->chain_field = field;
426 if (dump_file
427 && (dump_flags & TDF_DETAILS)
428 && !DECL_STATIC_CHAIN (info->context))
429 fprintf (dump_file, "Setting static-chain for %s\n",
430 lang_hooks.decl_printable_name (info->context, 2));
432 DECL_STATIC_CHAIN (info->context) = 1;
434 return field;
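/* Illustrative sketch: if function C is nested in B, which is nested
   in A, and C refers to a variable 'x' declared in A, then B's frame
   gets a "__chain" field pointing to A's frame, and the access in C
   conceptually becomes

       CHAIN->__chain->x

   where CHAIN is C's static chain argument, a pointer to B's frame
   (see also the comment above convert_nonlocal_reference_op).  */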
437 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
439 static tree
440 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
441 gcall *call)
443 tree t;
445 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
446 gimple_call_set_lhs (call, t);
447 if (! gsi_end_p (*gsi))
448 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
449 gsi_insert_before (gsi, call, GSI_SAME_STMT);
451 return t;
455 /* Copy EXP into a temporary. Allocate the temporary in the context of
456 INFO and insert the initialization statement before GSI. */
458 static tree
459 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
461 tree t;
462 gimple stmt;
464 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
465 stmt = gimple_build_assign (t, exp);
466 if (! gsi_end_p (*gsi))
467 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
468 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
470 return t;
474 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
476 static tree
477 gsi_gimplify_val (struct nesting_info *info, tree exp,
478 gimple_stmt_iterator *gsi)
480 if (is_gimple_val (exp))
481 return exp;
482 else
483 return init_tmp_var (info, exp, gsi);
486 /* Similarly, but copy from the temporary and insert the statement
487 after the iterator. */
489 static tree
490 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
492 tree t;
493 gimple stmt;
495 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
496 stmt = gimple_build_assign (exp, t);
497 if (! gsi_end_p (*gsi))
498 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
499 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
501 return t;
504 /* Build or return the type used to represent a nested function trampoline. */
506 static GTY(()) tree trampoline_type;
508 static tree
509 get_trampoline_type (struct nesting_info *info)
511 unsigned align, size;
512 tree t;
514 if (trampoline_type)
515 return trampoline_type;
517 align = TRAMPOLINE_ALIGNMENT;
518 size = TRAMPOLINE_SIZE;
520 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
521 then allocate extra space so that we can do dynamic alignment. */
522 if (align > STACK_BOUNDARY)
524 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
525 align = STACK_BOUNDARY;
528 t = build_index_type (size_int (size - 1));
529 t = build_array_type (char_type_node, t);
530 t = build_decl (DECL_SOURCE_LOCATION (info->context),
531 FIELD_DECL, get_identifier ("__data"), t);
532 DECL_ALIGN (t) = align;
533 DECL_USER_ALIGN (t) = 1;
535 trampoline_type = make_node (RECORD_TYPE);
536 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
537 TYPE_FIELDS (trampoline_type) = t;
538 layout_type (trampoline_type);
539 DECL_CONTEXT (t) = trampoline_type;
541 return trampoline_type;
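/* Illustrative sketch of the resulting type, with __data sized to
   TRAMPOLINE_SIZE bytes (24 is only an example value) and aligned to
   TRAMPOLINE_ALIGNMENT, or to STACK_BOUNDARY with extra slack as
   computed above:

       struct __builtin_trampoline
       {
         char __data[24];
       };

   One such field is added to the enclosing frame for each nested
   function that needs a trampoline (see lookup_tramp_for_decl).  */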
544 /* Given DECL, a nested function, find or create a field in the non-local
545 frame structure for a trampoline for this function. */
547 static tree
548 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
549 enum insert_option insert)
551 if (insert == NO_INSERT)
553 tree *slot = info->var_map->get (decl);
554 return slot ? *slot : NULL_TREE;
557 tree *slot = &info->var_map->get_or_insert (decl);
558 if (!*slot)
560 tree field = make_node (FIELD_DECL);
561 DECL_NAME (field) = DECL_NAME (decl);
562 TREE_TYPE (field) = get_trampoline_type (info);
563 TREE_ADDRESSABLE (field) = 1;
565 insert_field_into_struct (get_frame_type (info), field);
566 *slot = field;
568 info->any_tramp_created = true;
571 return *slot;
574 /* Build or return the field within the non-local frame state that holds
575 the non-local goto "jmp_buf". The buffer itself is maintained by the
576 rtl middle-end as dynamic stack space is allocated. */
578 static tree
579 get_nl_goto_field (struct nesting_info *info)
581 tree field = info->nl_goto_field;
582 if (!field)
584 unsigned size;
585 tree type;
587 /* For __builtin_nonlocal_goto, we need N words. The first is the
588 frame pointer, the rest is for the target's stack pointer save
589 area. The number of words is controlled by STACK_SAVEAREA_MODE;
590 not the best interface, but it'll do for now. */
591 if (Pmode == ptr_mode)
592 type = ptr_type_node;
593 else
594 type = lang_hooks.types.type_for_mode (Pmode, 1);
596 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
597 size = size / GET_MODE_SIZE (Pmode);
598 size = size + 1;
600 type = build_array_type
601 (type, build_index_type (size_int (size)));
603 field = make_node (FIELD_DECL);
604 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
605 TREE_TYPE (field) = type;
606 DECL_ALIGN (field) = TYPE_ALIGN (type);
607 TREE_ADDRESSABLE (field) = 1;
609 insert_field_into_struct (get_frame_type (info), field);
611 info->nl_goto_field = field;
614 return field;
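/* Worked example (hypothetical target): if STACK_SAVEAREA_MODE
   (SAVE_NONLOCAL) is 16 bytes wide and Pmode is 8 bytes, then
   size = 16 / 8 + 1 = 3, i.e. one word for the frame pointer plus two
   for the stack pointer save area; the inclusive index type 0..size
   built above then gives the array one extra word of slack.  */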
617 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
619 static void
620 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
621 struct nesting_info *info, gimple_seq *pseq)
623 struct walk_stmt_info wi;
625 memset (&wi, 0, sizeof (wi));
626 wi.info = info;
627 wi.val_only = true;
628 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
632 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
634 static inline void
635 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
636 struct nesting_info *info)
638 gimple_seq body = gimple_body (info->context);
639 walk_body (callback_stmt, callback_op, info, &body);
640 gimple_set_body (info->context, body);
643 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
645 static void
646 walk_gimple_omp_for (gomp_for *for_stmt,
647 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
648 struct nesting_info *info)
650 struct walk_stmt_info wi;
651 gimple_seq seq;
652 tree t;
653 size_t i;
655 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
657 seq = NULL;
658 memset (&wi, 0, sizeof (wi));
659 wi.info = info;
660 wi.gsi = gsi_last (seq);
662 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
664 wi.val_only = false;
665 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
666 &wi, NULL);
667 wi.val_only = true;
668 wi.is_lhs = false;
669 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
670 &wi, NULL);
672 wi.val_only = true;
673 wi.is_lhs = false;
674 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
675 &wi, NULL);
677 t = gimple_omp_for_incr (for_stmt, i);
678 gcc_assert (BINARY_CLASS_P (t));
679 wi.val_only = false;
680 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
681 wi.val_only = true;
682 wi.is_lhs = false;
683 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
686 seq = gsi_seq (wi.gsi);
687 if (!gimple_seq_empty_p (seq))
689 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
690 annotate_all_with_location (seq, gimple_location (for_stmt));
691 gimple_seq_add_seq (&pre_body, seq);
692 gimple_omp_for_set_pre_body (for_stmt, pre_body);
696 /* Similarly for ROOT and all functions nested underneath, depth first. */
698 static void
699 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
700 struct nesting_info *root)
702 struct nesting_info *n;
703 FOR_EACH_NEST_INFO (n, root)
704 walk_function (callback_stmt, callback_op, n);
708 /* We have to check for a fairly pathological case. The operands of a
709 nested function are to be interpreted in the context of the enclosing
710 function. So if any are variably-sized, they will get remapped when the
711 enclosing function is inlined. But that remapping would also have to be
712 done in the types of the PARM_DECLs of the nested function, meaning the
713 argument types of that function will disagree with the arguments in the
714 calls to that function. So we'd either have to make a copy of the nested
715 function corresponding to each time the enclosing function was inlined or
716 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
717 function. The former is not practical. The latter would still require
718 detecting this case to know when to add the conversions. So, for now at
719 least, we don't inline such an enclosing function.
721 We have to do that check recursively, so here we return whether
722 FNDECL has such a nested function. ORIG_FNDECL is the function we were
723 trying to inline; it is used for checking whether any argument is variably
724 modified by anything in it.
726 It would be better to do this in tree-inline.c so that we could give
727 the appropriate warning for why a function can't be inlined, but that's
728 too late since the nesting structure has already been flattened and
729 adding a flag just to record this fact seems a waste of a flag. */
731 static bool
732 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
734 struct cgraph_node *cgn = cgraph_node::get (fndecl);
735 tree arg;
737 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
739 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
740 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
741 return true;
743 if (check_for_nested_with_variably_modified (cgn->decl,
744 orig_fndecl))
745 return true;
748 return false;
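/* Illustrative example of the pathological case checked above, where
   the parameter type of the nested function mentions a variably
   modified type from the enclosing function:

       void outer (int n)
       {
         typedef int vec[n];
         void inner (vec v) { v[0] = 0; }
         vec a;
         inner (a);
       }

   If outer were inlined, n (and therefore vec) would be remapped in
   the inlined body but not in inner's PARM_DECL, so the argument types
   at calls to inner would no longer match.  Hence outer is marked
   DECL_UNINLINABLE in create_nesting_tree below.  */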
751 /* Construct our local datastructure describing the function nesting
752 tree rooted by CGN. */
754 static struct nesting_info *
755 create_nesting_tree (struct cgraph_node *cgn)
757 struct nesting_info *info = XCNEW (struct nesting_info);
758 info->field_map = new hash_map<tree, tree>;
759 info->var_map = new hash_map<tree, tree>;
760 info->mem_refs = new hash_set<tree *>;
761 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
762 info->context = cgn->decl;
764 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
766 struct nesting_info *sub = create_nesting_tree (cgn);
767 sub->outer = info;
768 sub->next = info->inner;
769 info->inner = sub;
772 /* See check_for_nested_with_variably_modified for a discussion of
773 why this has to be here. */
774 if (check_for_nested_with_variably_modified (info->context, info->context))
775 DECL_UNINLINABLE (info->context) = true;
777 return info;
780 /* Return an expression computing the static chain for TARGET_CONTEXT
781 from INFO->CONTEXT. Insert any necessary computations before GSI. */
783 static tree
784 get_static_chain (struct nesting_info *info, tree target_context,
785 gimple_stmt_iterator *gsi)
787 struct nesting_info *i;
788 tree x;
790 if (info->context == target_context)
792 x = build_addr (info->frame_decl, target_context);
794 else
796 x = get_chain_decl (info);
798 for (i = info->outer; i->context != target_context; i = i->outer)
800 tree field = get_chain_field (i);
802 x = build_simple_mem_ref (x);
803 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
804 x = init_tmp_var (info, x, gsi);
808 return x;
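/* Illustrative sketch: if INFO->CONTEXT is nested two levels below
   TARGET_CONTEXT, the value returned is built up roughly as

       tmp = CHAIN->__chain;

   i.e. one load of the __chain field, inserted before GSI, for each
   intermediate nesting level; tmp then points at TARGET_CONTEXT's
   frame and is the static chain to pass.  */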
812 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
813 frame as seen from INFO->CONTEXT. Insert any necessary computations
814 before GSI. */
816 static tree
817 get_frame_field (struct nesting_info *info, tree target_context,
818 tree field, gimple_stmt_iterator *gsi)
820 struct nesting_info *i;
821 tree x;
823 if (info->context == target_context)
825 /* Make sure frame_decl gets created. */
826 (void) get_frame_type (info);
827 x = info->frame_decl;
829 else
831 x = get_chain_decl (info);
833 for (i = info->outer; i->context != target_context; i = i->outer)
835 tree field = get_chain_field (i);
837 x = build_simple_mem_ref (x);
838 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
839 x = init_tmp_var (info, x, gsi);
842 x = build_simple_mem_ref (x);
845 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
846 return x;
849 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
851 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
852 in the nested function with DECL_VALUE_EXPR set to reference the true
853 variable in the parent function. This is used both for debug info
854 and in OMP lowering. */
856 static tree
857 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
859 tree target_context;
860 struct nesting_info *i;
861 tree x, field, new_decl;
863 tree *slot = &info->var_map->get_or_insert (decl);
865 if (*slot)
866 return *slot;
868 target_context = decl_function_context (decl);
870 /* A copy of the code in get_frame_field, but without the temporaries. */
871 if (info->context == target_context)
873 /* Make sure frame_decl gets created. */
874 (void) get_frame_type (info);
875 x = info->frame_decl;
876 i = info;
878 else
880 x = get_chain_decl (info);
881 for (i = info->outer; i->context != target_context; i = i->outer)
883 field = get_chain_field (i);
884 x = build_simple_mem_ref (x);
885 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
887 x = build_simple_mem_ref (x);
890 field = lookup_field_for_decl (i, decl, INSERT);
891 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
892 if (use_pointer_in_frame (decl))
893 x = build_simple_mem_ref (x);
895 /* ??? We should be remapping types as well, surely. */
896 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
897 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
898 DECL_CONTEXT (new_decl) = info->context;
899 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
900 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
901 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
902 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
903 TREE_READONLY (new_decl) = TREE_READONLY (decl);
904 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
905 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
906 if ((TREE_CODE (decl) == PARM_DECL
907 || TREE_CODE (decl) == RESULT_DECL
908 || TREE_CODE (decl) == VAR_DECL)
909 && DECL_BY_REFERENCE (decl))
910 DECL_BY_REFERENCE (new_decl) = 1;
912 SET_DECL_VALUE_EXPR (new_decl, x);
913 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
915 *slot = new_decl;
916 DECL_CHAIN (new_decl) = info->debug_var_chain;
917 info->debug_var_chain = new_decl;
919 if (!optimize
920 && info->context != target_context
921 && variably_modified_type_p (TREE_TYPE (decl), NULL))
922 note_nonlocal_vla_type (info, TREE_TYPE (decl));
924 return new_decl;
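/* Illustrative sketch: for a variable 'x' declared one nesting level
   up, the debug decl created here is a local VAR_DECL also named 'x'
   whose DECL_VALUE_EXPR is conceptually CHAIN->x (or *CHAIN->x when
   use_pointer_in_frame holds), so the debugger and the OMP lowering
   can refer to 'x' by name while the actual access goes through the
   frame.  */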
928 /* Callback for walk_gimple_stmt, rewrite all references to VAR
929 and PARM_DECLs that belong to outer functions.
931 The rewrite will involve some number of structure accesses back up
932 the static chain. E.g. for a variable FOO up one nesting level it'll
933 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
934 indirections apply to decls for which use_pointer_in_frame is true. */
936 static tree
937 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
939 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
940 struct nesting_info *const info = (struct nesting_info *) wi->info;
941 tree t = *tp;
943 *walk_subtrees = 0;
944 switch (TREE_CODE (t))
946 case VAR_DECL:
947 /* Non-automatic variables are never processed. */
948 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
949 break;
950 /* FALLTHRU */
952 case PARM_DECL:
953 if (decl_function_context (t) != info->context)
955 tree x;
956 wi->changed = true;
958 x = get_nonlocal_debug_decl (info, t);
959 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
961 tree target_context = decl_function_context (t);
962 struct nesting_info *i;
963 for (i = info->outer; i->context != target_context; i = i->outer)
964 continue;
965 x = lookup_field_for_decl (i, t, INSERT);
966 x = get_frame_field (info, target_context, x, &wi->gsi);
967 if (use_pointer_in_frame (t))
969 x = init_tmp_var (info, x, &wi->gsi);
970 x = build_simple_mem_ref (x);
974 if (wi->val_only)
976 if (wi->is_lhs)
977 x = save_tmp_var (info, x, &wi->gsi);
978 else
979 x = init_tmp_var (info, x, &wi->gsi);
982 *tp = x;
984 break;
986 case LABEL_DECL:
987 /* We're taking the address of a label from a parent function, but
988 this is not itself a non-local goto. Mark the label such that it
989 will not be deleted, much as we would with a label address in
990 static storage. */
991 if (decl_function_context (t) != info->context)
992 FORCED_LABEL (t) = 1;
993 break;
995 case ADDR_EXPR:
997 bool save_val_only = wi->val_only;
999 wi->val_only = false;
1000 wi->is_lhs = false;
1001 wi->changed = false;
1002 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
1003 wi->val_only = true;
1005 if (wi->changed)
1007 tree save_context;
1009 /* If we changed anything, we might no longer be directly
1010 referencing a decl. */
1011 save_context = current_function_decl;
1012 current_function_decl = info->context;
1013 recompute_tree_invariant_for_addr_expr (t);
1014 current_function_decl = save_context;
1016 /* If the callback converted the address argument in a context
1017 where we only accept variables (and min_invariant, presumably),
1018 then compute the address into a temporary. */
1019 if (save_val_only)
1020 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1021 t, &wi->gsi);
1024 break;
1026 case REALPART_EXPR:
1027 case IMAGPART_EXPR:
1028 case COMPONENT_REF:
1029 case ARRAY_REF:
1030 case ARRAY_RANGE_REF:
1031 case BIT_FIELD_REF:
1032 /* Go down this entire nest and just look at the final prefix and
1033 anything that describes the references. Otherwise, we lose track
1034 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1035 wi->val_only = true;
1036 wi->is_lhs = false;
1037 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1039 if (TREE_CODE (t) == COMPONENT_REF)
1040 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1041 NULL);
1042 else if (TREE_CODE (t) == ARRAY_REF
1043 || TREE_CODE (t) == ARRAY_RANGE_REF)
1045 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1046 wi, NULL);
1047 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1048 wi, NULL);
1049 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1050 wi, NULL);
1053 wi->val_only = false;
1054 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1055 break;
1057 case VIEW_CONVERT_EXPR:
1058 /* Just request to look at the subtrees, leaving val_only and lhs
1059 untouched. This might actually be for !val_only + lhs, in which
1060 case we don't want to force a replacement by a temporary. */
1061 *walk_subtrees = 1;
1062 break;
1064 default:
1065 if (!IS_TYPE_OR_DECL_P (t))
1067 *walk_subtrees = 1;
1068 wi->val_only = true;
1069 wi->is_lhs = false;
1071 break;
1074 return NULL_TREE;
1077 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1078 struct walk_stmt_info *);
1080 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1081 and PARM_DECLs that belong to outer functions. */
1083 static bool
1084 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1086 struct nesting_info *const info = (struct nesting_info *) wi->info;
1087 bool need_chain = false, need_stmts = false;
1088 tree clause, decl;
1089 int dummy;
1090 bitmap new_suppress;
1092 new_suppress = BITMAP_GGC_ALLOC ();
1093 bitmap_copy (new_suppress, info->suppress_expansion);
1095 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1097 switch (OMP_CLAUSE_CODE (clause))
1099 case OMP_CLAUSE_REDUCTION:
1100 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1101 need_stmts = true;
1102 goto do_decl_clause;
1104 case OMP_CLAUSE_LASTPRIVATE:
1105 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1106 need_stmts = true;
1107 goto do_decl_clause;
1109 case OMP_CLAUSE_LINEAR:
1110 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1111 need_stmts = true;
1112 wi->val_only = true;
1113 wi->is_lhs = false;
1114 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1115 &dummy, wi);
1116 goto do_decl_clause;
1118 case OMP_CLAUSE_PRIVATE:
1119 case OMP_CLAUSE_FIRSTPRIVATE:
1120 case OMP_CLAUSE_COPYPRIVATE:
1121 case OMP_CLAUSE_SHARED:
1122 do_decl_clause:
1123 decl = OMP_CLAUSE_DECL (clause);
1124 if (TREE_CODE (decl) == VAR_DECL
1125 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1126 break;
1127 if (decl_function_context (decl) != info->context)
1129 bitmap_set_bit (new_suppress, DECL_UID (decl));
1130 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1131 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1132 need_chain = true;
1134 break;
1136 case OMP_CLAUSE_SCHEDULE:
1137 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1138 break;
1139 /* FALLTHRU */
1140 case OMP_CLAUSE_FINAL:
1141 case OMP_CLAUSE_IF:
1142 case OMP_CLAUSE_NUM_THREADS:
1143 case OMP_CLAUSE_DEPEND:
1144 case OMP_CLAUSE_DEVICE:
1145 case OMP_CLAUSE_NUM_TEAMS:
1146 case OMP_CLAUSE_THREAD_LIMIT:
1147 case OMP_CLAUSE_SAFELEN:
1148 case OMP_CLAUSE__CILK_FOR_COUNT_:
1149 wi->val_only = true;
1150 wi->is_lhs = false;
1151 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1152 &dummy, wi);
1153 break;
1155 case OMP_CLAUSE_DIST_SCHEDULE:
1156 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1158 wi->val_only = true;
1159 wi->is_lhs = false;
1160 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1161 &dummy, wi);
1163 break;
1165 case OMP_CLAUSE_MAP:
1166 case OMP_CLAUSE_TO:
1167 case OMP_CLAUSE_FROM:
1168 if (OMP_CLAUSE_SIZE (clause))
1170 wi->val_only = true;
1171 wi->is_lhs = false;
1172 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1173 &dummy, wi);
1175 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1176 goto do_decl_clause;
1177 wi->val_only = true;
1178 wi->is_lhs = false;
1179 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1180 wi, NULL);
1181 break;
1183 case OMP_CLAUSE_ALIGNED:
1184 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1186 wi->val_only = true;
1187 wi->is_lhs = false;
1188 convert_nonlocal_reference_op
1189 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1191 /* Like do_decl_clause, but don't add any suppression. */
1192 decl = OMP_CLAUSE_DECL (clause);
1193 if (TREE_CODE (decl) == VAR_DECL
1194 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1195 break;
1196 if (decl_function_context (decl) != info->context)
1198 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1199 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1200 need_chain = true;
1202 break;
1204 case OMP_CLAUSE_NOWAIT:
1205 case OMP_CLAUSE_ORDERED:
1206 case OMP_CLAUSE_DEFAULT:
1207 case OMP_CLAUSE_COPYIN:
1208 case OMP_CLAUSE_COLLAPSE:
1209 case OMP_CLAUSE_UNTIED:
1210 case OMP_CLAUSE_MERGEABLE:
1211 case OMP_CLAUSE_PROC_BIND:
1212 break;
1214 default:
1215 gcc_unreachable ();
1219 info->suppress_expansion = new_suppress;
1221 if (need_stmts)
1222 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1223 switch (OMP_CLAUSE_CODE (clause))
1225 case OMP_CLAUSE_REDUCTION:
1226 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1228 tree old_context
1229 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1230 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1231 = info->context;
1232 walk_body (convert_nonlocal_reference_stmt,
1233 convert_nonlocal_reference_op, info,
1234 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1235 walk_body (convert_nonlocal_reference_stmt,
1236 convert_nonlocal_reference_op, info,
1237 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1238 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1239 = old_context;
1241 break;
1243 case OMP_CLAUSE_LASTPRIVATE:
1244 walk_body (convert_nonlocal_reference_stmt,
1245 convert_nonlocal_reference_op, info,
1246 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1247 break;
1249 case OMP_CLAUSE_LINEAR:
1250 walk_body (convert_nonlocal_reference_stmt,
1251 convert_nonlocal_reference_op, info,
1252 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1253 break;
1255 default:
1256 break;
1259 return need_chain;
1262 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1264 static void
1265 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1267 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1268 type = TREE_TYPE (type);
1270 if (TYPE_NAME (type)
1271 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1272 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1273 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1275 while (POINTER_TYPE_P (type)
1276 || TREE_CODE (type) == VECTOR_TYPE
1277 || TREE_CODE (type) == FUNCTION_TYPE
1278 || TREE_CODE (type) == METHOD_TYPE)
1279 type = TREE_TYPE (type);
1281 if (TREE_CODE (type) == ARRAY_TYPE)
1283 tree domain, t;
1285 note_nonlocal_vla_type (info, TREE_TYPE (type));
1286 domain = TYPE_DOMAIN (type);
1287 if (domain)
1289 t = TYPE_MIN_VALUE (domain);
1290 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1291 && decl_function_context (t) != info->context)
1292 get_nonlocal_debug_decl (info, t);
1293 t = TYPE_MAX_VALUE (domain);
1294 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1295 && decl_function_context (t) != info->context)
1296 get_nonlocal_debug_decl (info, t);
1301 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1302 in BLOCK. */
1304 static void
1305 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1307 tree var;
1309 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1310 if (TREE_CODE (var) == VAR_DECL
1311 && variably_modified_type_p (TREE_TYPE (var), NULL)
1312 && DECL_HAS_VALUE_EXPR_P (var)
1313 && decl_function_context (var) != info->context)
1314 note_nonlocal_vla_type (info, TREE_TYPE (var));
1317 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1318 PARM_DECLs that belong to outer functions. This handles statements
1319 that are not handled via the standard recursion done in
1320 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1321 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1322 operands of STMT have been handled by this function. */
1324 static tree
1325 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1326 struct walk_stmt_info *wi)
1328 struct nesting_info *info = (struct nesting_info *) wi->info;
1329 tree save_local_var_chain;
1330 bitmap save_suppress;
1331 gimple stmt = gsi_stmt (*gsi);
1333 switch (gimple_code (stmt))
1335 case GIMPLE_GOTO:
1336 /* Don't walk non-local gotos for now. */
1337 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1339 wi->val_only = true;
1340 wi->is_lhs = false;
1341 *handled_ops_p = true;
1342 return NULL_TREE;
1344 break;
1346 case GIMPLE_OMP_PARALLEL:
1347 case GIMPLE_OMP_TASK:
1348 save_suppress = info->suppress_expansion;
1349 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1350 wi))
1352 tree c, decl;
1353 decl = get_chain_decl (info);
1354 c = build_omp_clause (gimple_location (stmt),
1355 OMP_CLAUSE_FIRSTPRIVATE);
1356 OMP_CLAUSE_DECL (c) = decl;
1357 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1358 gimple_omp_taskreg_set_clauses (stmt, c);
1361 save_local_var_chain = info->new_local_var_chain;
1362 info->new_local_var_chain = NULL;
1364 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1365 info, gimple_omp_body_ptr (stmt));
1367 if (info->new_local_var_chain)
1368 declare_vars (info->new_local_var_chain,
1369 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1370 false);
1371 info->new_local_var_chain = save_local_var_chain;
1372 info->suppress_expansion = save_suppress;
1373 break;
1375 case GIMPLE_OMP_FOR:
1376 save_suppress = info->suppress_expansion;
1377 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1378 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1379 convert_nonlocal_reference_stmt,
1380 convert_nonlocal_reference_op, info);
1381 walk_body (convert_nonlocal_reference_stmt,
1382 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1383 info->suppress_expansion = save_suppress;
1384 break;
1386 case GIMPLE_OMP_SECTIONS:
1387 save_suppress = info->suppress_expansion;
1388 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1389 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1390 info, gimple_omp_body_ptr (stmt));
1391 info->suppress_expansion = save_suppress;
1392 break;
1394 case GIMPLE_OMP_SINGLE:
1395 save_suppress = info->suppress_expansion;
1396 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1397 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1398 info, gimple_omp_body_ptr (stmt));
1399 info->suppress_expansion = save_suppress;
1400 break;
1402 case GIMPLE_OMP_TARGET:
1403 if (!is_gimple_omp_offloaded (stmt))
1405 save_suppress = info->suppress_expansion;
1406 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1407 wi);
1408 info->suppress_expansion = save_suppress;
1409 walk_body (convert_nonlocal_reference_stmt,
1410 convert_nonlocal_reference_op, info,
1411 gimple_omp_body_ptr (stmt));
1412 break;
1414 save_suppress = info->suppress_expansion;
1415 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1416 wi))
1418 tree c, decl;
1419 decl = get_chain_decl (info);
1420 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1421 OMP_CLAUSE_DECL (c) = decl;
1422 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1423 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1424 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1425 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1428 save_local_var_chain = info->new_local_var_chain;
1429 info->new_local_var_chain = NULL;
1431 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1432 info, gimple_omp_body_ptr (stmt));
1434 if (info->new_local_var_chain)
1435 declare_vars (info->new_local_var_chain,
1436 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1437 false);
1438 info->new_local_var_chain = save_local_var_chain;
1439 info->suppress_expansion = save_suppress;
1440 break;
1442 case GIMPLE_OMP_TEAMS:
1443 save_suppress = info->suppress_expansion;
1444 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1445 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1446 info, gimple_omp_body_ptr (stmt));
1447 info->suppress_expansion = save_suppress;
1448 break;
1450 case GIMPLE_OMP_SECTION:
1451 case GIMPLE_OMP_MASTER:
1452 case GIMPLE_OMP_TASKGROUP:
1453 case GIMPLE_OMP_ORDERED:
1454 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1455 info, gimple_omp_body_ptr (stmt));
1456 break;
1458 case GIMPLE_BIND:
1460 gbind *bind_stmt = as_a <gbind *> (stmt);
1461 if (!optimize && gimple_bind_block (bind_stmt))
1462 note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));
1464 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1465 if (TREE_CODE (var) == NAMELIST_DECL)
1467 /* Adjust decls mentioned in NAMELIST_DECL. */
1468 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1469 tree decl;
1470 unsigned int i;
1472 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1474 if (TREE_CODE (decl) == VAR_DECL
1475 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1476 continue;
1477 if (decl_function_context (decl) != info->context)
1478 CONSTRUCTOR_ELT (decls, i)->value
1479 = get_nonlocal_debug_decl (info, decl);
1483 *handled_ops_p = false;
1484 return NULL_TREE;
1486 case GIMPLE_COND:
1487 wi->val_only = true;
1488 wi->is_lhs = false;
1489 *handled_ops_p = false;
1490 return NULL_TREE;
1492 default:
1493 /* For every other statement that we are not interested in
1494 handling here, let the walker traverse the operands. */
1495 *handled_ops_p = false;
1496 return NULL_TREE;
1499 /* We have handled all of STMT's operands, no need to traverse the operands. */
1500 *handled_ops_p = true;
1501 return NULL_TREE;
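/* Illustrative sketch of the GIMPLE_OMP_PARALLEL / GIMPLE_OMP_TASK
   case above: when the body of the parallel region refers to a
   variable of an outer function, the static chain must be available
   inside the region, so the directive is conceptually rewritten from

       #pragma omp parallel shared(...)
         ... CHAIN->x ...

   to

       #pragma omp parallel firstprivate(CHAIN) shared(...)
         ... CHAIN->x ...

   where CHAIN is the chain decl returned by get_chain_decl.  */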
1505 /* A subroutine of convert_local_reference. Create a local variable
1506 in the parent function with DECL_VALUE_EXPR set to reference the
1507 field in FRAME. This is used both for debug info and in OMP
1508 lowering. */
1510 static tree
1511 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1513 tree x, new_decl;
1515 tree *slot = &info->var_map->get_or_insert (decl);
1516 if (*slot)
1517 return *slot;
1519 /* Make sure frame_decl gets created. */
1520 (void) get_frame_type (info);
1521 x = info->frame_decl;
1522 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1524 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1525 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1526 DECL_CONTEXT (new_decl) = info->context;
1527 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1528 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1529 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1530 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1531 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1532 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1533 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1534 if ((TREE_CODE (decl) == PARM_DECL
1535 || TREE_CODE (decl) == RESULT_DECL
1536 || TREE_CODE (decl) == VAR_DECL)
1537 && DECL_BY_REFERENCE (decl))
1538 DECL_BY_REFERENCE (new_decl) = 1;
1540 SET_DECL_VALUE_EXPR (new_decl, x);
1541 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1542 *slot = new_decl;
1544 DECL_CHAIN (new_decl) = info->debug_var_chain;
1545 info->debug_var_chain = new_decl;
1547 /* Do not emit debug info twice. */
1548 DECL_IGNORED_P (decl) = 1;
1550 return new_decl;
1554 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1555 and PARM_DECLs that were referenced by inner nested functions.
1556 The rewrite will be a structure reference to the local frame variable. */
1558 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1560 static tree
1561 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1563 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1564 struct nesting_info *const info = (struct nesting_info *) wi->info;
1565 tree t = *tp, field, x;
1566 bool save_val_only;
1568 *walk_subtrees = 0;
1569 switch (TREE_CODE (t))
1571 case VAR_DECL:
1572 /* Non-automatic variables are never processed. */
1573 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1574 break;
1575 /* FALLTHRU */
1577 case PARM_DECL:
1578 if (decl_function_context (t) == info->context)
1580 /* If we copied a pointer to the frame, then the original decl
1581 is used unchanged in the parent function. */
1582 if (use_pointer_in_frame (t))
1583 break;
1585 /* No need to transform anything if no child references the
1586 variable. */
1587 field = lookup_field_for_decl (info, t, NO_INSERT);
1588 if (!field)
1589 break;
1590 wi->changed = true;
1592 x = get_local_debug_decl (info, t, field);
1593 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1594 x = get_frame_field (info, info->context, field, &wi->gsi);
1596 if (wi->val_only)
1598 if (wi->is_lhs)
1599 x = save_tmp_var (info, x, &wi->gsi);
1600 else
1601 x = init_tmp_var (info, x, &wi->gsi);
1604 *tp = x;
1606 break;
1608 case ADDR_EXPR:
1609 save_val_only = wi->val_only;
1610 wi->val_only = false;
1611 wi->is_lhs = false;
1612 wi->changed = false;
1613 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1614 wi->val_only = save_val_only;
1616 /* If we converted anything ... */
1617 if (wi->changed)
1619 tree save_context;
1621 /* Then the frame decl is now addressable. */
1622 TREE_ADDRESSABLE (info->frame_decl) = 1;
1624 save_context = current_function_decl;
1625 current_function_decl = info->context;
1626 recompute_tree_invariant_for_addr_expr (t);
1627 current_function_decl = save_context;
1629 /* If we are in a context where we only accept values, then
1630 compute the address into a temporary. */
1631 if (save_val_only)
1632 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1633 t, &wi->gsi);
1635 break;
1637 case REALPART_EXPR:
1638 case IMAGPART_EXPR:
1639 case COMPONENT_REF:
1640 case ARRAY_REF:
1641 case ARRAY_RANGE_REF:
1642 case BIT_FIELD_REF:
1643 /* Go down this entire nest and just look at the final prefix and
1644 anything that describes the references. Otherwise, we lose track
1645 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1646 save_val_only = wi->val_only;
1647 wi->val_only = true;
1648 wi->is_lhs = false;
1649 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1651 if (TREE_CODE (t) == COMPONENT_REF)
1652 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1653 NULL);
1654 else if (TREE_CODE (t) == ARRAY_REF
1655 || TREE_CODE (t) == ARRAY_RANGE_REF)
1657 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1658 NULL);
1659 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1660 NULL);
1661 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1662 NULL);
1665 wi->val_only = false;
1666 walk_tree (tp, convert_local_reference_op, wi, NULL);
1667 wi->val_only = save_val_only;
1668 break;
1670 case MEM_REF:
1671 save_val_only = wi->val_only;
1672 wi->val_only = true;
1673 wi->is_lhs = false;
1674 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1675 wi, NULL);
1676 /* We need to re-fold the MEM_REF as component references as
1677 part of an ADDR_EXPR address are not allowed. But we cannot
1678 fold here, as the chain record type is not yet finalized. */
1679 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1680 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1681 info->mem_refs->add (tp);
1682 wi->val_only = save_val_only;
1683 break;
1685 case VIEW_CONVERT_EXPR:
1686 /* Just request to look at the subtrees, leaving val_only and lhs
1687 untouched. This might actually be for !val_only + lhs, in which
1688 case we don't want to force a replacement by a temporary. */
1689 *walk_subtrees = 1;
1690 break;
1692 default:
1693 if (!IS_TYPE_OR_DECL_P (t))
1695 *walk_subtrees = 1;
1696 wi->val_only = true;
1697 wi->is_lhs = false;
1699 break;
1702 return NULL_TREE;
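/* Illustrative sketch of the MEM_REF case above: a reference such as
   *&x, where 'x' has been moved into the frame, is rewritten to
   *&FRAME.x, i.e. a MEM_REF whose address operand is an ADDR_EXPR of a
   COMPONENT_REF.  Component references inside an ADDR_EXPR address are
   not allowed there, so the location is recorded in info->mem_refs and
   folded back to a plain component reference once the frame type has
   been finalized.  */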
1705 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1706 struct walk_stmt_info *);
1708 /* Helper for convert_local_reference. Convert all the references in
1709 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1711 static bool
1712 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1714 struct nesting_info *const info = (struct nesting_info *) wi->info;
1715 bool need_frame = false, need_stmts = false;
1716 tree clause, decl;
1717 int dummy;
1718 bitmap new_suppress;
1720 new_suppress = BITMAP_GGC_ALLOC ();
1721 bitmap_copy (new_suppress, info->suppress_expansion);
1723 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1725 switch (OMP_CLAUSE_CODE (clause))
1727 case OMP_CLAUSE_REDUCTION:
1728 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1729 need_stmts = true;
1730 goto do_decl_clause;
1732 case OMP_CLAUSE_LASTPRIVATE:
1733 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1734 need_stmts = true;
1735 goto do_decl_clause;
1737 case OMP_CLAUSE_LINEAR:
1738 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1739 need_stmts = true;
1740 wi->val_only = true;
1741 wi->is_lhs = false;
1742 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1743 wi);
1744 goto do_decl_clause;
1746 case OMP_CLAUSE_PRIVATE:
1747 case OMP_CLAUSE_FIRSTPRIVATE:
1748 case OMP_CLAUSE_COPYPRIVATE:
1749 case OMP_CLAUSE_SHARED:
1750 do_decl_clause:
1751 decl = OMP_CLAUSE_DECL (clause);
1752 if (TREE_CODE (decl) == VAR_DECL
1753 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1754 break;
1755 if (decl_function_context (decl) == info->context
1756 && !use_pointer_in_frame (decl))
1758 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1759 if (field)
1761 bitmap_set_bit (new_suppress, DECL_UID (decl));
1762 OMP_CLAUSE_DECL (clause)
1763 = get_local_debug_decl (info, decl, field);
1764 need_frame = true;
1767 break;
1769 case OMP_CLAUSE_SCHEDULE:
1770 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1771 break;
1772 /* FALLTHRU */
1773 case OMP_CLAUSE_FINAL:
1774 case OMP_CLAUSE_IF:
1775 case OMP_CLAUSE_NUM_THREADS:
1776 case OMP_CLAUSE_DEPEND:
1777 case OMP_CLAUSE_DEVICE:
1778 case OMP_CLAUSE_NUM_TEAMS:
1779 case OMP_CLAUSE_THREAD_LIMIT:
1780 case OMP_CLAUSE_SAFELEN:
1781 case OMP_CLAUSE__CILK_FOR_COUNT_:
1782 wi->val_only = true;
1783 wi->is_lhs = false;
1784 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1785 wi);
1786 break;
1788 case OMP_CLAUSE_DIST_SCHEDULE:
1789 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1791 wi->val_only = true;
1792 wi->is_lhs = false;
1793 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1794 &dummy, wi);
1796 break;
1798 case OMP_CLAUSE_MAP:
1799 case OMP_CLAUSE_TO:
1800 case OMP_CLAUSE_FROM:
1801 if (OMP_CLAUSE_SIZE (clause))
1803 wi->val_only = true;
1804 wi->is_lhs = false;
1805 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1806 &dummy, wi);
1808 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1809 goto do_decl_clause;
1810 wi->val_only = true;
1811 wi->is_lhs = false;
1812 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1813 wi, NULL);
1814 break;
1816 case OMP_CLAUSE_ALIGNED:
1817 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1819 wi->val_only = true;
1820 wi->is_lhs = false;
1821 convert_local_reference_op
1822 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1824 /* Like do_decl_clause, but don't add any suppression. */
1825 decl = OMP_CLAUSE_DECL (clause);
1826 if (TREE_CODE (decl) == VAR_DECL
1827 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1828 break;
1829 if (decl_function_context (decl) == info->context
1830 && !use_pointer_in_frame (decl))
1832 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1833 if (field)
1835 OMP_CLAUSE_DECL (clause)
1836 = get_local_debug_decl (info, decl, field);
1837 need_frame = true;
1840 break;
1842 case OMP_CLAUSE_NOWAIT:
1843 case OMP_CLAUSE_ORDERED:
1844 case OMP_CLAUSE_DEFAULT:
1845 case OMP_CLAUSE_COPYIN:
1846 case OMP_CLAUSE_COLLAPSE:
1847 case OMP_CLAUSE_UNTIED:
1848 case OMP_CLAUSE_MERGEABLE:
1849 case OMP_CLAUSE_PROC_BIND:
1850 break;
1852 default:
1853 gcc_unreachable ();
1857 info->suppress_expansion = new_suppress;
1859 if (need_stmts)
1860 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1861 switch (OMP_CLAUSE_CODE (clause))
1863 case OMP_CLAUSE_REDUCTION:
1864 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1866 tree old_context
1867 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1868 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1869 = info->context;
1870 walk_body (convert_local_reference_stmt,
1871 convert_local_reference_op, info,
1872 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1873 walk_body (convert_local_reference_stmt,
1874 convert_local_reference_op, info,
1875 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1876 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1877 = old_context;
1879 break;
1881 case OMP_CLAUSE_LASTPRIVATE:
1882 walk_body (convert_local_reference_stmt,
1883 convert_local_reference_op, info,
1884 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1885 break;
1887 case OMP_CLAUSE_LINEAR:
1888 walk_body (convert_local_reference_stmt,
1889 convert_local_reference_op, info,
1890 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1891 break;
1893 default:
1894 break;
1897 return need_frame;
1901 /* Called via walk_function+walk_gimple_stmt, rewrite all references to
1902 VAR_DECLs and PARM_DECLs that were referenced by inner nested functions.
1903 The rewrite will be a structure reference to the local frame variable. */
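/* Illustrative sketch only (an assumed example, not from any testcase):
   given

     void outer (void)
     {
       int x = 0;
       void inner (void) { x++; }
       inner ();
       x++;
     }

   once INNER forces X into the frame struct, the reference to X in
   OUTER's own body is rewritten as a COMPONENT_REF of the frame
   variable, conceptually FRAME.x = FRAME.x + 1, so that both
   functions agree on a single location for X.  */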
1905 static tree
1906 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1907 struct walk_stmt_info *wi)
1909 struct nesting_info *info = (struct nesting_info *) wi->info;
1910 tree save_local_var_chain;
1911 bitmap save_suppress;
1912 gimple stmt = gsi_stmt (*gsi);
1914 switch (gimple_code (stmt))
1916 case GIMPLE_OMP_PARALLEL:
1917 case GIMPLE_OMP_TASK:
1918 save_suppress = info->suppress_expansion;
1919 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1920 wi))
1922 tree c;
1923 (void) get_frame_type (info);
1924 c = build_omp_clause (gimple_location (stmt),
1925 OMP_CLAUSE_SHARED);
1926 OMP_CLAUSE_DECL (c) = info->frame_decl;
1927 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1928 gimple_omp_taskreg_set_clauses (stmt, c);
1931 save_local_var_chain = info->new_local_var_chain;
1932 info->new_local_var_chain = NULL;
1934 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1935 gimple_omp_body_ptr (stmt));
1937 if (info->new_local_var_chain)
1938 declare_vars (info->new_local_var_chain,
1939 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1940 info->new_local_var_chain = save_local_var_chain;
1941 info->suppress_expansion = save_suppress;
1942 break;
1944 case GIMPLE_OMP_FOR:
1945 save_suppress = info->suppress_expansion;
1946 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1947 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1948 convert_local_reference_stmt,
1949 convert_local_reference_op, info);
1950 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1951 info, gimple_omp_body_ptr (stmt));
1952 info->suppress_expansion = save_suppress;
1953 break;
1955 case GIMPLE_OMP_SECTIONS:
1956 save_suppress = info->suppress_expansion;
1957 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1958 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1959 info, gimple_omp_body_ptr (stmt));
1960 info->suppress_expansion = save_suppress;
1961 break;
1963 case GIMPLE_OMP_SINGLE:
1964 save_suppress = info->suppress_expansion;
1965 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1966 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1967 info, gimple_omp_body_ptr (stmt));
1968 info->suppress_expansion = save_suppress;
1969 break;
1971 case GIMPLE_OMP_TARGET:
1972 if (!is_gimple_omp_offloaded (stmt))
1974 save_suppress = info->suppress_expansion;
1975 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1976 info->suppress_expansion = save_suppress;
1977 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1978 info, gimple_omp_body_ptr (stmt));
1979 break;
1981 save_suppress = info->suppress_expansion;
1982 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
1984 tree c;
1985 (void) get_frame_type (info);
1986 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1987 OMP_CLAUSE_DECL (c) = info->frame_decl;
1988 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
1989 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
1990 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1991 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1994 save_local_var_chain = info->new_local_var_chain;
1995 info->new_local_var_chain = NULL;
1997 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1998 gimple_omp_body_ptr (stmt));
2000 if (info->new_local_var_chain)
2001 declare_vars (info->new_local_var_chain,
2002 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2003 info->new_local_var_chain = save_local_var_chain;
2004 info->suppress_expansion = save_suppress;
2005 break;
2007 case GIMPLE_OMP_TEAMS:
2008 save_suppress = info->suppress_expansion;
2009 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2010 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2011 info, gimple_omp_body_ptr (stmt));
2012 info->suppress_expansion = save_suppress;
2013 break;
2015 case GIMPLE_OMP_SECTION:
2016 case GIMPLE_OMP_MASTER:
2017 case GIMPLE_OMP_TASKGROUP:
2018 case GIMPLE_OMP_ORDERED:
2019 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2020 info, gimple_omp_body_ptr (stmt));
2021 break;
2023 case GIMPLE_COND:
2024 wi->val_only = true;
2025 wi->is_lhs = false;
2026 *handled_ops_p = false;
2027 return NULL_TREE;
2029 case GIMPLE_ASSIGN:
2030 if (gimple_clobber_p (stmt))
2032 tree lhs = gimple_assign_lhs (stmt);
2033 if (!use_pointer_in_frame (lhs)
2034 && lookup_field_for_decl (info, lhs, NO_INSERT))
2036 gsi_replace (gsi, gimple_build_nop (), true);
2037 break;
2040 *handled_ops_p = false;
2041 return NULL_TREE;
2043 case GIMPLE_BIND:
2044 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2045 var;
2046 var = DECL_CHAIN (var))
2047 if (TREE_CODE (var) == NAMELIST_DECL)
2049 /* Adjust decls mentioned in NAMELIST_DECL. */
2050 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2051 tree decl;
2052 unsigned int i;
2054 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2056 if (TREE_CODE (decl) == VAR_DECL
2057 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2058 continue;
2059 if (decl_function_context (decl) == info->context
2060 && !use_pointer_in_frame (decl))
2062 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2063 if (field)
2065 CONSTRUCTOR_ELT (decls, i)->value
2066 = get_local_debug_decl (info, decl, field);
2072 *handled_ops_p = false;
2073 return NULL_TREE;
2075 default:
2076 /* For every other statement that we are not interested in
2077 handling here, let the walker traverse the operands. */
2078 *handled_ops_p = false;
2079 return NULL_TREE;
2082 /* Indicate that we have handled all the operands ourselves. */
2083 *handled_ops_p = true;
2084 return NULL_TREE;
2088 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2089 that reference labels from outer functions. The rewrite will be a
2090 call to __builtin_nonlocal_goto. */
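/* For orientation, an assumed example (not from the sources): if a
   nested function does

     goto out;

   where OUT is a label declared in the enclosing function, the
   GIMPLE_GOTO cannot be expanded as an ordinary jump.  It is replaced
   by a call of the form

     __builtin_nonlocal_goto (&NONLOCAL_OUT, &CHAIN->nl_goto_field);

   where NONLOCAL_OUT is the artificial DECL_NONLOCAL label created
   below and the second argument locates the save area in the
   enclosing function's frame.  */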
2092 static tree
2093 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2094 struct walk_stmt_info *wi)
2096 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2097 tree label, new_label, target_context, x, field;
2098 gcall *call;
2099 gimple stmt = gsi_stmt (*gsi);
2101 if (gimple_code (stmt) != GIMPLE_GOTO)
2103 *handled_ops_p = false;
2104 return NULL_TREE;
2107 label = gimple_goto_dest (stmt);
2108 if (TREE_CODE (label) != LABEL_DECL)
2110 *handled_ops_p = false;
2111 return NULL_TREE;
2114 target_context = decl_function_context (label);
2115 if (target_context == info->context)
2117 *handled_ops_p = false;
2118 return NULL_TREE;
2121 for (i = info->outer; target_context != i->context; i = i->outer)
2122 continue;
2124 /* The original user label may also be used for a normal goto; therefore
2125 we must create a new label that will actually receive the abnormal
2126 control transfer. This new label will be marked LABEL_NONLOCAL; this
2127 mark will trigger proper behavior in the cfg, as well as cause the
2128 (hairy target-specific) non-local goto receiver code to be generated
2129 when we expand rtl. Enter this association into var_map so that we
2130 can insert the new label into the IL during a second pass. */
2131 tree *slot = &i->var_map->get_or_insert (label);
2132 if (*slot == NULL)
2134 new_label = create_artificial_label (UNKNOWN_LOCATION);
2135 DECL_NONLOCAL (new_label) = 1;
2136 *slot = new_label;
2138 else
2139 new_label = *slot;
2141 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2142 field = get_nl_goto_field (i);
2143 x = get_frame_field (info, target_context, field, gsi);
2144 x = build_addr (x, target_context);
2145 x = gsi_gimplify_val (info, x, gsi);
2146 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2147 2, build_addr (new_label, target_context), x);
2148 gsi_replace (gsi, call, false);
2150 /* We have handled all of STMT's operands, no need to keep going. */
2151 *handled_ops_p = true;
2152 return NULL_TREE;
2156 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2157 are referenced via nonlocal goto from a nested function. The rewrite
2158 will involve installing a newly generated DECL_NONLOCAL label, and
2159 (potentially) a branch around the rtl gunk that is assumed to be
2160 attached to such a label. */
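/* Sketched effect (illustrative only): for a label L that is the
   target of a nonlocal goto, the surrounding statement list becomes
   roughly

     goto L;           (emitted only if the previous stmt may fall thru)
     NONLOCAL_L:       (the DECL_NONLOCAL label recorded in var_map)
     L:

   so abnormal transfers land on NONLOCAL_L while ordinary control
   flow branches around it to L.  */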
2162 static tree
2163 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2164 struct walk_stmt_info *wi)
2166 struct nesting_info *const info = (struct nesting_info *) wi->info;
2167 tree label, new_label;
2168 gimple_stmt_iterator tmp_gsi;
2169 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2171 if (!stmt)
2173 *handled_ops_p = false;
2174 return NULL_TREE;
2177 label = gimple_label_label (stmt);
2179 tree *slot = info->var_map->get (label);
2180 if (!slot)
2182 *handled_ops_p = false;
2183 return NULL_TREE;
2186 /* If there's any possibility that the previous statement falls through,
2187 then we must branch around the new non-local label. */
2188 tmp_gsi = wi->gsi;
2189 gsi_prev (&tmp_gsi);
2190 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2192 gimple stmt = gimple_build_goto (label);
2193 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2196 new_label = (tree) *slot;
2197 stmt = gimple_build_label (new_label);
2198 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2200 *handled_ops_p = true;
2201 return NULL_TREE;
2205 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2206 of nested functions that require the use of trampolines. The rewrite
2207 will involve a reference to a trampoline generated for the occasion. */
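/* An assumed example (for illustration only):

     void outer (void)
     {
       int x;
       void inner (void) { x++; }
       void (*fp) (void) = inner;
       fp ();
     }

   Since INNER needs OUTER's static chain, the bare address of INNER
   is not callable through FP.  The ADDR_EXPR is rewritten to the
   address of a per-instance trampoline stored in OUTER's frame, using
   __builtin_adjust_trampoline as built below.  */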
2209 static tree
2210 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2212 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2213 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2214 tree t = *tp, decl, target_context, x, builtin;
2215 gcall *call;
2217 *walk_subtrees = 0;
2218 switch (TREE_CODE (t))
2220 case ADDR_EXPR:
2221 /* Build
2222 T.1 = &CHAIN->tramp;
2223 T.2 = __builtin_adjust_trampoline (T.1);
2224 T.3 = (func_type)T.2;
2225 */
2227 decl = TREE_OPERAND (t, 0);
2228 if (TREE_CODE (decl) != FUNCTION_DECL)
2229 break;
2231 /* Only need to process nested functions. */
2232 target_context = decl_function_context (decl);
2233 if (!target_context)
2234 break;
2236 /* If the nested function doesn't use a static chain, then
2237 it doesn't need a trampoline. */
2238 if (!DECL_STATIC_CHAIN (decl))
2239 break;
2241 /* If we don't want a trampoline, then don't build one. */
2242 if (TREE_NO_TRAMPOLINE (t))
2243 break;
2245 /* Lookup the immediate parent of the callee, as that's where
2246 we need to insert the trampoline. */
2247 for (i = info; i->context != target_context; i = i->outer)
2248 continue;
2249 x = lookup_tramp_for_decl (i, decl, INSERT);
2251 /* Compute the address of the field holding the trampoline. */
2252 x = get_frame_field (info, target_context, x, &wi->gsi);
2253 x = build_addr (x, target_context);
2254 x = gsi_gimplify_val (info, x, &wi->gsi);
2256 /* Do machine-specific ugliness. Normally this will involve
2257 computing extra alignment, but it can really be anything. */
2258 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2259 call = gimple_build_call (builtin, 1, x);
2260 x = init_tmp_var_with_call (info, &wi->gsi, call);
2262 /* Cast back to the proper function type. */
2263 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2264 x = init_tmp_var (info, x, &wi->gsi);
2266 *tp = x;
2267 break;
2269 default:
2270 if (!IS_TYPE_OR_DECL_P (t))
2271 *walk_subtrees = 1;
2272 break;
2275 return NULL_TREE;
2279 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2280 to addresses of nested functions that require the use of
2281 trampolines. The rewrite will involve a reference to a trampoline
2282 generated for the occasion. */
2284 static tree
2285 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2286 struct walk_stmt_info *wi)
2288 struct nesting_info *info = (struct nesting_info *) wi->info;
2289 gimple stmt = gsi_stmt (*gsi);
2291 switch (gimple_code (stmt))
2293 case GIMPLE_CALL:
2295 /* Only walk call arguments, lest we generate trampolines for
2296 direct calls. */
2297 unsigned long i, nargs = gimple_call_num_args (stmt);
2298 for (i = 0; i < nargs; i++)
2299 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2300 wi, NULL);
2301 break;
2304 case GIMPLE_OMP_TARGET:
2305 if (!is_gimple_omp_offloaded (stmt))
2307 *handled_ops_p = false;
2308 return NULL_TREE;
2310 /* FALLTHRU */
2311 case GIMPLE_OMP_PARALLEL:
2312 case GIMPLE_OMP_TASK:
2314 tree save_local_var_chain;
2315 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2316 save_local_var_chain = info->new_local_var_chain;
2317 info->new_local_var_chain = NULL;
2318 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2319 info, gimple_omp_body_ptr (stmt));
2320 if (info->new_local_var_chain)
2321 declare_vars (info->new_local_var_chain,
2322 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2323 false);
2324 info->new_local_var_chain = save_local_var_chain;
2326 break;
2328 default:
2329 *handled_ops_p = false;
2330 return NULL_TREE;
2333 *handled_ops_p = true;
2334 return NULL_TREE;
2339 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2340 that reference nested functions to make sure that the static chain
2341 is set up properly for the call. */
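/* Sketch of the rewrite (illustrative only): a direct call

     inner ();

   in OUTER becomes a GIMPLE call with an explicit static-chain
   operand, conceptually

     inner () [static chain: &FRAME];

   where the chain value is computed by get_static_chain for the
   callee's level of nesting.  */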
2343 static tree
2344 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2345 struct walk_stmt_info *wi)
2347 struct nesting_info *const info = (struct nesting_info *) wi->info;
2348 tree decl, target_context;
2349 char save_static_chain_added;
2350 int i;
2351 gimple stmt = gsi_stmt (*gsi);
2353 switch (gimple_code (stmt))
2355 case GIMPLE_CALL:
2356 if (gimple_call_chain (stmt))
2357 break;
2358 decl = gimple_call_fndecl (stmt);
2359 if (!decl)
2360 break;
2361 target_context = decl_function_context (decl);
2362 if (target_context && DECL_STATIC_CHAIN (decl))
2364 gimple_call_set_chain (as_a <gcall *> (stmt),
2365 get_static_chain (info, target_context,
2366 &wi->gsi));
2367 info->static_chain_added |= (1 << (info->context != target_context));
2369 break;
2371 case GIMPLE_OMP_PARALLEL:
2372 case GIMPLE_OMP_TASK:
2373 save_static_chain_added = info->static_chain_added;
2374 info->static_chain_added = 0;
2375 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2376 for (i = 0; i < 2; i++)
2378 tree c, decl;
2379 if ((info->static_chain_added & (1 << i)) == 0)
2380 continue;
2381 decl = i ? get_chain_decl (info) : info->frame_decl;
2382 /* Don't add CHAIN.* or FRAME.* twice. */
2383 for (c = gimple_omp_taskreg_clauses (stmt);
2384 c;
2385 c = OMP_CLAUSE_CHAIN (c))
2386 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2387 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2388 && OMP_CLAUSE_DECL (c) == decl)
2389 break;
2390 if (c == NULL)
2392 c = build_omp_clause (gimple_location (stmt),
2393 i ? OMP_CLAUSE_FIRSTPRIVATE
2394 : OMP_CLAUSE_SHARED);
2395 OMP_CLAUSE_DECL (c) = decl;
2396 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2397 gimple_omp_taskreg_set_clauses (stmt, c);
2400 info->static_chain_added |= save_static_chain_added;
2401 break;
2403 case GIMPLE_OMP_TARGET:
2404 if (!is_gimple_omp_offloaded (stmt))
2406 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2407 break;
2409 save_static_chain_added = info->static_chain_added;
2410 info->static_chain_added = 0;
2411 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2412 for (i = 0; i < 2; i++)
2414 tree c, decl;
2415 if ((info->static_chain_added & (1 << i)) == 0)
2416 continue;
2417 decl = i ? get_chain_decl (info) : info->frame_decl;
2418 /* Don't add CHAIN.* or FRAME.* twice. */
2419 for (c = gimple_omp_target_clauses (stmt);
2420 c;
2421 c = OMP_CLAUSE_CHAIN (c))
2422 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2423 && OMP_CLAUSE_DECL (c) == decl)
2424 break;
2425 if (c == NULL)
2427 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2428 OMP_CLAUSE_DECL (c) = decl;
2429 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2430 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2431 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2432 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2433 c);
2434 }
2435 }
2436 info->static_chain_added |= save_static_chain_added;
2437 break;
2439 case GIMPLE_OMP_FOR:
2440 walk_body (convert_gimple_call, NULL, info,
2441 gimple_omp_for_pre_body_ptr (stmt));
2442 /* FALLTHRU */
2443 case GIMPLE_OMP_SECTIONS:
2444 case GIMPLE_OMP_SECTION:
2445 case GIMPLE_OMP_SINGLE:
2446 case GIMPLE_OMP_TEAMS:
2447 case GIMPLE_OMP_MASTER:
2448 case GIMPLE_OMP_TASKGROUP:
2449 case GIMPLE_OMP_ORDERED:
2450 case GIMPLE_OMP_CRITICAL:
2451 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2452 break;
2454 default:
2455 /* Keep looking for other operands. */
2456 *handled_ops_p = false;
2457 return NULL_TREE;
2460 *handled_ops_p = true;
2461 return NULL_TREE;
2464 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2465 call expressions. At the same time, determine if a nested function
2466 actually uses its static chain; if not, remember that. */
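/* A summary sketch (not from the original sources): this is a
   fixed-point computation.  DECL_STATIC_CHAIN is first cleared
   optimistically for every nested function, the trampoline and call
   walkers below may turn it back on, and the walk repeats until the
   count of chain-using functions stops changing.  */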
2468 static void
2469 convert_all_function_calls (struct nesting_info *root)
2471 unsigned int chain_count = 0, old_chain_count, iter_count;
2472 struct nesting_info *n;
2474 /* First, optimistically clear static_chain for all decls that haven't
2475 used the static chain already for variable access. But always create
2476 it if not optimizing. This makes it possible to reconstruct the static
2477 nesting tree at run time and thus to resolve up-level references from
2478 within the debugger. */
2479 FOR_EACH_NEST_INFO (n, root)
2481 tree decl = n->context;
2482 if (!optimize)
2484 if (n->inner)
2485 (void) get_frame_type (n);
2486 if (n->outer)
2487 (void) get_chain_decl (n);
2489 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2491 DECL_STATIC_CHAIN (decl) = 0;
2492 if (dump_file && (dump_flags & TDF_DETAILS))
2493 fprintf (dump_file, "Guessing no static-chain for %s\n",
2494 lang_hooks.decl_printable_name (decl, 2));
2496 else
2497 DECL_STATIC_CHAIN (decl) = 1;
2498 chain_count += DECL_STATIC_CHAIN (decl);
2501 /* Walk the functions and perform transformations. Note that these
2502 transformations can induce new uses of the static chain, which in turn
2503 require re-examining all users of the decl. */
2504 /* ??? It would make sense to try to use the call graph to speed this up,
2505 but the call graph hasn't really been built yet. Even if it did, we
2506 would still need to iterate in this loop since address-of references
2507 wouldn't show up in the callgraph anyway. */
2508 iter_count = 0;
2509 do
2510 {
2511 old_chain_count = chain_count;
2512 chain_count = 0;
2513 iter_count++;
2515 if (dump_file && (dump_flags & TDF_DETAILS))
2516 fputc ('\n', dump_file);
2518 FOR_EACH_NEST_INFO (n, root)
2520 tree decl = n->context;
2521 walk_function (convert_tramp_reference_stmt,
2522 convert_tramp_reference_op, n);
2523 walk_function (convert_gimple_call, NULL, n);
2524 chain_count += DECL_STATIC_CHAIN (decl);
2525 }
2526 }
2527 while (chain_count != old_chain_count);
2529 if (dump_file && (dump_flags & TDF_DETAILS))
2530 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2531 iter_count);
2534 struct nesting_copy_body_data
2536 copy_body_data cb;
2537 struct nesting_info *root;
2540 /* A helper subroutine for debug_var_chain type remapping. */
2542 static tree
2543 nesting_copy_decl (tree decl, copy_body_data *id)
2545 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2546 tree *slot = nid->root->var_map->get (decl);
2548 if (slot)
2549 return (tree) *slot;
2551 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2553 tree new_decl = copy_decl_no_change (decl, id);
2554 DECL_ORIGINAL_TYPE (new_decl)
2555 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2556 return new_decl;
2559 if (TREE_CODE (decl) == VAR_DECL
2560 || TREE_CODE (decl) == PARM_DECL
2561 || TREE_CODE (decl) == RESULT_DECL)
2562 return decl;
2564 return copy_decl_no_change (decl, id);
2567 /* A helper function for remap_vla_decls. See if *TP contains
2568 some remapped variables. */
2570 static tree
2571 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2573 struct nesting_info *root = (struct nesting_info *) data;
2574 tree t = *tp;
2576 if (DECL_P (t))
2578 *walk_subtrees = 0;
2579 tree *slot = root->var_map->get (t);
2581 if (slot)
2582 return *slot;
2584 return NULL;
2587 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2588 involved. */
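/* A hypothetical example of what needs remapping here:

     void outer (int n)
     {
       char buf[n];
       void inner (void) { buf[0] = 0; }
       ...
     }

   BUF's DECL_VALUE_EXPR is an INDIRECT_REF of a replacement pointer
   and its type is variably modified by N; once N (or the pointer) has
   been moved into the frame struct, both the type and the value
   expression must be rewritten in terms of the remapped decls so the
   debug information stays consistent.  */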
2590 static void
2591 remap_vla_decls (tree block, struct nesting_info *root)
2593 tree var, subblock, val, type;
2594 struct nesting_copy_body_data id;
2596 for (subblock = BLOCK_SUBBLOCKS (block);
2597 subblock;
2598 subblock = BLOCK_CHAIN (subblock))
2599 remap_vla_decls (subblock, root);
2601 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2602 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2604 val = DECL_VALUE_EXPR (var);
2605 type = TREE_TYPE (var);
2607 if (!(TREE_CODE (val) == INDIRECT_REF
2608 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2609 && variably_modified_type_p (type, NULL)))
2610 continue;
2612 if (root->var_map->get (TREE_OPERAND (val, 0))
2613 || walk_tree (&type, contains_remapped_vars, root, NULL))
2614 break;
2617 if (var == NULL_TREE)
2618 return;
2620 memset (&id, 0, sizeof (id));
2621 id.cb.copy_decl = nesting_copy_decl;
2622 id.cb.decl_map = new hash_map<tree, tree>;
2623 id.root = root;
2625 for (; var; var = DECL_CHAIN (var))
2626 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2628 struct nesting_info *i;
2629 tree newt, context;
2631 val = DECL_VALUE_EXPR (var);
2632 type = TREE_TYPE (var);
2634 if (!(TREE_CODE (val) == INDIRECT_REF
2635 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2636 && variably_modified_type_p (type, NULL)))
2637 continue;
2639 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2640 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2641 continue;
2643 context = decl_function_context (var);
2644 for (i = root; i; i = i->outer)
2645 if (i->context == context)
2646 break;
2648 if (i == NULL)
2649 continue;
2651 /* Fully expand value expressions. This avoids debug variables that are
2652 referenced only from them, which could then be swept away during GC. */
2653 if (slot)
2655 tree t = (tree) *slot;
2656 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2657 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2660 id.cb.src_fn = i->context;
2661 id.cb.dst_fn = i->context;
2662 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2664 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2665 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2667 newt = TREE_TYPE (newt);
2668 type = TREE_TYPE (type);
2670 if (TYPE_NAME (newt)
2671 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2672 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2673 && newt != type
2674 && TYPE_NAME (newt) == TYPE_NAME (type))
2675 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2677 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2678 if (val != DECL_VALUE_EXPR (var))
2679 SET_DECL_VALUE_EXPR (var, val);
2682 delete id.cb.decl_map;
2685 /* Fold the MEM_REF *E. */
2686 bool
2687 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2689 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2690 *ref_p = fold (*ref_p);
2691 return true;
2694 /* Do "everything else" to clean up or complete state collected by the
2695 various walking passes -- lay out the types and decls, generate code
2696 to initialize the frame decl, store critical expressions in the
2697 struct function for rtl to find. */
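/* Rough shape of the initialization sequence built here (a sketch,
   not literal compiler output):

     FRAME.parm = parm;     (each nonlocally referenced parameter)
     FRAME.chain = CHAIN;   (the chain_field, if one was created)
     __builtin_init_trampoline (&FRAME.tramp_i, &inner_i, &FRAME);

   The sequence is prepended to the function's outermost GIMPLE_BIND
   once the frame type and decl have been laid out.  */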
2699 static void
2700 finalize_nesting_tree_1 (struct nesting_info *root)
2702 gimple_seq stmt_list;
2703 gimple stmt;
2704 tree context = root->context;
2705 struct function *sf;
2707 stmt_list = NULL;
2709 /* If we created a non-local frame type or decl, we need to lay them
2710 out at this time. */
2711 if (root->frame_type)
2713 /* In some cases the frame type will trigger the -Wpadded warning.
2714 This is not helpful; suppress it. */
2715 int save_warn_padded = warn_padded;
2716 tree *adjust;
2718 warn_padded = 0;
2719 layout_type (root->frame_type);
2720 warn_padded = save_warn_padded;
2721 layout_decl (root->frame_decl, 0);
2723 /* Remove root->frame_decl from root->new_local_var_chain, so
2724 that we can declare it also in the lexical blocks, which
2725 helps ensure virtual regs that end up appearing in its RTL
2726 expression get substituted in instantiate_virtual_regs(). */
2727 for (adjust = &root->new_local_var_chain;
2728 *adjust != root->frame_decl;
2729 adjust = &DECL_CHAIN (*adjust))
2730 gcc_assert (DECL_CHAIN (*adjust));
2731 *adjust = DECL_CHAIN (*adjust);
2733 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2734 declare_vars (root->frame_decl,
2735 gimple_seq_first_stmt (gimple_body (context)), true);
2738 /* If any parameters were referenced non-locally, then we need to
2739 insert a copy. Likewise, if any variables were referenced by
2740 pointer, we need to initialize the address. */
2741 if (root->any_parm_remapped)
2743 tree p;
2744 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2746 tree field, x, y;
2748 field = lookup_field_for_decl (root, p, NO_INSERT);
2749 if (!field)
2750 continue;
2752 if (use_pointer_in_frame (p))
2753 x = build_addr (p, context);
2754 else
2755 x = p;
2757 /* If the assignment is from a non-register the stmt is
2758 not valid gimple. Make it so by using a temporary instead. */
2759 if (!is_gimple_reg (x)
2760 && is_gimple_reg_type (TREE_TYPE (x)))
2762 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2763 x = init_tmp_var (root, x, &gsi);
2766 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2767 root->frame_decl, field, NULL_TREE);
2768 stmt = gimple_build_assign (y, x);
2769 gimple_seq_add_stmt (&stmt_list, stmt);
2773 /* If a chain_field was created, then it needs to be initialized
2774 from chain_decl. */
2775 if (root->chain_field)
2777 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2778 root->frame_decl, root->chain_field, NULL_TREE);
2779 stmt = gimple_build_assign (x, get_chain_decl (root));
2780 gimple_seq_add_stmt (&stmt_list, stmt);
2783 /* If trampolines were created, then we need to initialize them. */
2784 if (root->any_tramp_created)
2786 struct nesting_info *i;
2787 for (i = root->inner; i ; i = i->next)
2789 tree arg1, arg2, arg3, x, field;
2791 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2792 if (!field)
2793 continue;
2795 gcc_assert (DECL_STATIC_CHAIN (i->context));
2796 arg3 = build_addr (root->frame_decl, context);
2798 arg2 = build_addr (i->context, context);
2800 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2801 root->frame_decl, field, NULL_TREE);
2802 arg1 = build_addr (x, context);
2804 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2805 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2806 gimple_seq_add_stmt (&stmt_list, stmt);
2810 /* If we created initialization statements, insert them. */
2811 if (stmt_list)
2813 gbind *bind;
2814 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2815 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
2816 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2817 gimple_bind_set_body (bind, stmt_list);
2820 /* If a chain_decl was created, then it needs to be registered with
2821 struct function so that it gets initialized from the static chain
2822 register at the beginning of the function. */
2823 sf = DECL_STRUCT_FUNCTION (root->context);
2824 sf->static_chain_decl = root->chain_decl;
2826 /* Similarly for the non-local goto save area. */
2827 if (root->nl_goto_field)
2829 sf->nonlocal_goto_save_area
2830 = get_frame_field (root, context, root->nl_goto_field, NULL);
2831 sf->has_nonlocal_label = 1;
2834 /* Make sure all new local variables get inserted into the
2835 proper BIND_EXPR. */
2836 if (root->new_local_var_chain)
2837 declare_vars (root->new_local_var_chain,
2838 gimple_seq_first_stmt (gimple_body (root->context)),
2839 false);
2841 if (root->debug_var_chain)
2843 tree debug_var;
2844 gbind *scope;
2846 remap_vla_decls (DECL_INITIAL (root->context), root);
2848 for (debug_var = root->debug_var_chain; debug_var;
2849 debug_var = DECL_CHAIN (debug_var))
2850 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2851 break;
2853 /* If there are any debug decls with variable length types,
2854 remap those types using other debug_var_chain variables. */
2855 if (debug_var)
2857 struct nesting_copy_body_data id;
2859 memset (&id, 0, sizeof (id));
2860 id.cb.copy_decl = nesting_copy_decl;
2861 id.cb.decl_map = new hash_map<tree, tree>;
2862 id.root = root;
2864 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2865 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2867 tree type = TREE_TYPE (debug_var);
2868 tree newt, t = type;
2869 struct nesting_info *i;
2871 for (i = root; i; i = i->outer)
2872 if (variably_modified_type_p (type, i->context))
2873 break;
2875 if (i == NULL)
2876 continue;
2878 id.cb.src_fn = i->context;
2879 id.cb.dst_fn = i->context;
2880 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2882 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2883 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2885 newt = TREE_TYPE (newt);
2886 t = TREE_TYPE (t);
2888 if (TYPE_NAME (newt)
2889 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2890 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2891 && newt != t
2892 && TYPE_NAME (newt) == TYPE_NAME (t))
2893 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2896 delete id.cb.decl_map;
2899 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
2900 if (gimple_bind_block (scope))
2901 declare_vars (root->debug_var_chain, scope, true);
2902 else
2903 BLOCK_VARS (DECL_INITIAL (root->context))
2904 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2905 root->debug_var_chain);
2908 /* Fold the rewritten MEM_REF trees. */
2909 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
2911 /* Dump the translated tree function. */
2912 if (dump_file)
2914 fputs ("\n\n", dump_file);
2915 dump_function_to_file (root->context, dump_file, dump_flags);
2919 static void
2920 finalize_nesting_tree (struct nesting_info *root)
2922 struct nesting_info *n;
2923 FOR_EACH_NEST_INFO (n, root)
2924 finalize_nesting_tree_1 (n);
2927 /* Unnest the nodes and pass them to cgraph. */
2929 static void
2930 unnest_nesting_tree_1 (struct nesting_info *root)
2932 struct cgraph_node *node = cgraph_node::get (root->context);
2934 /* For nested functions, update the cgraph to reflect unnesting.
2935 We also delay finalizing of these functions up to this point. */
2936 if (node->origin)
2938 node->unnest ();
2939 cgraph_node::finalize_function (root->context, true);
2943 static void
2944 unnest_nesting_tree (struct nesting_info *root)
2946 struct nesting_info *n;
2947 FOR_EACH_NEST_INFO (n, root)
2948 unnest_nesting_tree_1 (n);
2951 /* Free the data structures allocated during this pass. */
2953 static void
2954 free_nesting_tree (struct nesting_info *root)
2956 struct nesting_info *node, *next;
2958 node = iter_nestinfo_start (root);
2959 do
2960 {
2961 next = iter_nestinfo_next (node);
2962 delete node->var_map;
2963 delete node->field_map;
2964 delete node->mem_refs;
2965 free (node);
2966 node = next;
2967 }
2968 while (node);
2971 /* Gimplify a function and all its nested functions. */
2972 static void
2973 gimplify_all_functions (struct cgraph_node *root)
2975 struct cgraph_node *iter;
2976 if (!gimple_body (root->decl))
2977 gimplify_function_tree (root->decl);
2978 for (iter = root->nested; iter; iter = iter->next_nested)
2979 gimplify_all_functions (iter);
2982 /* Main entry point for this pass. Process FNDECL and all of its nested
2983 subroutines and turn them into something less tightly bound. */
2985 void
2986 lower_nested_functions (tree fndecl)
2988 struct cgraph_node *cgn;
2989 struct nesting_info *root;
2991 /* If there are no nested functions, there's nothing to do. */
2992 cgn = cgraph_node::get (fndecl);
2993 if (!cgn->nested)
2994 return;
2996 gimplify_all_functions (cgn);
2998 dump_file = dump_begin (TDI_nested, &dump_flags);
2999 if (dump_file)
3000 fprintf (dump_file, "\n;; Function %s\n\n",
3001 lang_hooks.decl_printable_name (fndecl, 2));
3003 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3004 root = create_nesting_tree (cgn);
3006 walk_all_functions (convert_nonlocal_reference_stmt,
3007 convert_nonlocal_reference_op,
3008 root);
3009 walk_all_functions (convert_local_reference_stmt,
3010 convert_local_reference_op,
3011 root);
3012 walk_all_functions (convert_nl_goto_reference, NULL, root);
3013 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3015 convert_all_function_calls (root);
3016 finalize_nesting_tree (root);
3017 unnest_nesting_tree (root);
3019 free_nesting_tree (root);
3020 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3022 if (dump_file)
3024 dump_end (TDI_nested, dump_file);
3025 dump_file = NULL;
3029 #include "gt-tree-nested.h"