gcc/tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stringpool.h"
26 #include "stor-layout.h"
27 #include "tm_p.h"
28 #include "function.h"
29 #include "tree-dump.h"
30 #include "tree-inline.h"
31 #include "basic-block.h"
32 #include "tree-ssa-alias.h"
33 #include "internal-fn.h"
34 #include "gimple-expr.h"
35 #include "is-a.h"
36 #include "gimple.h"
37 #include "gimplify.h"
38 #include "gimple-iterator.h"
39 #include "gimple-walk.h"
40 #include "tree-iterator.h"
41 #include "bitmap.h"
42 #include "cgraph.h"
43 #include "tree-cfg.h"
44 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
45 #include "langhooks.h"
46 #include "gimple-low.h"
49 /* The object of this pass is to lower the representation of a set of nested
50 functions in order to expose all of the gory details of the various
51 nonlocal references. We want to do this sooner rather than later, in
52 order to give us more freedom in emitting all of the functions in question.
54 Back in olden times, when gcc was young, we developed an insanely
55 complicated scheme whereby variables which were referenced nonlocally
56 were forced to live in the stack of the declaring function, and then
57 the nested functions magically discovered where these variables were
58 placed. In order for this scheme to function properly, it required
59 that the outer function be partially expanded, then we switch to
60 compiling the inner function, and once done with those we switch back
61 to compiling the outer function. Such delicate ordering requirements
  62     make it difficult to do whole translation unit optimizations
63 involving such functions.
65 The implementation here is much more direct. Everything that can be
66 referenced by an inner function is a member of an explicitly created
67 structure herein called the "nonlocal frame struct". The incoming
68 static chain for a nested function is a pointer to this struct in
69 the parent. In this way, we settle on known offsets from a known
70 base, and so are decoupled from the logic that places objects in the
71 function's stack frame. More importantly, we don't have to wait for
72 that to happen -- since the compilation of the inner function is no
73 longer tied to a real stack frame, the nonlocal frame struct can be
74 allocated anywhere. Which means that the outer function is now
75 inlinable.
77 Theory of operation here is very simple. Iterate over all the
78 statements in all the functions (depth first) several times,
79 allocating structures and fields on demand. In general we want to
  80     examine inner functions first, so that we can avoid making unnecessary
  81     changes to outer functions.
83 The order of the passes matters a bit, in that later passes will be
84 skipped if it is discovered that the functions don't actually interact
85 at all. That is, they're nested in the lexical sense but could have
86 been written as independent functions without change. */
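/* An illustrative example (a sketch for exposition only; names such as
   FRAME_outer are made up and the real field layout may differ): given the
   GNU C nested function

       int outer (int n)
       {
         int x = n;
         int inner (void) { return x + 1; }
         return inner ();
       }

   this pass conceptually creates in OUTER a frame object along the lines of

       struct FRAME_outer { int x; } FRAME;

   rewrites uses of X within OUTER as FRAME.x, and arranges for INNER to
   receive &FRAME as its static chain, so that its reference to X becomes
   CHAIN->x.  */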
89 struct nesting_info
91 struct nesting_info *outer;
92 struct nesting_info *inner;
93 struct nesting_info *next;
95 hash_map<tree, tree> *field_map;
96 hash_map<tree, tree> *var_map;
97 hash_set<tree *> *mem_refs;
98 bitmap suppress_expansion;
100 tree context;
101 tree new_local_var_chain;
102 tree debug_var_chain;
103 tree frame_type;
104 tree frame_decl;
105 tree chain_field;
106 tree chain_decl;
107 tree nl_goto_field;
109 bool any_parm_remapped;
110 bool any_tramp_created;
111 char static_chain_added;
115 /* Iterate over the nesting tree, starting with ROOT, depth first. */
117 static inline struct nesting_info *
118 iter_nestinfo_start (struct nesting_info *root)
120 while (root->inner)
121 root = root->inner;
122 return root;
125 static inline struct nesting_info *
126 iter_nestinfo_next (struct nesting_info *node)
128 if (node->next)
129 return iter_nestinfo_start (node->next);
130 return node->outer;
133 #define FOR_EACH_NEST_INFO(I, ROOT) \
134 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
136 /* Obstack used for the bitmaps in the struct above. */
137 static struct bitmap_obstack nesting_info_bitmap_obstack;
 140 /* We're working in so many different function contexts simultaneously
141 that create_tmp_var is dangerous. Prevent mishap. */
142 #define create_tmp_var cant_use_create_tmp_var_here_dummy
144 /* Like create_tmp_var, except record the variable for registration at
145 the given nesting level. */
147 static tree
148 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
150 tree tmp_var;
 152   /* If the type is of variable size or a type which must be created by the
 153      frontend, something is wrong.  Note that we explicitly allow
 154      incomplete types here, since we create them ourselves.  */
155 gcc_assert (!TREE_ADDRESSABLE (type));
156 gcc_assert (!TYPE_SIZE_UNIT (type)
157 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
159 tmp_var = create_tmp_var_raw (type, prefix);
160 DECL_CONTEXT (tmp_var) = info->context;
161 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
162 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
163 if (TREE_CODE (type) == COMPLEX_TYPE
164 || TREE_CODE (type) == VECTOR_TYPE)
165 DECL_GIMPLE_REG_P (tmp_var) = 1;
167 info->new_local_var_chain = tmp_var;
169 return tmp_var;
172 /* Take the address of EXP to be used within function CONTEXT.
173 Mark it for addressability as necessary. */
175 tree
176 build_addr (tree exp, tree context)
178 tree base = exp;
179 tree save_context;
180 tree retval;
182 while (handled_component_p (base))
183 base = TREE_OPERAND (base, 0);
185 if (DECL_P (base))
186 TREE_ADDRESSABLE (base) = 1;
188 /* Building the ADDR_EXPR will compute a set of properties for
189 that ADDR_EXPR. Those properties are unfortunately context
190 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
192 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
193 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
 194      way the properties for the ADDR_EXPR are computed properly. */
195 save_context = current_function_decl;
196 current_function_decl = context;
197 retval = build_fold_addr_expr (exp);
198 current_function_decl = save_context;
199 return retval;
202 /* Insert FIELD into TYPE, sorted by alignment requirements. */
204 void
205 insert_field_into_struct (tree type, tree field)
207 tree *p;
209 DECL_CONTEXT (field) = type;
211 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
212 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
213 break;
215 DECL_CHAIN (field) = *p;
216 *p = field;
218 /* Set correct alignment for frame struct type. */
219 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
220 TYPE_ALIGN (type) = DECL_ALIGN (field);
223 /* Build or return the RECORD_TYPE that describes the frame state that is
224 shared between INFO->CONTEXT and its nested functions. This record will
225 not be complete until finalize_nesting_tree; up until that point we'll
226 be adding fields as necessary.
228 We also build the DECL that represents this frame in the function. */
230 static tree
231 get_frame_type (struct nesting_info *info)
233 tree type = info->frame_type;
234 if (!type)
236 char *name;
238 type = make_node (RECORD_TYPE);
240 name = concat ("FRAME.",
241 IDENTIFIER_POINTER (DECL_NAME (info->context)),
242 NULL);
243 TYPE_NAME (type) = get_identifier (name);
244 free (name);
246 info->frame_type = type;
247 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
248 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
250 /* ??? Always make it addressable for now, since it is meant to
251 be pointed to by the static chain pointer. This pessimizes
252 when it turns out that no static chains are needed because
253 the nested functions referencing non-local variables are not
254 reachable, but the true pessimization is to create the non-
255 local frame structure in the first place. */
256 TREE_ADDRESSABLE (info->frame_decl) = 1;
258 return type;
261 /* Return true if DECL should be referenced by pointer in the non-local
262 frame structure. */
264 static bool
265 use_pointer_in_frame (tree decl)
267 if (TREE_CODE (decl) == PARM_DECL)
269 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
270 sized decls, and inefficient to copy large aggregates. Don't bother
271 moving anything but scalar variables. */
272 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
274 else
276 /* Variable sized types make things "interesting" in the frame. */
277 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
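/* For instance (an illustrative reading of the predicate above): a large
   struct passed by value to the enclosing function, or a local variable of
   variable-length array type, would be referenced through a pointer field
   in the frame rather than being copied into it.  */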
281 /* Given DECL, a non-locally accessed variable, find or create a field
282 in the non-local frame structure for the given nesting context. */
284 static tree
285 lookup_field_for_decl (struct nesting_info *info, tree decl,
286 enum insert_option insert)
288 if (insert == NO_INSERT)
290 tree *slot = info->field_map->get (decl);
291 return slot ? *slot : NULL_TREE;
294 tree *slot = &info->field_map->get_or_insert (decl);
295 if (!*slot)
297 tree field = make_node (FIELD_DECL);
298 DECL_NAME (field) = DECL_NAME (decl);
300 if (use_pointer_in_frame (decl))
302 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
303 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
304 DECL_NONADDRESSABLE_P (field) = 1;
306 else
308 TREE_TYPE (field) = TREE_TYPE (decl);
309 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
310 DECL_ALIGN (field) = DECL_ALIGN (decl);
311 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
312 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
313 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
314 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
317 insert_field_into_struct (get_frame_type (info), field);
318 *slot = field;
320 if (TREE_CODE (decl) == PARM_DECL)
321 info->any_parm_remapped = true;
324 return *slot;
327 /* Build or return the variable that holds the static chain within
328 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
330 static tree
331 get_chain_decl (struct nesting_info *info)
333 tree decl = info->chain_decl;
335 if (!decl)
337 tree type;
339 type = get_frame_type (info->outer);
340 type = build_pointer_type (type);
342 /* Note that this variable is *not* entered into any BIND_EXPR;
343 the construction of this variable is handled specially in
344 expand_function_start and initialize_inlined_parameters.
 345          Note also that it's represented as a parameter.  This is closer
 346          to the truth, since the initial value does come from
347 the caller. */
348 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
349 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
350 DECL_ARTIFICIAL (decl) = 1;
351 DECL_IGNORED_P (decl) = 1;
352 TREE_USED (decl) = 1;
353 DECL_CONTEXT (decl) = info->context;
354 DECL_ARG_TYPE (decl) = type;
356 /* Tell tree-inline.c that we never write to this variable, so
357 it can copy-prop the replacement value immediately. */
358 TREE_READONLY (decl) = 1;
360 info->chain_decl = decl;
362 if (dump_file
363 && (dump_flags & TDF_DETAILS)
364 && !DECL_STATIC_CHAIN (info->context))
365 fprintf (dump_file, "Setting static-chain for %s\n",
366 lang_hooks.decl_printable_name (info->context, 2));
368 DECL_STATIC_CHAIN (info->context) = 1;
370 return decl;
373 /* Build or return the field within the non-local frame state that holds
374 the static chain for INFO->CONTEXT. This is the way to walk back up
375 multiple nesting levels. */
377 static tree
378 get_chain_field (struct nesting_info *info)
380 tree field = info->chain_field;
382 if (!field)
384 tree type = build_pointer_type (get_frame_type (info->outer));
386 field = make_node (FIELD_DECL);
387 DECL_NAME (field) = get_identifier ("__chain");
388 TREE_TYPE (field) = type;
389 DECL_ALIGN (field) = TYPE_ALIGN (type);
390 DECL_NONADDRESSABLE_P (field) = 1;
392 insert_field_into_struct (get_frame_type (info), field);
394 info->chain_field = field;
396 if (dump_file
397 && (dump_flags & TDF_DETAILS)
398 && !DECL_STATIC_CHAIN (info->context))
399 fprintf (dump_file, "Setting static-chain for %s\n",
400 lang_hooks.decl_printable_name (info->context, 2));
402 DECL_STATIC_CHAIN (info->context) = 1;
404 return field;
407 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
409 static tree
410 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
411 gimple call)
413 tree t;
415 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
416 gimple_call_set_lhs (call, t);
417 if (! gsi_end_p (*gsi))
418 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
419 gsi_insert_before (gsi, call, GSI_SAME_STMT);
421 return t;
425 /* Copy EXP into a temporary. Allocate the temporary in the context of
426 INFO and insert the initialization statement before GSI. */
428 static tree
429 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
431 tree t;
432 gimple stmt;
434 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
435 stmt = gimple_build_assign (t, exp);
436 if (! gsi_end_p (*gsi))
437 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
438 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
440 return t;
444 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
446 static tree
447 gsi_gimplify_val (struct nesting_info *info, tree exp,
448 gimple_stmt_iterator *gsi)
450 if (is_gimple_val (exp))
451 return exp;
452 else
453 return init_tmp_var (info, exp, gsi);
456 /* Similarly, but copy from the temporary and insert the statement
457 after the iterator. */
459 static tree
460 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
462 tree t;
463 gimple stmt;
465 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
466 stmt = gimple_build_assign (exp, t);
467 if (! gsi_end_p (*gsi))
468 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
469 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
471 return t;
474 /* Build or return the type used to represent a nested function trampoline. */
476 static GTY(()) tree trampoline_type;
478 static tree
479 get_trampoline_type (struct nesting_info *info)
481 unsigned align, size;
482 tree t;
484 if (trampoline_type)
485 return trampoline_type;
487 align = TRAMPOLINE_ALIGNMENT;
488 size = TRAMPOLINE_SIZE;
490 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
491 then allocate extra space so that we can do dynamic alignment. */
492 if (align > STACK_BOUNDARY)
494 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
495 align = STACK_BOUNDARY;
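      /* Worked example (illustrative; the actual values are target-dependent):
         with TRAMPOLINE_ALIGNMENT of 128 bits and STACK_BOUNDARY of 64 bits,
         the padding added above is (128/8 - 1) & -(64/8) = 15 & -8 = 8 bytes,
         which is enough to realign an 8-byte-aligned buffer to a 16-byte
         boundary at run time.  */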
498 t = build_index_type (size_int (size - 1));
499 t = build_array_type (char_type_node, t);
500 t = build_decl (DECL_SOURCE_LOCATION (info->context),
501 FIELD_DECL, get_identifier ("__data"), t);
502 DECL_ALIGN (t) = align;
503 DECL_USER_ALIGN (t) = 1;
505 trampoline_type = make_node (RECORD_TYPE);
506 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
507 TYPE_FIELDS (trampoline_type) = t;
508 layout_type (trampoline_type);
509 DECL_CONTEXT (t) = trampoline_type;
511 return trampoline_type;
514 /* Given DECL, a nested function, find or create a field in the non-local
515 frame structure for a trampoline for this function. */
517 static tree
518 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
519 enum insert_option insert)
521 if (insert == NO_INSERT)
523 tree *slot = info->var_map->get (decl);
524 return slot ? *slot : NULL_TREE;
527 tree *slot = &info->var_map->get_or_insert (decl);
528 if (!*slot)
530 tree field = make_node (FIELD_DECL);
531 DECL_NAME (field) = DECL_NAME (decl);
532 TREE_TYPE (field) = get_trampoline_type (info);
533 TREE_ADDRESSABLE (field) = 1;
535 insert_field_into_struct (get_frame_type (info), field);
536 *slot = field;
538 info->any_tramp_created = true;
541 return *slot;
544 /* Build or return the field within the non-local frame state that holds
545 the non-local goto "jmp_buf". The buffer itself is maintained by the
546 rtl middle-end as dynamic stack space is allocated. */
548 static tree
549 get_nl_goto_field (struct nesting_info *info)
551 tree field = info->nl_goto_field;
552 if (!field)
554 unsigned size;
555 tree type;
557 /* For __builtin_nonlocal_goto, we need N words. The first is the
558 frame pointer, the rest is for the target's stack pointer save
559 area. The number of words is controlled by STACK_SAVEAREA_MODE;
560 not the best interface, but it'll do for now. */
561 if (Pmode == ptr_mode)
562 type = ptr_type_node;
563 else
564 type = lang_hooks.types.type_for_mode (Pmode, 1);
566 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
567 size = size / GET_MODE_SIZE (Pmode);
568 size = size + 1;
570 type = build_array_type
571 (type, build_index_type (size_int (size)));
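      /* Worked example (illustrative; sizes are target-dependent): on a 64-bit
         target where STACK_SAVEAREA_MODE (SAVE_NONLOCAL) is 16 bytes wide and
         Pmode is 8 bytes wide, SIZE is 16/8 + 1 = 3 and the array built above
         is indexed 0..3.  */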
573 field = make_node (FIELD_DECL);
574 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
575 TREE_TYPE (field) = type;
576 DECL_ALIGN (field) = TYPE_ALIGN (type);
577 TREE_ADDRESSABLE (field) = 1;
579 insert_field_into_struct (get_frame_type (info), field);
581 info->nl_goto_field = field;
584 return field;
587 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
589 static void
590 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
591 struct nesting_info *info, gimple_seq *pseq)
593 struct walk_stmt_info wi;
595 memset (&wi, 0, sizeof (wi));
596 wi.info = info;
597 wi.val_only = true;
598 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
602 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
604 static inline void
605 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
606 struct nesting_info *info)
608 gimple_seq body = gimple_body (info->context);
609 walk_body (callback_stmt, callback_op, info, &body);
610 gimple_set_body (info->context, body);
613 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
615 static void
616 walk_gimple_omp_for (gimple for_stmt,
617 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
618 struct nesting_info *info)
620 struct walk_stmt_info wi;
621 gimple_seq seq;
622 tree t;
623 size_t i;
625 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
627 seq = NULL;
628 memset (&wi, 0, sizeof (wi));
629 wi.info = info;
630 wi.gsi = gsi_last (seq);
632 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
634 wi.val_only = false;
635 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
636 &wi, NULL);
637 wi.val_only = true;
638 wi.is_lhs = false;
639 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
640 &wi, NULL);
642 wi.val_only = true;
643 wi.is_lhs = false;
644 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
645 &wi, NULL);
647 t = gimple_omp_for_incr (for_stmt, i);
648 gcc_assert (BINARY_CLASS_P (t));
649 wi.val_only = false;
650 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
651 wi.val_only = true;
652 wi.is_lhs = false;
653 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
656 seq = gsi_seq (wi.gsi);
657 if (!gimple_seq_empty_p (seq))
659 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
660 annotate_all_with_location (seq, gimple_location (for_stmt));
661 gimple_seq_add_seq (&pre_body, seq);
662 gimple_omp_for_set_pre_body (for_stmt, pre_body);
666 /* Similarly for ROOT and all functions nested underneath, depth first. */
668 static void
669 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
670 struct nesting_info *root)
672 struct nesting_info *n;
673 FOR_EACH_NEST_INFO (n, root)
674 walk_function (callback_stmt, callback_op, n);
 678 /* We have to check for a fairly pathological case.  The operands of a
 679    nested function are to be interpreted in the context of the enclosing
680 function. So if any are variably-sized, they will get remapped when the
681 enclosing function is inlined. But that remapping would also have to be
682 done in the types of the PARM_DECLs of the nested function, meaning the
683 argument types of that function will disagree with the arguments in the
684 calls to that function. So we'd either have to make a copy of the nested
685 function corresponding to each time the enclosing function was inlined or
686 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
687 function. The former is not practical. The latter would still require
688 detecting this case to know when to add the conversions. So, for now at
689 least, we don't inline such an enclosing function.
691 We have to do that check recursively, so here return indicating whether
 692    FNDECL has such a nested function.  ORIG_FNDECL is the function we were
 693    originally trying to inline; it is used for checking whether any
 694    argument is variably modified by anything in it.
696 It would be better to do this in tree-inline.c so that we could give
697 the appropriate warning for why a function can't be inlined, but that's
698 too late since the nesting structure has already been flattened and
699 adding a flag just to record this fact seems a waste of a flag. */
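/* An illustrative instance of the problem (a sketch; the identifiers are
   made up):

       void outer (int n)
       {
         void inner (int vec[n]);   <-- the type of VEC depends on N
         ...
       }

   Inlining OUTER would remap N in the inlined body, but the PARM_DECL type
   of INNER would still mention the original N, so calls to INNER and its
   declared argument types would disagree.  */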
701 static bool
702 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
704 struct cgraph_node *cgn = cgraph_node::get (fndecl);
705 tree arg;
707 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
709 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
710 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
711 return true;
713 if (check_for_nested_with_variably_modified (cgn->decl,
714 orig_fndecl))
715 return true;
718 return false;
721 /* Construct our local datastructure describing the function nesting
722 tree rooted by CGN. */
724 static struct nesting_info *
725 create_nesting_tree (struct cgraph_node *cgn)
727 struct nesting_info *info = XCNEW (struct nesting_info);
728 info->field_map = new hash_map<tree, tree>;
729 info->var_map = new hash_map<tree, tree>;
730 info->mem_refs = new hash_set<tree *>;
731 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
732 info->context = cgn->decl;
734 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
736 struct nesting_info *sub = create_nesting_tree (cgn);
737 sub->outer = info;
738 sub->next = info->inner;
739 info->inner = sub;
 742   /* See the comment at check_for_nested_with_variably_modified for a
 743      discussion of why this has to be here.  */
744 if (check_for_nested_with_variably_modified (info->context, info->context))
745 DECL_UNINLINABLE (info->context) = true;
747 return info;
750 /* Return an expression computing the static chain for TARGET_CONTEXT
751 from INFO->CONTEXT. Insert any necessary computations before TSI. */
753 static tree
754 get_static_chain (struct nesting_info *info, tree target_context,
755 gimple_stmt_iterator *gsi)
757 struct nesting_info *i;
758 tree x;
760 if (info->context == target_context)
762 x = build_addr (info->frame_decl, target_context);
764 else
766 x = get_chain_decl (info);
768 for (i = info->outer; i->context != target_context; i = i->outer)
770 tree field = get_chain_field (i);
772 x = build_simple_mem_ref (x);
773 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
774 x = init_tmp_var (info, x, gsi);
778 return x;
782 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
783 frame as seen from INFO->CONTEXT. Insert any necessary computations
784 before GSI. */
786 static tree
787 get_frame_field (struct nesting_info *info, tree target_context,
788 tree field, gimple_stmt_iterator *gsi)
790 struct nesting_info *i;
791 tree x;
793 if (info->context == target_context)
795 /* Make sure frame_decl gets created. */
796 (void) get_frame_type (info);
797 x = info->frame_decl;
799 else
801 x = get_chain_decl (info);
803 for (i = info->outer; i->context != target_context; i = i->outer)
805 tree field = get_chain_field (i);
807 x = build_simple_mem_ref (x);
808 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
809 x = init_tmp_var (info, x, gsi);
812 x = build_simple_mem_ref (x);
815 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
816 return x;
819 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
821 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
822 in the nested function with DECL_VALUE_EXPR set to reference the true
823 variable in the parent function. This is used both for debug info
824 and in OpenMP lowering. */
826 static tree
827 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
829 tree target_context;
830 struct nesting_info *i;
831 tree x, field, new_decl;
833 tree *slot = &info->var_map->get_or_insert (decl);
835 if (*slot)
836 return *slot;
838 target_context = decl_function_context (decl);
840 /* A copy of the code in get_frame_field, but without the temporaries. */
841 if (info->context == target_context)
843 /* Make sure frame_decl gets created. */
844 (void) get_frame_type (info);
845 x = info->frame_decl;
846 i = info;
848 else
850 x = get_chain_decl (info);
851 for (i = info->outer; i->context != target_context; i = i->outer)
853 field = get_chain_field (i);
854 x = build_simple_mem_ref (x);
855 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
857 x = build_simple_mem_ref (x);
860 field = lookup_field_for_decl (i, decl, INSERT);
861 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
862 if (use_pointer_in_frame (decl))
863 x = build_simple_mem_ref (x);
865 /* ??? We should be remapping types as well, surely. */
866 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
867 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
868 DECL_CONTEXT (new_decl) = info->context;
869 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
870 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
871 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
872 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
873 TREE_READONLY (new_decl) = TREE_READONLY (decl);
874 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
875 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
876 if ((TREE_CODE (decl) == PARM_DECL
877 || TREE_CODE (decl) == RESULT_DECL
878 || TREE_CODE (decl) == VAR_DECL)
879 && DECL_BY_REFERENCE (decl))
880 DECL_BY_REFERENCE (new_decl) = 1;
882 SET_DECL_VALUE_EXPR (new_decl, x);
883 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
885 *slot = new_decl;
886 DECL_CHAIN (new_decl) = info->debug_var_chain;
887 info->debug_var_chain = new_decl;
889 if (!optimize
890 && info->context != target_context
891 && variably_modified_type_p (TREE_TYPE (decl), NULL))
892 note_nonlocal_vla_type (info, TREE_TYPE (decl));
894 return new_decl;
898 /* Callback for walk_gimple_stmt, rewrite all references to VAR
899 and PARM_DECLs that belong to outer functions.
901 The rewrite will involve some number of structure accesses back up
902 the static chain. E.g. for a variable FOO up one nesting level it'll
903 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
904 indirections apply to decls for which use_pointer_in_frame is true. */
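/* A rough sketch of what this emits for the two-level case above (the
   temporary names are made up for illustration):

       _1 = CHAIN->__chain;
       _2 = _1->FOO;

   with one further dereference, *(_1->FOO), when FOO is referenced by
   pointer in the frame.  Each extra nesting level adds one load of the
   saved __chain field.  */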
906 static tree
907 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
909 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
910 struct nesting_info *const info = (struct nesting_info *) wi->info;
911 tree t = *tp;
913 *walk_subtrees = 0;
914 switch (TREE_CODE (t))
916 case VAR_DECL:
917 /* Non-automatic variables are never processed. */
918 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
919 break;
920 /* FALLTHRU */
922 case PARM_DECL:
923 if (decl_function_context (t) != info->context)
925 tree x;
926 wi->changed = true;
928 x = get_nonlocal_debug_decl (info, t);
929 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
931 tree target_context = decl_function_context (t);
932 struct nesting_info *i;
933 for (i = info->outer; i->context != target_context; i = i->outer)
934 continue;
935 x = lookup_field_for_decl (i, t, INSERT);
936 x = get_frame_field (info, target_context, x, &wi->gsi);
937 if (use_pointer_in_frame (t))
939 x = init_tmp_var (info, x, &wi->gsi);
940 x = build_simple_mem_ref (x);
944 if (wi->val_only)
946 if (wi->is_lhs)
947 x = save_tmp_var (info, x, &wi->gsi);
948 else
949 x = init_tmp_var (info, x, &wi->gsi);
952 *tp = x;
954 break;
956 case LABEL_DECL:
957 /* We're taking the address of a label from a parent function, but
958 this is not itself a non-local goto. Mark the label such that it
959 will not be deleted, much as we would with a label address in
960 static storage. */
961 if (decl_function_context (t) != info->context)
962 FORCED_LABEL (t) = 1;
963 break;
965 case ADDR_EXPR:
967 bool save_val_only = wi->val_only;
969 wi->val_only = false;
970 wi->is_lhs = false;
971 wi->changed = false;
972 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
973 wi->val_only = true;
975 if (wi->changed)
977 tree save_context;
979 /* If we changed anything, we might no longer be directly
980 referencing a decl. */
981 save_context = current_function_decl;
982 current_function_decl = info->context;
983 recompute_tree_invariant_for_addr_expr (t);
984 current_function_decl = save_context;
986 /* If the callback converted the address argument in a context
987 where we only accept variables (and min_invariant, presumably),
988 then compute the address into a temporary. */
989 if (save_val_only)
990 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
991 t, &wi->gsi);
994 break;
996 case REALPART_EXPR:
997 case IMAGPART_EXPR:
998 case COMPONENT_REF:
999 case ARRAY_REF:
1000 case ARRAY_RANGE_REF:
1001 case BIT_FIELD_REF:
1002 /* Go down this entire nest and just look at the final prefix and
1003 anything that describes the references. Otherwise, we lose track
1004 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1005 wi->val_only = true;
1006 wi->is_lhs = false;
1007 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1009 if (TREE_CODE (t) == COMPONENT_REF)
1010 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1011 NULL);
1012 else if (TREE_CODE (t) == ARRAY_REF
1013 || TREE_CODE (t) == ARRAY_RANGE_REF)
1015 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1016 wi, NULL);
1017 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1018 wi, NULL);
1019 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1020 wi, NULL);
1023 wi->val_only = false;
1024 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1025 break;
1027 case VIEW_CONVERT_EXPR:
1028 /* Just request to look at the subtrees, leaving val_only and lhs
1029 untouched. This might actually be for !val_only + lhs, in which
1030 case we don't want to force a replacement by a temporary. */
1031 *walk_subtrees = 1;
1032 break;
1034 default:
1035 if (!IS_TYPE_OR_DECL_P (t))
1037 *walk_subtrees = 1;
1038 wi->val_only = true;
1039 wi->is_lhs = false;
1041 break;
1044 return NULL_TREE;
1047 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1048 struct walk_stmt_info *);
1050 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1051 and PARM_DECLs that belong to outer functions. */
1053 static bool
1054 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1056 struct nesting_info *const info = (struct nesting_info *) wi->info;
1057 bool need_chain = false, need_stmts = false;
1058 tree clause, decl;
1059 int dummy;
1060 bitmap new_suppress;
1062 new_suppress = BITMAP_GGC_ALLOC ();
1063 bitmap_copy (new_suppress, info->suppress_expansion);
1065 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1067 switch (OMP_CLAUSE_CODE (clause))
1069 case OMP_CLAUSE_REDUCTION:
1070 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1071 need_stmts = true;
1072 goto do_decl_clause;
1074 case OMP_CLAUSE_LASTPRIVATE:
1075 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1076 need_stmts = true;
1077 goto do_decl_clause;
1079 case OMP_CLAUSE_LINEAR:
1080 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1081 need_stmts = true;
1082 wi->val_only = true;
1083 wi->is_lhs = false;
1084 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1085 &dummy, wi);
1086 goto do_decl_clause;
1088 case OMP_CLAUSE_PRIVATE:
1089 case OMP_CLAUSE_FIRSTPRIVATE:
1090 case OMP_CLAUSE_COPYPRIVATE:
1091 case OMP_CLAUSE_SHARED:
1092 do_decl_clause:
1093 decl = OMP_CLAUSE_DECL (clause);
1094 if (TREE_CODE (decl) == VAR_DECL
1095 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1096 break;
1097 if (decl_function_context (decl) != info->context)
1099 bitmap_set_bit (new_suppress, DECL_UID (decl));
1100 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1101 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1102 need_chain = true;
1104 break;
1106 case OMP_CLAUSE_SCHEDULE:
1107 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1108 break;
1109 /* FALLTHRU */
1110 case OMP_CLAUSE_FINAL:
1111 case OMP_CLAUSE_IF:
1112 case OMP_CLAUSE_NUM_THREADS:
1113 case OMP_CLAUSE_DEPEND:
1114 case OMP_CLAUSE_DEVICE:
1115 case OMP_CLAUSE_NUM_TEAMS:
1116 case OMP_CLAUSE_THREAD_LIMIT:
1117 case OMP_CLAUSE_SAFELEN:
1118 case OMP_CLAUSE__CILK_FOR_COUNT_:
1119 wi->val_only = true;
1120 wi->is_lhs = false;
1121 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1122 &dummy, wi);
1123 break;
1125 case OMP_CLAUSE_DIST_SCHEDULE:
1126 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1128 wi->val_only = true;
1129 wi->is_lhs = false;
1130 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1131 &dummy, wi);
1133 break;
1135 case OMP_CLAUSE_MAP:
1136 case OMP_CLAUSE_TO:
1137 case OMP_CLAUSE_FROM:
1138 if (OMP_CLAUSE_SIZE (clause))
1140 wi->val_only = true;
1141 wi->is_lhs = false;
1142 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1143 &dummy, wi);
1145 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1146 goto do_decl_clause;
1147 wi->val_only = true;
1148 wi->is_lhs = false;
1149 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1150 wi, NULL);
1151 break;
1153 case OMP_CLAUSE_ALIGNED:
1154 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1156 wi->val_only = true;
1157 wi->is_lhs = false;
1158 convert_nonlocal_reference_op
1159 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1161 /* Like do_decl_clause, but don't add any suppression. */
1162 decl = OMP_CLAUSE_DECL (clause);
1163 if (TREE_CODE (decl) == VAR_DECL
1164 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1165 break;
1166 if (decl_function_context (decl) != info->context)
1168 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1169 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1170 need_chain = true;
1172 break;
1174 case OMP_CLAUSE_NOWAIT:
1175 case OMP_CLAUSE_ORDERED:
1176 case OMP_CLAUSE_DEFAULT:
1177 case OMP_CLAUSE_COPYIN:
1178 case OMP_CLAUSE_COLLAPSE:
1179 case OMP_CLAUSE_UNTIED:
1180 case OMP_CLAUSE_MERGEABLE:
1181 case OMP_CLAUSE_PROC_BIND:
1182 break;
1184 default:
1185 gcc_unreachable ();
1189 info->suppress_expansion = new_suppress;
1191 if (need_stmts)
1192 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1193 switch (OMP_CLAUSE_CODE (clause))
1195 case OMP_CLAUSE_REDUCTION:
1196 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1198 tree old_context
1199 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1200 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1201 = info->context;
1202 walk_body (convert_nonlocal_reference_stmt,
1203 convert_nonlocal_reference_op, info,
1204 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1205 walk_body (convert_nonlocal_reference_stmt,
1206 convert_nonlocal_reference_op, info,
1207 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1208 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1209 = old_context;
1211 break;
1213 case OMP_CLAUSE_LASTPRIVATE:
1214 walk_body (convert_nonlocal_reference_stmt,
1215 convert_nonlocal_reference_op, info,
1216 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1217 break;
1219 case OMP_CLAUSE_LINEAR:
1220 walk_body (convert_nonlocal_reference_stmt,
1221 convert_nonlocal_reference_op, info,
1222 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1223 break;
1225 default:
1226 break;
1229 return need_chain;
1232 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1234 static void
1235 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1237 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1238 type = TREE_TYPE (type);
1240 if (TYPE_NAME (type)
1241 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1242 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1243 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1245 while (POINTER_TYPE_P (type)
1246 || TREE_CODE (type) == VECTOR_TYPE
1247 || TREE_CODE (type) == FUNCTION_TYPE
1248 || TREE_CODE (type) == METHOD_TYPE)
1249 type = TREE_TYPE (type);
1251 if (TREE_CODE (type) == ARRAY_TYPE)
1253 tree domain, t;
1255 note_nonlocal_vla_type (info, TREE_TYPE (type));
1256 domain = TYPE_DOMAIN (type);
1257 if (domain)
1259 t = TYPE_MIN_VALUE (domain);
1260 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1261 && decl_function_context (t) != info->context)
1262 get_nonlocal_debug_decl (info, t);
1263 t = TYPE_MAX_VALUE (domain);
1264 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1265 && decl_function_context (t) != info->context)
1266 get_nonlocal_debug_decl (info, t);
1271 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1272 in BLOCK. */
1274 static void
1275 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1277 tree var;
1279 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1280 if (TREE_CODE (var) == VAR_DECL
1281 && variably_modified_type_p (TREE_TYPE (var), NULL)
1282 && DECL_HAS_VALUE_EXPR_P (var)
1283 && decl_function_context (var) != info->context)
1284 note_nonlocal_vla_type (info, TREE_TYPE (var));
1287 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1288 PARM_DECLs that belong to outer functions. This handles statements
1289 that are not handled via the standard recursion done in
1290 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1291 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1292 operands of STMT have been handled by this function. */
1294 static tree
1295 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1296 struct walk_stmt_info *wi)
1298 struct nesting_info *info = (struct nesting_info *) wi->info;
1299 tree save_local_var_chain;
1300 bitmap save_suppress;
1301 gimple stmt = gsi_stmt (*gsi);
1303 switch (gimple_code (stmt))
1305 case GIMPLE_GOTO:
1306 /* Don't walk non-local gotos for now. */
1307 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1309 wi->val_only = true;
1310 wi->is_lhs = false;
1311 *handled_ops_p = true;
1312 return NULL_TREE;
1314 break;
1316 case GIMPLE_OMP_PARALLEL:
1317 case GIMPLE_OMP_TASK:
1318 save_suppress = info->suppress_expansion;
1319 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1320 wi))
1322 tree c, decl;
1323 decl = get_chain_decl (info);
1324 c = build_omp_clause (gimple_location (stmt),
1325 OMP_CLAUSE_FIRSTPRIVATE);
1326 OMP_CLAUSE_DECL (c) = decl;
1327 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1328 gimple_omp_taskreg_set_clauses (stmt, c);
1331 save_local_var_chain = info->new_local_var_chain;
1332 info->new_local_var_chain = NULL;
1334 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1335 info, gimple_omp_body_ptr (stmt));
1337 if (info->new_local_var_chain)
1338 declare_vars (info->new_local_var_chain,
1339 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1340 false);
1341 info->new_local_var_chain = save_local_var_chain;
1342 info->suppress_expansion = save_suppress;
1343 break;
1345 case GIMPLE_OMP_FOR:
1346 save_suppress = info->suppress_expansion;
1347 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1348 walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
1349 convert_nonlocal_reference_op, info);
1350 walk_body (convert_nonlocal_reference_stmt,
1351 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1352 info->suppress_expansion = save_suppress;
1353 break;
1355 case GIMPLE_OMP_SECTIONS:
1356 save_suppress = info->suppress_expansion;
1357 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1358 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1359 info, gimple_omp_body_ptr (stmt));
1360 info->suppress_expansion = save_suppress;
1361 break;
1363 case GIMPLE_OMP_SINGLE:
1364 save_suppress = info->suppress_expansion;
1365 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1366 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1367 info, gimple_omp_body_ptr (stmt));
1368 info->suppress_expansion = save_suppress;
1369 break;
1371 case GIMPLE_OMP_TARGET:
1372 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
1374 save_suppress = info->suppress_expansion;
1375 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1376 wi);
1377 info->suppress_expansion = save_suppress;
1378 walk_body (convert_nonlocal_reference_stmt,
1379 convert_nonlocal_reference_op, info,
1380 gimple_omp_body_ptr (stmt));
1381 break;
1383 save_suppress = info->suppress_expansion;
1384 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1385 wi))
1387 tree c, decl;
1388 decl = get_chain_decl (info);
1389 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1390 OMP_CLAUSE_DECL (c) = decl;
1391 OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TO;
1392 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1393 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1394 gimple_omp_target_set_clauses (stmt, c);
1397 save_local_var_chain = info->new_local_var_chain;
1398 info->new_local_var_chain = NULL;
1400 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1401 info, gimple_omp_body_ptr (stmt));
1403 if (info->new_local_var_chain)
1404 declare_vars (info->new_local_var_chain,
1405 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1406 false);
1407 info->new_local_var_chain = save_local_var_chain;
1408 info->suppress_expansion = save_suppress;
1409 break;
1411 case GIMPLE_OMP_TEAMS:
1412 save_suppress = info->suppress_expansion;
1413 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1414 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1415 info, gimple_omp_body_ptr (stmt));
1416 info->suppress_expansion = save_suppress;
1417 break;
1419 case GIMPLE_OMP_SECTION:
1420 case GIMPLE_OMP_MASTER:
1421 case GIMPLE_OMP_TASKGROUP:
1422 case GIMPLE_OMP_ORDERED:
1423 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1424 info, gimple_omp_body_ptr (stmt));
1425 break;
1427 case GIMPLE_BIND:
1428 if (!optimize && gimple_bind_block (stmt))
1429 note_nonlocal_block_vlas (info, gimple_bind_block (stmt));
1431 for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
1432 if (TREE_CODE (var) == NAMELIST_DECL)
1434 /* Adjust decls mentioned in NAMELIST_DECL. */
1435 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1436 tree decl;
1437 unsigned int i;
1439 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1441 if (TREE_CODE (decl) == VAR_DECL
1442 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1443 continue;
1444 if (decl_function_context (decl) != info->context)
1445 CONSTRUCTOR_ELT (decls, i)->value
1446 = get_nonlocal_debug_decl (info, decl);
1450 *handled_ops_p = false;
1451 return NULL_TREE;
1453 case GIMPLE_COND:
1454 wi->val_only = true;
1455 wi->is_lhs = false;
1456 *handled_ops_p = false;
1457 return NULL_TREE;
1459 default:
1460 /* For every other statement that we are not interested in
1461 handling here, let the walker traverse the operands. */
1462 *handled_ops_p = false;
1463 return NULL_TREE;
1466   /* We have handled all of STMT's operands; no need to traverse them. */
1467 *handled_ops_p = true;
1468 return NULL_TREE;
1472 /* A subroutine of convert_local_reference. Create a local variable
1473 in the parent function with DECL_VALUE_EXPR set to reference the
1474 field in FRAME. This is used both for debug info and in OpenMP
1475 lowering. */
1477 static tree
1478 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1480 tree x, new_decl;
1482 tree *slot = &info->var_map->get_or_insert (decl);
1483 if (*slot)
1484 return *slot;
1486 /* Make sure frame_decl gets created. */
1487 (void) get_frame_type (info);
1488 x = info->frame_decl;
1489 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1491 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1492 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1493 DECL_CONTEXT (new_decl) = info->context;
1494 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1495 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1496 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1497 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1498 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1499 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1500 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1501 if ((TREE_CODE (decl) == PARM_DECL
1502 || TREE_CODE (decl) == RESULT_DECL
1503 || TREE_CODE (decl) == VAR_DECL)
1504 && DECL_BY_REFERENCE (decl))
1505 DECL_BY_REFERENCE (new_decl) = 1;
1507 SET_DECL_VALUE_EXPR (new_decl, x);
1508 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1509 *slot = new_decl;
1511 DECL_CHAIN (new_decl) = info->debug_var_chain;
1512 info->debug_var_chain = new_decl;
1514 /* Do not emit debug info twice. */
1515 DECL_IGNORED_P (decl) = 1;
1517 return new_decl;
1521 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1522 and PARM_DECLs that were referenced by inner nested functions.
1523 The rewrite will be a structure reference to the local frame variable. */
1525 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1527 static tree
1528 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1530 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1531 struct nesting_info *const info = (struct nesting_info *) wi->info;
1532 tree t = *tp, field, x;
1533 bool save_val_only;
1535 *walk_subtrees = 0;
1536 switch (TREE_CODE (t))
1538 case VAR_DECL:
1539 /* Non-automatic variables are never processed. */
1540 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1541 break;
1542 /* FALLTHRU */
1544 case PARM_DECL:
1545 if (decl_function_context (t) == info->context)
1547 /* If we copied a pointer to the frame, then the original decl
1548 is used unchanged in the parent function. */
1549 if (use_pointer_in_frame (t))
1550 break;
1552 /* No need to transform anything if no child references the
1553 variable. */
1554 field = lookup_field_for_decl (info, t, NO_INSERT);
1555 if (!field)
1556 break;
1557 wi->changed = true;
1559 x = get_local_debug_decl (info, t, field);
1560 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1561 x = get_frame_field (info, info->context, field, &wi->gsi);
1563 if (wi->val_only)
1565 if (wi->is_lhs)
1566 x = save_tmp_var (info, x, &wi->gsi);
1567 else
1568 x = init_tmp_var (info, x, &wi->gsi);
1571 *tp = x;
1573 break;
1575 case ADDR_EXPR:
1576 save_val_only = wi->val_only;
1577 wi->val_only = false;
1578 wi->is_lhs = false;
1579 wi->changed = false;
1580 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1581 wi->val_only = save_val_only;
1583 /* If we converted anything ... */
1584 if (wi->changed)
1586 tree save_context;
1588 /* Then the frame decl is now addressable. */
1589 TREE_ADDRESSABLE (info->frame_decl) = 1;
1591 save_context = current_function_decl;
1592 current_function_decl = info->context;
1593 recompute_tree_invariant_for_addr_expr (t);
1594 current_function_decl = save_context;
1596 /* If we are in a context where we only accept values, then
1597 compute the address into a temporary. */
1598 if (save_val_only)
1599 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1600 t, &wi->gsi);
1602 break;
1604 case REALPART_EXPR:
1605 case IMAGPART_EXPR:
1606 case COMPONENT_REF:
1607 case ARRAY_REF:
1608 case ARRAY_RANGE_REF:
1609 case BIT_FIELD_REF:
1610 /* Go down this entire nest and just look at the final prefix and
1611 anything that describes the references. Otherwise, we lose track
1612 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1613 save_val_only = wi->val_only;
1614 wi->val_only = true;
1615 wi->is_lhs = false;
1616 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1618 if (TREE_CODE (t) == COMPONENT_REF)
1619 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1620 NULL);
1621 else if (TREE_CODE (t) == ARRAY_REF
1622 || TREE_CODE (t) == ARRAY_RANGE_REF)
1624 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1625 NULL);
1626 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1627 NULL);
1628 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1629 NULL);
1632 wi->val_only = false;
1633 walk_tree (tp, convert_local_reference_op, wi, NULL);
1634 wi->val_only = save_val_only;
1635 break;
1637 case MEM_REF:
1638 save_val_only = wi->val_only;
1639 wi->val_only = true;
1640 wi->is_lhs = false;
1641 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1642 wi, NULL);
1643 /* We need to re-fold the MEM_REF as component references as
1644          part of an ADDR_EXPR address are not allowed.  But we cannot
1645 fold here, as the chain record type is not yet finalized. */
1646 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1647 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1648 info->mem_refs->add (tp);
1649 wi->val_only = save_val_only;
1650 break;
1652 case VIEW_CONVERT_EXPR:
1653 /* Just request to look at the subtrees, leaving val_only and lhs
1654 untouched. This might actually be for !val_only + lhs, in which
1655 case we don't want to force a replacement by a temporary. */
1656 *walk_subtrees = 1;
1657 break;
1659 default:
1660 if (!IS_TYPE_OR_DECL_P (t))
1662 *walk_subtrees = 1;
1663 wi->val_only = true;
1664 wi->is_lhs = false;
1666 break;
1669 return NULL_TREE;
1672 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1673 struct walk_stmt_info *);
1675 /* Helper for convert_local_reference. Convert all the references in
1676 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1678 static bool
1679 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1681 struct nesting_info *const info = (struct nesting_info *) wi->info;
1682 bool need_frame = false, need_stmts = false;
1683 tree clause, decl;
1684 int dummy;
1685 bitmap new_suppress;
1687 new_suppress = BITMAP_GGC_ALLOC ();
1688 bitmap_copy (new_suppress, info->suppress_expansion);
1690 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1692 switch (OMP_CLAUSE_CODE (clause))
1694 case OMP_CLAUSE_REDUCTION:
1695 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1696 need_stmts = true;
1697 goto do_decl_clause;
1699 case OMP_CLAUSE_LASTPRIVATE:
1700 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1701 need_stmts = true;
1702 goto do_decl_clause;
1704 case OMP_CLAUSE_LINEAR:
1705 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1706 need_stmts = true;
1707 wi->val_only = true;
1708 wi->is_lhs = false;
1709 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1710 wi);
1711 goto do_decl_clause;
1713 case OMP_CLAUSE_PRIVATE:
1714 case OMP_CLAUSE_FIRSTPRIVATE:
1715 case OMP_CLAUSE_COPYPRIVATE:
1716 case OMP_CLAUSE_SHARED:
1717 do_decl_clause:
1718 decl = OMP_CLAUSE_DECL (clause);
1719 if (TREE_CODE (decl) == VAR_DECL
1720 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1721 break;
1722 if (decl_function_context (decl) == info->context
1723 && !use_pointer_in_frame (decl))
1725 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1726 if (field)
1728 bitmap_set_bit (new_suppress, DECL_UID (decl));
1729 OMP_CLAUSE_DECL (clause)
1730 = get_local_debug_decl (info, decl, field);
1731 need_frame = true;
1734 break;
1736 case OMP_CLAUSE_SCHEDULE:
1737 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1738 break;
1739 /* FALLTHRU */
1740 case OMP_CLAUSE_FINAL:
1741 case OMP_CLAUSE_IF:
1742 case OMP_CLAUSE_NUM_THREADS:
1743 case OMP_CLAUSE_DEPEND:
1744 case OMP_CLAUSE_DEVICE:
1745 case OMP_CLAUSE_NUM_TEAMS:
1746 case OMP_CLAUSE_THREAD_LIMIT:
1747 case OMP_CLAUSE_SAFELEN:
1748 case OMP_CLAUSE__CILK_FOR_COUNT_:
1749 wi->val_only = true;
1750 wi->is_lhs = false;
1751 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1752 wi);
1753 break;
1755 case OMP_CLAUSE_DIST_SCHEDULE:
1756 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1758 wi->val_only = true;
1759 wi->is_lhs = false;
1760 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1761 &dummy, wi);
1763 break;
1765 case OMP_CLAUSE_MAP:
1766 case OMP_CLAUSE_TO:
1767 case OMP_CLAUSE_FROM:
1768 if (OMP_CLAUSE_SIZE (clause))
1770 wi->val_only = true;
1771 wi->is_lhs = false;
1772 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1773 &dummy, wi);
1775 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1776 goto do_decl_clause;
1777 wi->val_only = true;
1778 wi->is_lhs = false;
1779 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1780 wi, NULL);
1781 break;
1783 case OMP_CLAUSE_ALIGNED:
1784 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1786 wi->val_only = true;
1787 wi->is_lhs = false;
1788 convert_local_reference_op
1789 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1791 /* Like do_decl_clause, but don't add any suppression. */
1792 decl = OMP_CLAUSE_DECL (clause);
1793 if (TREE_CODE (decl) == VAR_DECL
1794 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1795 break;
1796 if (decl_function_context (decl) == info->context
1797 && !use_pointer_in_frame (decl))
1799 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1800 if (field)
1802 OMP_CLAUSE_DECL (clause)
1803 = get_local_debug_decl (info, decl, field);
1804 need_frame = true;
1807 break;
1809 case OMP_CLAUSE_NOWAIT:
1810 case OMP_CLAUSE_ORDERED:
1811 case OMP_CLAUSE_DEFAULT:
1812 case OMP_CLAUSE_COPYIN:
1813 case OMP_CLAUSE_COLLAPSE:
1814 case OMP_CLAUSE_UNTIED:
1815 case OMP_CLAUSE_MERGEABLE:
1816 case OMP_CLAUSE_PROC_BIND:
1817 break;
1819 default:
1820 gcc_unreachable ();
1824 info->suppress_expansion = new_suppress;
1826 if (need_stmts)
1827 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1828 switch (OMP_CLAUSE_CODE (clause))
1830 case OMP_CLAUSE_REDUCTION:
1831 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1833 tree old_context
1834 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1835 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1836 = info->context;
1837 walk_body (convert_local_reference_stmt,
1838 convert_local_reference_op, info,
1839 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1840 walk_body (convert_local_reference_stmt,
1841 convert_local_reference_op, info,
1842 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1843 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1844 = old_context;
1846 break;
1848 case OMP_CLAUSE_LASTPRIVATE:
1849 walk_body (convert_local_reference_stmt,
1850 convert_local_reference_op, info,
1851 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1852 break;
1854 case OMP_CLAUSE_LINEAR:
1855 walk_body (convert_local_reference_stmt,
1856 convert_local_reference_op, info,
1857 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1858 break;
1860 default:
1861 break;
1864 return need_frame;
1868 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1869 and PARM_DECLs that were referenced by inner nested functions.
1870 The rewrite will be a structure reference to the local frame variable. */
1872 static tree
1873 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1874 struct walk_stmt_info *wi)
1876 struct nesting_info *info = (struct nesting_info *) wi->info;
1877 tree save_local_var_chain;
1878 bitmap save_suppress;
1879 gimple stmt = gsi_stmt (*gsi);
1881 switch (gimple_code (stmt))
1883 case GIMPLE_OMP_PARALLEL:
1884 case GIMPLE_OMP_TASK:
1885 save_suppress = info->suppress_expansion;
1886 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1887 wi))
1889 tree c;
1890 (void) get_frame_type (info);
1891 c = build_omp_clause (gimple_location (stmt),
1892 OMP_CLAUSE_SHARED);
1893 OMP_CLAUSE_DECL (c) = info->frame_decl;
1894 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1895 gimple_omp_taskreg_set_clauses (stmt, c);
1898 save_local_var_chain = info->new_local_var_chain;
1899 info->new_local_var_chain = NULL;
1901 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1902 gimple_omp_body_ptr (stmt));
1904 if (info->new_local_var_chain)
1905 declare_vars (info->new_local_var_chain,
1906 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1907 info->new_local_var_chain = save_local_var_chain;
1908 info->suppress_expansion = save_suppress;
1909 break;
1911 case GIMPLE_OMP_FOR:
1912 save_suppress = info->suppress_expansion;
1913 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1914 walk_gimple_omp_for (stmt, convert_local_reference_stmt,
1915 convert_local_reference_op, info);
1916 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1917 info, gimple_omp_body_ptr (stmt));
1918 info->suppress_expansion = save_suppress;
1919 break;
1921 case GIMPLE_OMP_SECTIONS:
1922 save_suppress = info->suppress_expansion;
1923 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1924 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1925 info, gimple_omp_body_ptr (stmt));
1926 info->suppress_expansion = save_suppress;
1927 break;
1929 case GIMPLE_OMP_SINGLE:
1930 save_suppress = info->suppress_expansion;
1931 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1932 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1933 info, gimple_omp_body_ptr (stmt));
1934 info->suppress_expansion = save_suppress;
1935 break;
1937 case GIMPLE_OMP_TARGET:
1938 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
1940 save_suppress = info->suppress_expansion;
1941 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1942 info->suppress_expansion = save_suppress;
1943 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1944 info, gimple_omp_body_ptr (stmt));
1945 break;
1947 save_suppress = info->suppress_expansion;
1948 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
1950 tree c;
1951 (void) get_frame_type (info);
1952 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1953 OMP_CLAUSE_DECL (c) = info->frame_decl;
1954 OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TOFROM;
1955 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
1956 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1957 gimple_omp_target_set_clauses (stmt, c);
1960 save_local_var_chain = info->new_local_var_chain;
1961 info->new_local_var_chain = NULL;
1963 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1964 gimple_omp_body_ptr (stmt));
1966 if (info->new_local_var_chain)
1967 declare_vars (info->new_local_var_chain,
1968 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1969 info->new_local_var_chain = save_local_var_chain;
1970 info->suppress_expansion = save_suppress;
1971 break;
1973 case GIMPLE_OMP_TEAMS:
1974 save_suppress = info->suppress_expansion;
1975 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1976 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1977 info, gimple_omp_body_ptr (stmt));
1978 info->suppress_expansion = save_suppress;
1979 break;
1981 case GIMPLE_OMP_SECTION:
1982 case GIMPLE_OMP_MASTER:
1983 case GIMPLE_OMP_TASKGROUP:
1984 case GIMPLE_OMP_ORDERED:
1985 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1986 info, gimple_omp_body_ptr (stmt));
1987 break;
1989 case GIMPLE_COND:
1990 wi->val_only = true;
1991 wi->is_lhs = false;
1992 *handled_ops_p = false;
1993 return NULL_TREE;
1995 case GIMPLE_ASSIGN:
1996 if (gimple_clobber_p (stmt))
1998 tree lhs = gimple_assign_lhs (stmt);
1999 if (!use_pointer_in_frame (lhs)
2000 && lookup_field_for_decl (info, lhs, NO_INSERT))
2002 gsi_replace (gsi, gimple_build_nop (), true);
2003 break;
2006 *handled_ops_p = false;
2007 return NULL_TREE;
2009 case GIMPLE_BIND:
2010 for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
2011 if (TREE_CODE (var) == NAMELIST_DECL)
2013 /* Adjust decls mentioned in NAMELIST_DECL. */
2014 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2015 tree decl;
2016 unsigned int i;
2018 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2020 if (TREE_CODE (decl) == VAR_DECL
2021 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2022 continue;
2023 if (decl_function_context (decl) == info->context
2024 && !use_pointer_in_frame (decl))
2026 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2027 if (field)
2029 CONSTRUCTOR_ELT (decls, i)->value
2030 = get_local_debug_decl (info, decl, field);
2036 *handled_ops_p = false;
2037 return NULL_TREE;
2039 default:
2040 /* For every other statement that we are not interested in
2041 handling here, let the walker traverse the operands. */
2042 *handled_ops_p = false;
2043 return NULL_TREE;
2046 /* Indicate that we have handled all the operands ourselves. */
2047 *handled_ops_p = true;
2048 return NULL_TREE;
2052 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2053 that reference labels from outer functions. The rewrite will be a
2054 call to __builtin_nonlocal_goto. */
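/* An illustrative sketch (hypothetical GNU C input, not part of this file):

     void
     outer (void)
     {
       __label__ out;
       void inner (void) { goto out; }   // goto targets a label in OUTER
       inner ();
      out:
       return;
     }

   The goto inside INNER cannot be an ordinary jump, since OUTER's frame
   must be reinstated first; it is rewritten below into a call to
   __builtin_nonlocal_goto with the address of a freshly created
   DECL_NONLOCAL label and the address of OUTER's save area.  */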
2056 static tree
2057 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2058 struct walk_stmt_info *wi)
2060 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2061 tree label, new_label, target_context, x, field;
2062 gimple call;
2063 gimple stmt = gsi_stmt (*gsi);
2065 if (gimple_code (stmt) != GIMPLE_GOTO)
2067 *handled_ops_p = false;
2068 return NULL_TREE;
2071 label = gimple_goto_dest (stmt);
2072 if (TREE_CODE (label) != LABEL_DECL)
2074 *handled_ops_p = false;
2075 return NULL_TREE;
2078 target_context = decl_function_context (label);
2079 if (target_context == info->context)
2081 *handled_ops_p = false;
2082 return NULL_TREE;
2085 for (i = info->outer; target_context != i->context; i = i->outer)
2086 continue;
2088 /* The original user label may also be used for a normal goto; therefore
2089 we must create a new label that will actually receive the abnormal
2090 control transfer. This new label will be marked LABEL_NONLOCAL; this
2091 mark will trigger proper behavior in the cfg, as well as cause the
2092 (hairy target-specific) non-local goto receiver code to be generated
2093 when we expand rtl. Enter this association into var_map so that we
2094 can insert the new label into the IL during a second pass. */
2095 tree *slot = &i->var_map->get_or_insert (label);
2096 if (*slot == NULL)
2098 new_label = create_artificial_label (UNKNOWN_LOCATION);
2099 DECL_NONLOCAL (new_label) = 1;
2100 *slot = new_label;
2102 else
2103 new_label = *slot;
2105 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2106 field = get_nl_goto_field (i);
2107 x = get_frame_field (info, target_context, field, gsi);
2108 x = build_addr (x, target_context);
2109 x = gsi_gimplify_val (info, x, gsi);
2110 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2111 2, build_addr (new_label, target_context), x);
2112 gsi_replace (gsi, call, false);
2114 /* We have handled all of STMT's operands, no need to keep going. */
2115 *handled_ops_p = true;
2116 return NULL_TREE;
2120 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2121 are referenced via nonlocal goto from a nested function. The rewrite
2122 will involve installing a newly generated DECL_NONLOCAL label, and
2123 (potentially) a branch around the rtl gunk that is assumed to be
2124 attached to such a label. */
2126 static tree
2127 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2128 struct walk_stmt_info *wi)
2130 struct nesting_info *const info = (struct nesting_info *) wi->info;
2131 tree label, new_label;
2132 gimple_stmt_iterator tmp_gsi;
2133 gimple stmt = gsi_stmt (*gsi);
2135 if (gimple_code (stmt) != GIMPLE_LABEL)
2137 *handled_ops_p = false;
2138 return NULL_TREE;
2141 label = gimple_label_label (stmt);
2143 tree *slot = info->var_map->get (label);
2144 if (!slot)
2146 *handled_ops_p = false;
2147 return NULL_TREE;
2150 /* If there's any possibility that the previous statement falls through,
2151 then we must branch around the new non-local label. */
2152 tmp_gsi = wi->gsi;
2153 gsi_prev (&tmp_gsi);
2154 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2156 gimple stmt = gimple_build_goto (label);
2157 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2160 new_label = (tree) *slot;
2161 stmt = gimple_build_label (new_label);
2162 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2164 *handled_ops_p = true;
2165 return NULL_TREE;
2169 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2170 of nested functions that require the use of trampolines. The rewrite
2171 will involve a reference to a trampoline generated for the occasion. */
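/* An illustrative sketch (hypothetical GNU C input, not part of this file):

     int
     outer (int x)
     {
       int inner (int y) { return x + y; }   // INNER needs OUTER's frame
       int (*fp) (int) = inner;              // address taken: escapes the scope
       return fp (1);
     }

   Because FP may be called from anywhere, the address stored in it must
   carry INNER's static chain along with it; that is what the trampoline
   built below provides.  */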
2173 static tree
2174 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2176 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2177 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2178 tree t = *tp, decl, target_context, x, builtin;
2179 gimple call;
2181 *walk_subtrees = 0;
2182 switch (TREE_CODE (t))
2184 case ADDR_EXPR:
2185 /* Build
2186 T.1 = &CHAIN->tramp;
2187 T.2 = __builtin_adjust_trampoline (T.1);
2188 T.3 = (func_type)T.2;
2191 decl = TREE_OPERAND (t, 0);
2192 if (TREE_CODE (decl) != FUNCTION_DECL)
2193 break;
2195 /* Only need to process nested functions. */
2196 target_context = decl_function_context (decl);
2197 if (!target_context)
2198 break;
2200 /* If the nested function doesn't use a static chain, then
2201 it doesn't need a trampoline. */
2202 if (!DECL_STATIC_CHAIN (decl))
2203 break;
2205 /* If we don't want a trampoline, then don't build one. */
2206 if (TREE_NO_TRAMPOLINE (t))
2207 break;
2209 /* Look up the immediate parent of the callee, as that's where
2210 we need to insert the trampoline. */
2211 for (i = info; i->context != target_context; i = i->outer)
2212 continue;
2213 x = lookup_tramp_for_decl (i, decl, INSERT);
2215 /* Compute the address of the field holding the trampoline. */
2216 x = get_frame_field (info, target_context, x, &wi->gsi);
2217 x = build_addr (x, target_context);
2218 x = gsi_gimplify_val (info, x, &wi->gsi);
2220 /* Do machine-specific ugliness. Normally this will involve
2221 computing extra alignment, but it can really be anything. */
2222 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2223 call = gimple_build_call (builtin, 1, x);
2224 x = init_tmp_var_with_call (info, &wi->gsi, call);
2226 /* Cast back to the proper function type. */
2227 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2228 x = init_tmp_var (info, x, &wi->gsi);
2230 *tp = x;
2231 break;
2233 default:
2234 if (!IS_TYPE_OR_DECL_P (t))
2235 *walk_subtrees = 1;
2236 break;
2239 return NULL_TREE;
2243 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2244 to addresses of nested functions that require the use of
2245 trampolines. The rewrite will involve a reference to a trampoline
2246 generated for the occasion. */
2248 static tree
2249 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2250 struct walk_stmt_info *wi)
2252 struct nesting_info *info = (struct nesting_info *) wi->info;
2253 gimple stmt = gsi_stmt (*gsi);
2255 switch (gimple_code (stmt))
2257 case GIMPLE_CALL:
2259 /* Only walk call arguments, lest we generate trampolines for
2260 direct calls. */
2261 unsigned long i, nargs = gimple_call_num_args (stmt);
2262 for (i = 0; i < nargs; i++)
2263 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2264 wi, NULL);
2265 break;
2268 case GIMPLE_OMP_TARGET:
2269 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
2271 *handled_ops_p = false;
2272 return NULL_TREE;
2274 /* FALLTHRU */
2275 case GIMPLE_OMP_PARALLEL:
2276 case GIMPLE_OMP_TASK:
2278 tree save_local_var_chain;
2279 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2280 save_local_var_chain = info->new_local_var_chain;
2281 info->new_local_var_chain = NULL;
2282 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2283 info, gimple_omp_body_ptr (stmt));
2284 if (info->new_local_var_chain)
2285 declare_vars (info->new_local_var_chain,
2286 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2287 false);
2288 info->new_local_var_chain = save_local_var_chain;
2290 break;
2292 default:
2293 *handled_ops_p = false;
2294 return NULL_TREE;
2297 *handled_ops_p = true;
2298 return NULL_TREE;
2303 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2304 that reference nested functions to make sure that the static chain
2305 is set up properly for the call. */
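/* An illustrative sketch (hypothetical GNU C input, not part of this file):

     int
     outer (int x)
     {
       int inner (void) { return x; }
       return inner ();   // direct call; needs &FRAME as its static chain
     }

   The call stays a direct call, but gimple_call_set_chain below records
   the address of the appropriate frame object (OUTER's own frame here, or
   one reached through the chain for deeper nestings) as the static-chain
   argument.  */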
2307 static tree
2308 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2309 struct walk_stmt_info *wi)
2311 struct nesting_info *const info = (struct nesting_info *) wi->info;
2312 tree decl, target_context;
2313 char save_static_chain_added;
2314 int i;
2315 gimple stmt = gsi_stmt (*gsi);
2317 switch (gimple_code (stmt))
2319 case GIMPLE_CALL:
2320 if (gimple_call_chain (stmt))
2321 break;
2322 decl = gimple_call_fndecl (stmt);
2323 if (!decl)
2324 break;
2325 target_context = decl_function_context (decl);
2326 if (target_context && DECL_STATIC_CHAIN (decl))
2328 gimple_call_set_chain (stmt, get_static_chain (info, target_context,
2329 &wi->gsi));
2330 info->static_chain_added |= (1 << (info->context != target_context));
2332 break;
2334 case GIMPLE_OMP_PARALLEL:
2335 case GIMPLE_OMP_TASK:
2336 save_static_chain_added = info->static_chain_added;
2337 info->static_chain_added = 0;
2338 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2339 for (i = 0; i < 2; i++)
2341 tree c, decl;
2342 if ((info->static_chain_added & (1 << i)) == 0)
2343 continue;
2344 decl = i ? get_chain_decl (info) : info->frame_decl;
2345 /* Don't add CHAIN.* or FRAME.* twice. */
2346 for (c = gimple_omp_taskreg_clauses (stmt);
2348 c = OMP_CLAUSE_CHAIN (c))
2349 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2350 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2351 && OMP_CLAUSE_DECL (c) == decl)
2352 break;
2353 if (c == NULL)
2355 c = build_omp_clause (gimple_location (stmt),
2356 i ? OMP_CLAUSE_FIRSTPRIVATE
2357 : OMP_CLAUSE_SHARED);
2358 OMP_CLAUSE_DECL (c) = decl;
2359 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2360 gimple_omp_taskreg_set_clauses (stmt, c);
2363 info->static_chain_added |= save_static_chain_added;
2364 break;
2366 case GIMPLE_OMP_TARGET:
2367 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
2369 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2370 break;
2372 save_static_chain_added = info->static_chain_added;
2373 info->static_chain_added = 0;
2374 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2375 for (i = 0; i < 2; i++)
2377 tree c, decl;
2378 if ((info->static_chain_added & (1 << i)) == 0)
2379 continue;
2380 decl = i ? get_chain_decl (info) : info->frame_decl;
2381 /* Don't add CHAIN.* or FRAME.* twice. */
2382 for (c = gimple_omp_target_clauses (stmt);
2384 c = OMP_CLAUSE_CHAIN (c))
2385 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2386 && OMP_CLAUSE_DECL (c) == decl)
2387 break;
2388 if (c == NULL)
2390 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2391 OMP_CLAUSE_DECL (c) = decl;
2392 OMP_CLAUSE_MAP_KIND (c)
2393 = i ? OMP_CLAUSE_MAP_TO : OMP_CLAUSE_MAP_TOFROM;
2394 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2395 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2396 gimple_omp_target_set_clauses (stmt, c);
2399 info->static_chain_added |= save_static_chain_added;
2400 break;
2402 case GIMPLE_OMP_FOR:
2403 walk_body (convert_gimple_call, NULL, info,
2404 gimple_omp_for_pre_body_ptr (stmt));
2405 /* FALLTHRU */
2406 case GIMPLE_OMP_SECTIONS:
2407 case GIMPLE_OMP_SECTION:
2408 case GIMPLE_OMP_SINGLE:
2409 case GIMPLE_OMP_TEAMS:
2410 case GIMPLE_OMP_MASTER:
2411 case GIMPLE_OMP_TASKGROUP:
2412 case GIMPLE_OMP_ORDERED:
2413 case GIMPLE_OMP_CRITICAL:
2414 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2415 break;
2417 default:
2418 /* Keep looking for other operands. */
2419 *handled_ops_p = false;
2420 return NULL_TREE;
2423 *handled_ops_p = true;
2424 return NULL_TREE;
2427 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2428 call expressions. At the same time, determine if a nested function
2429 actually uses its static chain; if not, remember that. */
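/* An illustrative sketch (hypothetical GNU C input, not part of this file):

     int
     outer (void)
     {
       int inner (int y) { return y + 1; }   // no uplevel references
       return inner (2);
     }

   INNER never touches OUTER's locals, so when optimizing we can clear
   DECL_STATIC_CHAIN for it and avoid creating a frame object in OUTER at
   all; the fixed-point loop below rechecks this after the trampoline and
   call rewrites, since those can introduce new chain uses.  */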
2431 static void
2432 convert_all_function_calls (struct nesting_info *root)
2434 unsigned int chain_count = 0, old_chain_count, iter_count;
2435 struct nesting_info *n;
2437 /* First, optimistically clear static_chain for all decls that haven't
2438 used the static chain already for variable access. But always create
2439 it if not optimizing. This makes it possible to reconstruct the static
2440 nesting tree at run time and thus to resolve up-level references from
2441 within the debugger. */
2442 FOR_EACH_NEST_INFO (n, root)
2444 tree decl = n->context;
2445 if (!optimize)
2447 if (n->inner)
2448 (void) get_frame_type (n);
2449 if (n->outer)
2450 (void) get_chain_decl (n);
2452 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2454 DECL_STATIC_CHAIN (decl) = 0;
2455 if (dump_file && (dump_flags & TDF_DETAILS))
2456 fprintf (dump_file, "Guessing no static-chain for %s\n",
2457 lang_hooks.decl_printable_name (decl, 2));
2459 else
2460 DECL_STATIC_CHAIN (decl) = 1;
2461 chain_count += DECL_STATIC_CHAIN (decl);
2464 /* Walk the functions and perform transformations. Note that these
2465 transformations can induce new uses of the static chain, which in turn
2466 require re-examining all users of the decl. */
2467 /* ??? It would make sense to try to use the call graph to speed this up,
2468 but the call graph hasn't really been built yet. Even if it had been, we
2469 would still need to iterate in this loop since address-of references
2470 wouldn't show up in the callgraph anyway. */
2471 iter_count = 0;
2474 old_chain_count = chain_count;
2475 chain_count = 0;
2476 iter_count++;
2478 if (dump_file && (dump_flags & TDF_DETAILS))
2479 fputc ('\n', dump_file);
2481 FOR_EACH_NEST_INFO (n, root)
2483 tree decl = n->context;
2484 walk_function (convert_tramp_reference_stmt,
2485 convert_tramp_reference_op, n);
2486 walk_function (convert_gimple_call, NULL, n);
2487 chain_count += DECL_STATIC_CHAIN (decl);
2490 while (chain_count != old_chain_count);
2492 if (dump_file && (dump_flags & TDF_DETAILS))
2493 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2494 iter_count);
2497 struct nesting_copy_body_data
2499 copy_body_data cb;
2500 struct nesting_info *root;
2503 /* A helper subroutine for debug_var_chain type remapping. */
2505 static tree
2506 nesting_copy_decl (tree decl, copy_body_data *id)
2508 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2509 tree *slot = nid->root->var_map->get (decl);
2511 if (slot)
2512 return (tree) *slot;
2514 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2516 tree new_decl = copy_decl_no_change (decl, id);
2517 DECL_ORIGINAL_TYPE (new_decl)
2518 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2519 return new_decl;
2522 if (TREE_CODE (decl) == VAR_DECL
2523 || TREE_CODE (decl) == PARM_DECL
2524 || TREE_CODE (decl) == RESULT_DECL)
2525 return decl;
2527 return copy_decl_no_change (decl, id);
2530 /* A helper function for remap_vla_decls. See if *TP contains
2531 some remapped variables. */
2533 static tree
2534 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2536 struct nesting_info *root = (struct nesting_info *) data;
2537 tree t = *tp;
2539 if (DECL_P (t))
2541 *walk_subtrees = 0;
2542 tree *slot = root->var_map->get (t);
2544 if (slot)
2545 return *slot;
2547 return NULL;
2550 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2551 involved. */
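/* An illustrative sketch (hypothetical GNU C input, not part of this file):

     void
     outer (int n)
     {
       char buf[n];                   // variably modified type, depends on N
       void inner (void) { n++; }     // forces N into OUTER's frame struct
       inner ();
       buf[0] = 0;
     }

   Once N has been remapped into the frame, BUF's DECL_VALUE_EXPR and its
   variably modified type may still mention the old decls; the remapping
   below rewrites them so the debug information stays consistent.  */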
2553 static void
2554 remap_vla_decls (tree block, struct nesting_info *root)
2556 tree var, subblock, val, type;
2557 struct nesting_copy_body_data id;
2559 for (subblock = BLOCK_SUBBLOCKS (block);
2560 subblock;
2561 subblock = BLOCK_CHAIN (subblock))
2562 remap_vla_decls (subblock, root);
2564 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2565 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2567 val = DECL_VALUE_EXPR (var);
2568 type = TREE_TYPE (var);
2570 if (!(TREE_CODE (val) == INDIRECT_REF
2571 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2572 && variably_modified_type_p (type, NULL)))
2573 continue;
2575 if (root->var_map->get (TREE_OPERAND (val, 0))
2576 || walk_tree (&type, contains_remapped_vars, root, NULL))
2577 break;
2580 if (var == NULL_TREE)
2581 return;
2583 memset (&id, 0, sizeof (id));
2584 id.cb.copy_decl = nesting_copy_decl;
2585 id.cb.decl_map = new hash_map<tree, tree>;
2586 id.root = root;
2588 for (; var; var = DECL_CHAIN (var))
2589 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2591 struct nesting_info *i;
2592 tree newt, context;
2594 val = DECL_VALUE_EXPR (var);
2595 type = TREE_TYPE (var);
2597 if (!(TREE_CODE (val) == INDIRECT_REF
2598 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2599 && variably_modified_type_p (type, NULL)))
2600 continue;
2602 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2603 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2604 continue;
2606 context = decl_function_context (var);
2607 for (i = root; i; i = i->outer)
2608 if (i->context == context)
2609 break;
2611 if (i == NULL)
2612 continue;
2614 /* Fully expand value expressions. This avoids having debug variables
2615 that are referenced only from value expressions and could be swept during GC. */
2616 if (slot)
2618 tree t = (tree) *slot;
2619 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2620 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2623 id.cb.src_fn = i->context;
2624 id.cb.dst_fn = i->context;
2625 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2627 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2628 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2630 newt = TREE_TYPE (newt);
2631 type = TREE_TYPE (type);
2633 if (TYPE_NAME (newt)
2634 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2635 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2636 && newt != type
2637 && TYPE_NAME (newt) == TYPE_NAME (type))
2638 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2640 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2641 if (val != DECL_VALUE_EXPR (var))
2642 SET_DECL_VALUE_EXPR (var, val);
2645 delete id.cb.decl_map;
2648 /* Fold the MEM_REF *E. */
2649 bool
2650 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2652 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2653 *ref_p = fold (*ref_p);
2654 return true;
2657 /* Do "everything else" to clean up or complete state collected by the
2658 various walking passes -- lay out the types and decls, generate code
2659 to initialize the frame decl, store critical expressions in the
2660 struct function for rtl to find. */
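/* Roughly, the initialization code built below has this shape (an informal
   sketch; FRAME, CHAIN and the field names are the artificial decls this
   pass creates, not user-visible identifiers):

     FRAME.parm = parm;                        // copy nonlocally used parms
     FRAME.chain_field = CHAIN;                // publish the incoming chain
     __builtin_init_trampoline (&FRAME.tramp,
                                &inner, &FRAME);  // one call per trampoline

   and is spliced in front of the body of the outermost GIMPLE_BIND.  */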
2662 static void
2663 finalize_nesting_tree_1 (struct nesting_info *root)
2665 gimple_seq stmt_list;
2666 gimple stmt;
2667 tree context = root->context;
2668 struct function *sf;
2670 stmt_list = NULL;
2672 /* If we created a non-local frame type or decl, we need to lay them
2673 out at this time. */
2674 if (root->frame_type)
2676 /* In some cases the frame type will trigger the -Wpadded warning.
2677 This is not helpful; suppress it. */
2678 int save_warn_padded = warn_padded;
2679 tree *adjust;
2681 warn_padded = 0;
2682 layout_type (root->frame_type);
2683 warn_padded = save_warn_padded;
2684 layout_decl (root->frame_decl, 0);
2686 /* Remove root->frame_decl from root->new_local_var_chain, so
2687 that we can declare it also in the lexical blocks, which
2688 helps ensure virtual regs that end up appearing in its RTL
2689 expression get substituted in instantiate_virtual_regs(). */
2690 for (adjust = &root->new_local_var_chain;
2691 *adjust != root->frame_decl;
2692 adjust = &DECL_CHAIN (*adjust))
2693 gcc_assert (DECL_CHAIN (*adjust));
2694 *adjust = DECL_CHAIN (*adjust);
2696 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2697 declare_vars (root->frame_decl,
2698 gimple_seq_first_stmt (gimple_body (context)), true);
2701 /* If any parameters were referenced non-locally, then we need to
2702 insert a copy. Likewise, if any variables were referenced by
2703 pointer, we need to initialize the address. */
2704 if (root->any_parm_remapped)
2706 tree p;
2707 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2709 tree field, x, y;
2711 field = lookup_field_for_decl (root, p, NO_INSERT);
2712 if (!field)
2713 continue;
2715 if (use_pointer_in_frame (p))
2716 x = build_addr (p, context);
2717 else
2718 x = p;
2720 /* If the assignment is from a non-register the stmt is
2721 not valid gimple. Make it so by using a temporary instead. */
2722 if (!is_gimple_reg (x)
2723 && is_gimple_reg_type (TREE_TYPE (x)))
2725 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2726 x = init_tmp_var (root, x, &gsi);
2729 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2730 root->frame_decl, field, NULL_TREE);
2731 stmt = gimple_build_assign (y, x);
2732 gimple_seq_add_stmt (&stmt_list, stmt);
2736 /* If a chain_field was created, then it needs to be initialized
2737 from chain_decl. */
2738 if (root->chain_field)
2740 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2741 root->frame_decl, root->chain_field, NULL_TREE);
2742 stmt = gimple_build_assign (x, get_chain_decl (root));
2743 gimple_seq_add_stmt (&stmt_list, stmt);
2746 /* If trampolines were created, then we need to initialize them. */
2747 if (root->any_tramp_created)
2749 struct nesting_info *i;
2750 for (i = root->inner; i ; i = i->next)
2752 tree arg1, arg2, arg3, x, field;
2754 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2755 if (!field)
2756 continue;
2758 gcc_assert (DECL_STATIC_CHAIN (i->context));
2759 arg3 = build_addr (root->frame_decl, context);
2761 arg2 = build_addr (i->context, context);
2763 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2764 root->frame_decl, field, NULL_TREE);
2765 arg1 = build_addr (x, context);
2767 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2768 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2769 gimple_seq_add_stmt (&stmt_list, stmt);
2773 /* If we created initialization statements, insert them. */
2774 if (stmt_list)
2776 gimple bind;
2777 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2778 bind = gimple_seq_first_stmt (gimple_body (context));
2779 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2780 gimple_bind_set_body (bind, stmt_list);
2783 /* If a chain_decl was created, then it needs to be registered with
2784 struct function so that it gets initialized from the static chain
2785 register at the beginning of the function. */
2786 sf = DECL_STRUCT_FUNCTION (root->context);
2787 sf->static_chain_decl = root->chain_decl;
2789 /* Similarly for the non-local goto save area. */
2790 if (root->nl_goto_field)
2792 sf->nonlocal_goto_save_area
2793 = get_frame_field (root, context, root->nl_goto_field, NULL);
2794 sf->has_nonlocal_label = 1;
2797 /* Make sure all new local variables get inserted into the
2798 proper BIND_EXPR. */
2799 if (root->new_local_var_chain)
2800 declare_vars (root->new_local_var_chain,
2801 gimple_seq_first_stmt (gimple_body (root->context)),
2802 false);
2804 if (root->debug_var_chain)
2806 tree debug_var;
2807 gimple scope;
2809 remap_vla_decls (DECL_INITIAL (root->context), root);
2811 for (debug_var = root->debug_var_chain; debug_var;
2812 debug_var = DECL_CHAIN (debug_var))
2813 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2814 break;
2816 /* If there are any debug decls with variable length types,
2817 remap those types using other debug_var_chain variables. */
2818 if (debug_var)
2820 struct nesting_copy_body_data id;
2822 memset (&id, 0, sizeof (id));
2823 id.cb.copy_decl = nesting_copy_decl;
2824 id.cb.decl_map = new hash_map<tree, tree>;
2825 id.root = root;
2827 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2828 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2830 tree type = TREE_TYPE (debug_var);
2831 tree newt, t = type;
2832 struct nesting_info *i;
2834 for (i = root; i; i = i->outer)
2835 if (variably_modified_type_p (type, i->context))
2836 break;
2838 if (i == NULL)
2839 continue;
2841 id.cb.src_fn = i->context;
2842 id.cb.dst_fn = i->context;
2843 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2845 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2846 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2848 newt = TREE_TYPE (newt);
2849 t = TREE_TYPE (t);
2851 if (TYPE_NAME (newt)
2852 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2853 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2854 && newt != t
2855 && TYPE_NAME (newt) == TYPE_NAME (t))
2856 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2859 delete id.cb.decl_map;
2862 scope = gimple_seq_first_stmt (gimple_body (root->context));
2863 if (gimple_bind_block (scope))
2864 declare_vars (root->debug_var_chain, scope, true);
2865 else
2866 BLOCK_VARS (DECL_INITIAL (root->context))
2867 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2868 root->debug_var_chain);
2871 /* Fold the rewritten MEM_REF trees. */
2872 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
2874 /* Dump the translated tree function. */
2875 if (dump_file)
2877 fputs ("\n\n", dump_file);
2878 dump_function_to_file (root->context, dump_file, dump_flags);
2882 static void
2883 finalize_nesting_tree (struct nesting_info *root)
2885 struct nesting_info *n;
2886 FOR_EACH_NEST_INFO (n, root)
2887 finalize_nesting_tree_1 (n);
2890 /* Unnest the nodes and pass them to cgraph. */
2892 static void
2893 unnest_nesting_tree_1 (struct nesting_info *root)
2895 struct cgraph_node *node = cgraph_node::get (root->context);
2897 /* For nested functions, update the cgraph to reflect unnesting.
2898 We also delay finalization of these functions until this point. */
2899 if (node->origin)
2901 node->unnest ();
2902 cgraph_node::finalize_function (root->context, true);
2906 static void
2907 unnest_nesting_tree (struct nesting_info *root)
2909 struct nesting_info *n;
2910 FOR_EACH_NEST_INFO (n, root)
2911 unnest_nesting_tree_1 (n);
2914 /* Free the data structures allocated during this pass. */
2916 static void
2917 free_nesting_tree (struct nesting_info *root)
2919 struct nesting_info *node, *next;
2921 node = iter_nestinfo_start (root);
2924 next = iter_nestinfo_next (node);
2925 delete node->var_map;
2926 delete node->field_map;
2927 delete node->mem_refs;
2928 free (node);
2929 node = next;
2931 while (node);
2934 /* Gimplify a function and all its nested functions. */
2935 static void
2936 gimplify_all_functions (struct cgraph_node *root)
2938 struct cgraph_node *iter;
2939 if (!gimple_body (root->decl))
2940 gimplify_function_tree (root->decl);
2941 for (iter = root->nested; iter; iter = iter->next_nested)
2942 gimplify_all_functions (iter);
2945 /* Main entry point for this pass. Process FNDECL and all of its nested
2946 subroutines and turn them into something less tightly bound. */
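/* An illustrative sketch (hypothetical GNU C input, not part of this file):

     int
     outer (int x)
     {
       int inner (void) { return x; }
       int (*fp) (void) = inner;
       return fp () + inner ();
     }

   After this pass, X lives in an explicit frame struct in OUTER, INNER
   reads it through its static chain, FP points at a trampoline that
   supplies that chain, and both functions can be compiled and emitted
   independently by the rest of the compiler.  */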
2948 void
2949 lower_nested_functions (tree fndecl)
2951 struct cgraph_node *cgn;
2952 struct nesting_info *root;
2954 /* If there are no nested functions, there's nothing to do. */
2955 cgn = cgraph_node::get (fndecl);
2956 if (!cgn->nested)
2957 return;
2959 gimplify_all_functions (cgn);
2961 dump_file = dump_begin (TDI_nested, &dump_flags);
2962 if (dump_file)
2963 fprintf (dump_file, "\n;; Function %s\n\n",
2964 lang_hooks.decl_printable_name (fndecl, 2));
2966 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2967 root = create_nesting_tree (cgn);
2969 walk_all_functions (convert_nonlocal_reference_stmt,
2970 convert_nonlocal_reference_op,
2971 root);
2972 walk_all_functions (convert_local_reference_stmt,
2973 convert_local_reference_op,
2974 root);
2975 walk_all_functions (convert_nl_goto_reference, NULL, root);
2976 walk_all_functions (convert_nl_goto_receiver, NULL, root);
2978 convert_all_function_calls (root);
2979 finalize_nesting_tree (root);
2980 unnest_nesting_tree (root);
2982 free_nesting_tree (root);
2983 bitmap_obstack_release (&nesting_info_bitmap_obstack);
2985 if (dump_file)
2987 dump_end (TDI_nested, dump_file);
2988 dump_file = NULL;
2992 #include "gt-tree-nested.h"