gcc/tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stringpool.h"
26 #include "stor-layout.h"
27 #include "tm_p.h"
28 #include "hashtab.h"
29 #include "hash-set.h"
30 #include "vec.h"
31 #include "machmode.h"
32 #include "hard-reg-set.h"
33 #include "input.h"
34 #include "function.h"
35 #include "tree-dump.h"
36 #include "tree-inline.h"
37 #include "basic-block.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "gimplify.h"
44 #include "gimple-iterator.h"
45 #include "gimple-walk.h"
46 #include "tree-iterator.h"
47 #include "bitmap.h"
48 #include "cgraph.h"
49 #include "tree-cfg.h"
50 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
51 #include "langhooks.h"
52 #include "gimple-low.h"
55 /* The object of this pass is to lower the representation of a set of nested
56 functions in order to expose all of the gory details of the various
57 nonlocal references. We want to do this sooner rather than later, in
58 order to give us more freedom in emitting all of the functions in question.
60 Back in olden times, when gcc was young, we developed an insanely
61 complicated scheme whereby variables which were referenced nonlocally
62 were forced to live in the stack of the declaring function, and then
63 the nested functions magically discovered where these variables were
64 placed. In order for this scheme to function properly, it required
65 that the outer function be partially expanded, then we switch to
66 compiling the inner function, and once done with those we switch back
67 to compiling the outer function. Such delicate ordering requirements
68 make it difficult to do whole translation unit optimizations
69 involving such functions.
71 The implementation here is much more direct. Everything that can be
72 referenced by an inner function is a member of an explicitly created
73 structure herein called the "nonlocal frame struct". The incoming
74 static chain for a nested function is a pointer to this struct in
75 the parent. In this way, we settle on known offsets from a known
76 base, and so are decoupled from the logic that places objects in the
77 function's stack frame. More importantly, we don't have to wait for
78 that to happen -- since the compilation of the inner function is no
79 longer tied to a real stack frame, the nonlocal frame struct can be
80 allocated anywhere. Which means that the outer function is now
81 inlinable.
83 Theory of operation here is very simple. Iterate over all the
84 statements in all the functions (depth first) several times,
85 allocating structures and fields on demand. In general we want to
86 examine inner functions first, so that we can avoid making
87 unnecessary changes to outer functions.
89 The order of the passes matters a bit, in that later passes will be
90 skipped if it is discovered that the functions don't actually interact
91 at all. That is, they're nested in the lexical sense but could have
92 been written as independent functions without change. */
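
/* An illustrative sketch (editorial example, not code emitted by this
   file): given

     int
     outer (int n)
     {
       int v = n;
       int inner (void) { return v; }
       return inner ();
     }

   this pass conceptually builds a frame struct in OUTER along the
   lines of

     struct FRAME_outer { int v; };
     struct FRAME_outer FRAME;

   rewrites INNER's use of V as CHAIN->v, where CHAIN is INNER's
   incoming static chain pointer, and passes &FRAME as that chain at
   the call site.  Deeper nesting levels reach their ancestors through
   a "__chain" field added to each frame struct.  The identifiers above
   are only for illustration; the real names are built by
   get_frame_type, get_chain_decl and get_chain_field below.  */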
95 struct nesting_info
97 struct nesting_info *outer;
98 struct nesting_info *inner;
99 struct nesting_info *next;
101 hash_map<tree, tree> *field_map;
102 hash_map<tree, tree> *var_map;
103 hash_set<tree *> *mem_refs;
104 bitmap suppress_expansion;
106 tree context;
107 tree new_local_var_chain;
108 tree debug_var_chain;
109 tree frame_type;
110 tree frame_decl;
111 tree chain_field;
112 tree chain_decl;
113 tree nl_goto_field;
115 bool any_parm_remapped;
116 bool any_tramp_created;
117 char static_chain_added;
121 /* Iterate over the nesting tree, starting with ROOT, depth first. */
123 static inline struct nesting_info *
124 iter_nestinfo_start (struct nesting_info *root)
126 while (root->inner)
127 root = root->inner;
128 return root;
131 static inline struct nesting_info *
132 iter_nestinfo_next (struct nesting_info *node)
134 if (node->next)
135 return iter_nestinfo_start (node->next);
136 return node->outer;
139 #define FOR_EACH_NEST_INFO(I, ROOT) \
140 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
142 /* Obstack used for the bitmaps in the struct above. */
143 static struct bitmap_obstack nesting_info_bitmap_obstack;
146 /* We're working in so many different function contexts simultaneously
147 that create_tmp_var is dangerous. Prevent mishap. */
148 #define create_tmp_var cant_use_create_tmp_var_here_dummy
150 /* Like create_tmp_var, except record the variable for registration at
151 the given nesting level. */
153 static tree
154 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
156 tree tmp_var;
158 /* If the type is of variable size or a type which must be created by the
159 frontend, something is wrong. Note that we explicitly allow
160 incomplete types here, since we create them ourselves. */
161 gcc_assert (!TREE_ADDRESSABLE (type));
162 gcc_assert (!TYPE_SIZE_UNIT (type)
163 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
165 tmp_var = create_tmp_var_raw (type, prefix);
166 DECL_CONTEXT (tmp_var) = info->context;
167 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
168 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
169 if (TREE_CODE (type) == COMPLEX_TYPE
170 || TREE_CODE (type) == VECTOR_TYPE)
171 DECL_GIMPLE_REG_P (tmp_var) = 1;
173 info->new_local_var_chain = tmp_var;
175 return tmp_var;
178 /* Take the address of EXP to be used within function CONTEXT.
179 Mark it for addressability as necessary. */
181 tree
182 build_addr (tree exp, tree context)
184 tree base = exp;
185 tree save_context;
186 tree retval;
188 while (handled_component_p (base))
189 base = TREE_OPERAND (base, 0);
191 if (DECL_P (base))
192 TREE_ADDRESSABLE (base) = 1;
194 /* Building the ADDR_EXPR will compute a set of properties for
195 that ADDR_EXPR. Those properties are unfortunately context
196 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
198 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
199 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
200 way the properties of the ADDR_EXPR are computed properly. */
201 save_context = current_function_decl;
202 current_function_decl = context;
203 retval = build_fold_addr_expr (exp);
204 current_function_decl = save_context;
205 return retval;
208 /* Insert FIELD into TYPE, sorted by alignment requirements. */
210 void
211 insert_field_into_struct (tree type, tree field)
213 tree *p;
215 DECL_CONTEXT (field) = type;
217 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
218 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
219 break;
221 DECL_CHAIN (field) = *p;
222 *p = field;
224 /* Set correct alignment for frame struct type. */
225 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
226 TYPE_ALIGN (type) = DECL_ALIGN (field);
229 /* Build or return the RECORD_TYPE that describes the frame state that is
230 shared between INFO->CONTEXT and its nested functions. This record will
231 not be complete until finalize_nesting_tree; up until that point we'll
232 be adding fields as necessary.
234 We also build the DECL that represents this frame in the function. */
236 static tree
237 get_frame_type (struct nesting_info *info)
239 tree type = info->frame_type;
240 if (!type)
242 char *name;
244 type = make_node (RECORD_TYPE);
246 name = concat ("FRAME.",
247 IDENTIFIER_POINTER (DECL_NAME (info->context)),
248 NULL);
249 TYPE_NAME (type) = get_identifier (name);
250 free (name);
252 info->frame_type = type;
253 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
254 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
256 /* ??? Always make it addressable for now, since it is meant to
257 be pointed to by the static chain pointer. This pessimizes
258 when it turns out that no static chains are needed because
259 the nested functions referencing non-local variables are not
260 reachable, but the true pessimization is to create the non-
261 local frame structure in the first place. */
262 TREE_ADDRESSABLE (info->frame_decl) = 1;
264 return type;
267 /* Return true if DECL should be referenced by pointer in the non-local
268 frame structure. */
270 static bool
271 use_pointer_in_frame (tree decl)
273 if (TREE_CODE (decl) == PARM_DECL)
275 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
276 sized decls, and inefficient to copy large aggregates. Don't bother
277 moving anything but scalar variables. */
278 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
280 else
282 /* Variable sized types make things "interesting" in the frame. */
283 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
287 /* Given DECL, a non-locally accessed variable, find or create a field
288 in the non-local frame structure for the given nesting context. */
290 static tree
291 lookup_field_for_decl (struct nesting_info *info, tree decl,
292 enum insert_option insert)
294 if (insert == NO_INSERT)
296 tree *slot = info->field_map->get (decl);
297 return slot ? *slot : NULL_TREE;
300 tree *slot = &info->field_map->get_or_insert (decl);
301 if (!*slot)
303 tree field = make_node (FIELD_DECL);
304 DECL_NAME (field) = DECL_NAME (decl);
306 if (use_pointer_in_frame (decl))
308 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
309 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
310 DECL_NONADDRESSABLE_P (field) = 1;
312 else
314 TREE_TYPE (field) = TREE_TYPE (decl);
315 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
316 DECL_ALIGN (field) = DECL_ALIGN (decl);
317 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
318 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
319 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
320 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
323 insert_field_into_struct (get_frame_type (info), field);
324 *slot = field;
326 if (TREE_CODE (decl) == PARM_DECL)
327 info->any_parm_remapped = true;
330 return *slot;
333 /* Build or return the variable that holds the static chain within
334 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
336 static tree
337 get_chain_decl (struct nesting_info *info)
339 tree decl = info->chain_decl;
341 if (!decl)
343 tree type;
345 type = get_frame_type (info->outer);
346 type = build_pointer_type (type);
348 /* Note that this variable is *not* entered into any BIND_EXPR;
349 the construction of this variable is handled specially in
350 expand_function_start and initialize_inlined_parameters.
351 Note also that it's represented as a parameter. This is closer
352 to the truth, since the initial value does come from
353 the caller. */
354 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
355 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
356 DECL_ARTIFICIAL (decl) = 1;
357 DECL_IGNORED_P (decl) = 1;
358 TREE_USED (decl) = 1;
359 DECL_CONTEXT (decl) = info->context;
360 DECL_ARG_TYPE (decl) = type;
362 /* Tell tree-inline.c that we never write to this variable, so
363 it can copy-prop the replacement value immediately. */
364 TREE_READONLY (decl) = 1;
366 info->chain_decl = decl;
368 if (dump_file
369 && (dump_flags & TDF_DETAILS)
370 && !DECL_STATIC_CHAIN (info->context))
371 fprintf (dump_file, "Setting static-chain for %s\n",
372 lang_hooks.decl_printable_name (info->context, 2));
374 DECL_STATIC_CHAIN (info->context) = 1;
376 return decl;
379 /* Build or return the field within the non-local frame state that holds
380 the static chain for INFO->CONTEXT. This is the way to walk back up
381 multiple nesting levels. */
383 static tree
384 get_chain_field (struct nesting_info *info)
386 tree field = info->chain_field;
388 if (!field)
390 tree type = build_pointer_type (get_frame_type (info->outer));
392 field = make_node (FIELD_DECL);
393 DECL_NAME (field) = get_identifier ("__chain");
394 TREE_TYPE (field) = type;
395 DECL_ALIGN (field) = TYPE_ALIGN (type);
396 DECL_NONADDRESSABLE_P (field) = 1;
398 insert_field_into_struct (get_frame_type (info), field);
400 info->chain_field = field;
402 if (dump_file
403 && (dump_flags & TDF_DETAILS)
404 && !DECL_STATIC_CHAIN (info->context))
405 fprintf (dump_file, "Setting static-chain for %s\n",
406 lang_hooks.decl_printable_name (info->context, 2));
408 DECL_STATIC_CHAIN (info->context) = 1;
410 return field;
413 /* Initialize a new temporary with the GIMPLE_CALL CALL. */
415 static tree
416 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
417 gimple call)
419 tree t;
421 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
422 gimple_call_set_lhs (call, t);
423 if (! gsi_end_p (*gsi))
424 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
425 gsi_insert_before (gsi, call, GSI_SAME_STMT);
427 return t;
431 /* Copy EXP into a temporary. Allocate the temporary in the context of
432 INFO and insert the initialization statement before GSI. */
434 static tree
435 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
437 tree t;
438 gimple stmt;
440 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
441 stmt = gimple_build_assign (t, exp);
442 if (! gsi_end_p (*gsi))
443 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
444 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
446 return t;
450 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
452 static tree
453 gsi_gimplify_val (struct nesting_info *info, tree exp,
454 gimple_stmt_iterator *gsi)
456 if (is_gimple_val (exp))
457 return exp;
458 else
459 return init_tmp_var (info, exp, gsi);
462 /* Similarly, but copy from the temporary and insert the statement
463 after the iterator. */
465 static tree
466 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
468 tree t;
469 gimple stmt;
471 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
472 stmt = gimple_build_assign (exp, t);
473 if (! gsi_end_p (*gsi))
474 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
475 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
477 return t;
480 /* Build or return the type used to represent a nested function trampoline. */
482 static GTY(()) tree trampoline_type;
484 static tree
485 get_trampoline_type (struct nesting_info *info)
487 unsigned align, size;
488 tree t;
490 if (trampoline_type)
491 return trampoline_type;
493 align = TRAMPOLINE_ALIGNMENT;
494 size = TRAMPOLINE_SIZE;
496 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
497 then allocate extra space so that we can do dynamic alignment. */
498 if (align > STACK_BOUNDARY)
500 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
501 align = STACK_BOUNDARY;
504 t = build_index_type (size_int (size - 1));
505 t = build_array_type (char_type_node, t);
506 t = build_decl (DECL_SOURCE_LOCATION (info->context),
507 FIELD_DECL, get_identifier ("__data"), t);
508 DECL_ALIGN (t) = align;
509 DECL_USER_ALIGN (t) = 1;
511 trampoline_type = make_node (RECORD_TYPE);
512 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
513 TYPE_FIELDS (trampoline_type) = t;
514 layout_type (trampoline_type);
515 DECL_CONTEXT (t) = trampoline_type;
517 return trampoline_type;
520 /* Given DECL, a nested function, find or create a field in the non-local
521 frame structure for a trampoline for this function. */
523 static tree
524 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
525 enum insert_option insert)
527 if (insert == NO_INSERT)
529 tree *slot = info->var_map->get (decl);
530 return slot ? *slot : NULL_TREE;
533 tree *slot = &info->var_map->get_or_insert (decl);
534 if (!*slot)
536 tree field = make_node (FIELD_DECL);
537 DECL_NAME (field) = DECL_NAME (decl);
538 TREE_TYPE (field) = get_trampoline_type (info);
539 TREE_ADDRESSABLE (field) = 1;
541 insert_field_into_struct (get_frame_type (info), field);
542 *slot = field;
544 info->any_tramp_created = true;
547 return *slot;
550 /* Build or return the field within the non-local frame state that holds
551 the non-local goto "jmp_buf". The buffer itself is maintained by the
552 rtl middle-end as dynamic stack space is allocated. */
554 static tree
555 get_nl_goto_field (struct nesting_info *info)
557 tree field = info->nl_goto_field;
558 if (!field)
560 unsigned size;
561 tree type;
563 /* For __builtin_nonlocal_goto, we need N words. The first is the
564 frame pointer, the rest is for the target's stack pointer save
565 area. The number of words is controlled by STACK_SAVEAREA_MODE;
566 not the best interface, but it'll do for now. */
567 if (Pmode == ptr_mode)
568 type = ptr_type_node;
569 else
570 type = lang_hooks.types.type_for_mode (Pmode, 1);
572 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
573 size = size / GET_MODE_SIZE (Pmode);
574 size = size + 1;
576 type = build_array_type
577 (type, build_index_type (size_int (size)));
579 field = make_node (FIELD_DECL);
580 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
581 TREE_TYPE (field) = type;
582 DECL_ALIGN (field) = TYPE_ALIGN (type);
583 TREE_ADDRESSABLE (field) = 1;
585 insert_field_into_struct (get_frame_type (info), field);
587 info->nl_goto_field = field;
590 return field;
593 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of GIMPLE sequence *PSEQ. */
595 static void
596 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
597 struct nesting_info *info, gimple_seq *pseq)
599 struct walk_stmt_info wi;
601 memset (&wi, 0, sizeof (wi));
602 wi.info = info;
603 wi.val_only = true;
604 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
608 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
610 static inline void
611 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
612 struct nesting_info *info)
614 gimple_seq body = gimple_body (info->context);
615 walk_body (callback_stmt, callback_op, info, &body);
616 gimple_set_body (info->context, body);
619 /* Invoke CALLBACK_STMT/CALLBACK_OP on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
621 static void
622 walk_gimple_omp_for (gimple for_stmt,
623 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
624 struct nesting_info *info)
626 struct walk_stmt_info wi;
627 gimple_seq seq;
628 tree t;
629 size_t i;
631 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
633 seq = NULL;
634 memset (&wi, 0, sizeof (wi));
635 wi.info = info;
636 wi.gsi = gsi_last (seq);
638 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
640 wi.val_only = false;
641 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
642 &wi, NULL);
643 wi.val_only = true;
644 wi.is_lhs = false;
645 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
646 &wi, NULL);
648 wi.val_only = true;
649 wi.is_lhs = false;
650 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
651 &wi, NULL);
653 t = gimple_omp_for_incr (for_stmt, i);
654 gcc_assert (BINARY_CLASS_P (t));
655 wi.val_only = false;
656 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
657 wi.val_only = true;
658 wi.is_lhs = false;
659 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
662 seq = gsi_seq (wi.gsi);
663 if (!gimple_seq_empty_p (seq))
665 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
666 annotate_all_with_location (seq, gimple_location (for_stmt));
667 gimple_seq_add_seq (&pre_body, seq);
668 gimple_omp_for_set_pre_body (for_stmt, pre_body);
672 /* Similarly for ROOT and all functions nested underneath, depth first. */
674 static void
675 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
676 struct nesting_info *root)
678 struct nesting_info *n;
679 FOR_EACH_NEST_INFO (n, root)
680 walk_function (callback_stmt, callback_op, n);
684 /* We have to check for a fairly pathological case. The operands of a
685 nested function are to be interpreted in the context of the enclosing
686 function. So if any are variably-sized, they will get remapped when the
687 enclosing function is inlined. But that remapping would also have to be
688 done in the types of the PARM_DECLs of the nested function, meaning the
689 argument types of that function will disagree with the arguments in the
690 calls to that function. So we'd either have to make a copy of the nested
691 function corresponding to each time the enclosing function was inlined or
692 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
693 function. The former is not practical. The latter would still require
694 detecting this case to know when to add the conversions. So, for now at
695 least, we don't inline such an enclosing function.
697 We have to do that check recursively, so here return indicating whether
698 FNDECL has such a nested function. ORIG_FNDECL is the function we
699 were originally trying to inline; it is used to check whether any
700 argument type is variably modified by anything within it.
702 It would be better to do this in tree-inline.c so that we could give
703 the appropriate warning for why a function can't be inlined, but that's
704 too late since the nesting structure has already been flattened and
705 adding a flag just to record this fact seems a waste of a flag. */
707 static bool
708 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
710 struct cgraph_node *cgn = cgraph_node::get (fndecl);
711 tree arg;
713 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
715 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
716 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
717 return true;
719 if (check_for_nested_with_variably_modified (cgn->decl,
720 orig_fndecl))
721 return true;
724 return false;
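
/* An illustrative (editorial) example of the situation described
   above:

     void
     outer (int n)
     {
       void nested (int a[n]) { a[0] = 0; }
       int buf[n];
       nested (buf);
     }

   The type of NESTED's parameter A is variably modified and refers to
   N in OUTER.  If OUTER were inlined, N would be remapped in the
   inlined body but not in NESTED's PARM_DECL types, so the calls and
   the callee would disagree about the argument type.  This is why
   create_nesting_tree marks such an OUTER as DECL_UNINLINABLE.  */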
727 /* Construct our local data structure describing the function nesting
728 tree rooted by CGN. */
730 static struct nesting_info *
731 create_nesting_tree (struct cgraph_node *cgn)
733 struct nesting_info *info = XCNEW (struct nesting_info);
734 info->field_map = new hash_map<tree, tree>;
735 info->var_map = new hash_map<tree, tree>;
736 info->mem_refs = new hash_set<tree *>;
737 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
738 info->context = cgn->decl;
740 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
742 struct nesting_info *sub = create_nesting_tree (cgn);
743 sub->outer = info;
744 sub->next = info->inner;
745 info->inner = sub;
748 /* See the comment at check_for_nested_with_variably_modified for a
749 discussion of why this has to be here. */
750 if (check_for_nested_with_variably_modified (info->context, info->context))
751 DECL_UNINLINABLE (info->context) = true;
753 return info;
756 /* Return an expression computing the static chain for TARGET_CONTEXT
757 from INFO->CONTEXT. Insert any necessary computations before GSI. */
759 static tree
760 get_static_chain (struct nesting_info *info, tree target_context,
761 gimple_stmt_iterator *gsi)
763 struct nesting_info *i;
764 tree x;
766 if (info->context == target_context)
768 x = build_addr (info->frame_decl, target_context);
770 else
772 x = get_chain_decl (info);
774 for (i = info->outer; i->context != target_context; i = i->outer)
776 tree field = get_chain_field (i);
778 x = build_simple_mem_ref (x);
779 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
780 x = init_tmp_var (info, x, gsi);
784 return x;
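
/* For illustration (approximate GIMPLE, editorial example): if
   TARGET_CONTEXT is two nesting levels above INFO->CONTEXT, the loop
   above emits roughly

     _1 = CHAIN->__chain;

   and returns _1; for each additional level another load of the next
   frame's "__chain" field is inserted via init_tmp_var.  */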
788 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
789 frame as seen from INFO->CONTEXT. Insert any necessary computations
790 before GSI. */
792 static tree
793 get_frame_field (struct nesting_info *info, tree target_context,
794 tree field, gimple_stmt_iterator *gsi)
796 struct nesting_info *i;
797 tree x;
799 if (info->context == target_context)
801 /* Make sure frame_decl gets created. */
802 (void) get_frame_type (info);
803 x = info->frame_decl;
805 else
807 x = get_chain_decl (info);
809 for (i = info->outer; i->context != target_context; i = i->outer)
811 tree field = get_chain_field (i);
813 x = build_simple_mem_ref (x);
814 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
815 x = init_tmp_var (info, x, gsi);
818 x = build_simple_mem_ref (x);
821 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
822 return x;
825 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
827 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
828 in the nested function with DECL_VALUE_EXPR set to reference the true
829 variable in the parent function. This is used both for debug info
830 and in OpenMP lowering. */
832 static tree
833 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
835 tree target_context;
836 struct nesting_info *i;
837 tree x, field, new_decl;
839 tree *slot = &info->var_map->get_or_insert (decl);
841 if (*slot)
842 return *slot;
844 target_context = decl_function_context (decl);
846 /* A copy of the code in get_frame_field, but without the temporaries. */
847 if (info->context == target_context)
849 /* Make sure frame_decl gets created. */
850 (void) get_frame_type (info);
851 x = info->frame_decl;
852 i = info;
854 else
856 x = get_chain_decl (info);
857 for (i = info->outer; i->context != target_context; i = i->outer)
859 field = get_chain_field (i);
860 x = build_simple_mem_ref (x);
861 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
863 x = build_simple_mem_ref (x);
866 field = lookup_field_for_decl (i, decl, INSERT);
867 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
868 if (use_pointer_in_frame (decl))
869 x = build_simple_mem_ref (x);
871 /* ??? We should be remapping types as well, surely. */
872 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
873 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
874 DECL_CONTEXT (new_decl) = info->context;
875 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
876 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
877 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
878 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
879 TREE_READONLY (new_decl) = TREE_READONLY (decl);
880 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
881 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
882 if ((TREE_CODE (decl) == PARM_DECL
883 || TREE_CODE (decl) == RESULT_DECL
884 || TREE_CODE (decl) == VAR_DECL)
885 && DECL_BY_REFERENCE (decl))
886 DECL_BY_REFERENCE (new_decl) = 1;
888 SET_DECL_VALUE_EXPR (new_decl, x);
889 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
891 *slot = new_decl;
892 DECL_CHAIN (new_decl) = info->debug_var_chain;
893 info->debug_var_chain = new_decl;
895 if (!optimize
896 && info->context != target_context
897 && variably_modified_type_p (TREE_TYPE (decl), NULL))
898 note_nonlocal_vla_type (info, TREE_TYPE (decl));
900 return new_decl;
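
/* For example (editorial illustration): for a variable FOO declared
   one nesting level up, the debug decl built above is a VAR_DECL also
   named FOO, local to INFO->CONTEXT, whose DECL_VALUE_EXPR is
   CHAIN->FOO (or *CHAIN->FOO when use_pointer_in_frame is true), so
   both the debugger and the OpenMP lowering see the parent's variable
   through the static chain.  */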
904 /* Callback for walk_gimple_stmt, rewrite all references to VAR
905 and PARM_DECLs that belong to outer functions.
907 The rewrite will involve some number of structure accesses back up
908 the static chain. E.g. for a variable FOO up one nesting level it'll
909 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
910 indirections apply to decls for which use_pointer_in_frame is true. */
912 static tree
913 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
915 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
916 struct nesting_info *const info = (struct nesting_info *) wi->info;
917 tree t = *tp;
919 *walk_subtrees = 0;
920 switch (TREE_CODE (t))
922 case VAR_DECL:
923 /* Non-automatic variables are never processed. */
924 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
925 break;
926 /* FALLTHRU */
928 case PARM_DECL:
929 if (decl_function_context (t) != info->context)
931 tree x;
932 wi->changed = true;
934 x = get_nonlocal_debug_decl (info, t);
935 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
937 tree target_context = decl_function_context (t);
938 struct nesting_info *i;
939 for (i = info->outer; i->context != target_context; i = i->outer)
940 continue;
941 x = lookup_field_for_decl (i, t, INSERT);
942 x = get_frame_field (info, target_context, x, &wi->gsi);
943 if (use_pointer_in_frame (t))
945 x = init_tmp_var (info, x, &wi->gsi);
946 x = build_simple_mem_ref (x);
950 if (wi->val_only)
952 if (wi->is_lhs)
953 x = save_tmp_var (info, x, &wi->gsi);
954 else
955 x = init_tmp_var (info, x, &wi->gsi);
958 *tp = x;
960 break;
962 case LABEL_DECL:
963 /* We're taking the address of a label from a parent function, but
964 this is not itself a non-local goto. Mark the label such that it
965 will not be deleted, much as we would with a label address in
966 static storage. */
967 if (decl_function_context (t) != info->context)
968 FORCED_LABEL (t) = 1;
969 break;
971 case ADDR_EXPR:
973 bool save_val_only = wi->val_only;
975 wi->val_only = false;
976 wi->is_lhs = false;
977 wi->changed = false;
978 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
979 wi->val_only = true;
981 if (wi->changed)
983 tree save_context;
985 /* If we changed anything, we might no longer be directly
986 referencing a decl. */
987 save_context = current_function_decl;
988 current_function_decl = info->context;
989 recompute_tree_invariant_for_addr_expr (t);
990 current_function_decl = save_context;
992 /* If the callback converted the address argument in a context
993 where we only accept variables (and min_invariant, presumably),
994 then compute the address into a temporary. */
995 if (save_val_only)
996 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
997 t, &wi->gsi);
1000 break;
1002 case REALPART_EXPR:
1003 case IMAGPART_EXPR:
1004 case COMPONENT_REF:
1005 case ARRAY_REF:
1006 case ARRAY_RANGE_REF:
1007 case BIT_FIELD_REF:
1008 /* Go down this entire nest and just look at the final prefix and
1009 anything that describes the references. Otherwise, we lose track
1010 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1011 wi->val_only = true;
1012 wi->is_lhs = false;
1013 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1015 if (TREE_CODE (t) == COMPONENT_REF)
1016 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1017 NULL);
1018 else if (TREE_CODE (t) == ARRAY_REF
1019 || TREE_CODE (t) == ARRAY_RANGE_REF)
1021 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1022 wi, NULL);
1023 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1024 wi, NULL);
1025 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1026 wi, NULL);
1029 wi->val_only = false;
1030 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1031 break;
1033 case VIEW_CONVERT_EXPR:
1034 /* Just request to look at the subtrees, leaving val_only and lhs
1035 untouched. This might actually be for !val_only + lhs, in which
1036 case we don't want to force a replacement by a temporary. */
1037 *walk_subtrees = 1;
1038 break;
1040 default:
1041 if (!IS_TYPE_OR_DECL_P (t))
1043 *walk_subtrees = 1;
1044 wi->val_only = true;
1045 wi->is_lhs = false;
1047 break;
1050 return NULL_TREE;
1053 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1054 struct walk_stmt_info *);
1056 /* Helper for convert_nonlocal_reference_stmt; rewrite all references to VAR
1057 and PARM_DECLs that belong to outer functions. */
1059 static bool
1060 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1062 struct nesting_info *const info = (struct nesting_info *) wi->info;
1063 bool need_chain = false, need_stmts = false;
1064 tree clause, decl;
1065 int dummy;
1066 bitmap new_suppress;
1068 new_suppress = BITMAP_GGC_ALLOC ();
1069 bitmap_copy (new_suppress, info->suppress_expansion);
1071 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1073 switch (OMP_CLAUSE_CODE (clause))
1075 case OMP_CLAUSE_REDUCTION:
1076 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1077 need_stmts = true;
1078 goto do_decl_clause;
1080 case OMP_CLAUSE_LASTPRIVATE:
1081 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1082 need_stmts = true;
1083 goto do_decl_clause;
1085 case OMP_CLAUSE_LINEAR:
1086 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1087 need_stmts = true;
1088 wi->val_only = true;
1089 wi->is_lhs = false;
1090 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1091 &dummy, wi);
1092 goto do_decl_clause;
1094 case OMP_CLAUSE_PRIVATE:
1095 case OMP_CLAUSE_FIRSTPRIVATE:
1096 case OMP_CLAUSE_COPYPRIVATE:
1097 case OMP_CLAUSE_SHARED:
1098 do_decl_clause:
1099 decl = OMP_CLAUSE_DECL (clause);
1100 if (TREE_CODE (decl) == VAR_DECL
1101 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1102 break;
1103 if (decl_function_context (decl) != info->context)
1105 bitmap_set_bit (new_suppress, DECL_UID (decl));
1106 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1107 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1108 need_chain = true;
1110 break;
1112 case OMP_CLAUSE_SCHEDULE:
1113 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1114 break;
1115 /* FALLTHRU */
1116 case OMP_CLAUSE_FINAL:
1117 case OMP_CLAUSE_IF:
1118 case OMP_CLAUSE_NUM_THREADS:
1119 case OMP_CLAUSE_DEPEND:
1120 case OMP_CLAUSE_DEVICE:
1121 case OMP_CLAUSE_NUM_TEAMS:
1122 case OMP_CLAUSE_THREAD_LIMIT:
1123 case OMP_CLAUSE_SAFELEN:
1124 case OMP_CLAUSE__CILK_FOR_COUNT_:
1125 wi->val_only = true;
1126 wi->is_lhs = false;
1127 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1128 &dummy, wi);
1129 break;
1131 case OMP_CLAUSE_DIST_SCHEDULE:
1132 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1134 wi->val_only = true;
1135 wi->is_lhs = false;
1136 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1137 &dummy, wi);
1139 break;
1141 case OMP_CLAUSE_MAP:
1142 case OMP_CLAUSE_TO:
1143 case OMP_CLAUSE_FROM:
1144 if (OMP_CLAUSE_SIZE (clause))
1146 wi->val_only = true;
1147 wi->is_lhs = false;
1148 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1149 &dummy, wi);
1151 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1152 goto do_decl_clause;
1153 wi->val_only = true;
1154 wi->is_lhs = false;
1155 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1156 wi, NULL);
1157 break;
1159 case OMP_CLAUSE_ALIGNED:
1160 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1162 wi->val_only = true;
1163 wi->is_lhs = false;
1164 convert_nonlocal_reference_op
1165 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1167 /* Like do_decl_clause, but don't add any suppression. */
1168 decl = OMP_CLAUSE_DECL (clause);
1169 if (TREE_CODE (decl) == VAR_DECL
1170 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1171 break;
1172 if (decl_function_context (decl) != info->context)
1174 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1175 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1176 need_chain = true;
1178 break;
1180 case OMP_CLAUSE_NOWAIT:
1181 case OMP_CLAUSE_ORDERED:
1182 case OMP_CLAUSE_DEFAULT:
1183 case OMP_CLAUSE_COPYIN:
1184 case OMP_CLAUSE_COLLAPSE:
1185 case OMP_CLAUSE_UNTIED:
1186 case OMP_CLAUSE_MERGEABLE:
1187 case OMP_CLAUSE_PROC_BIND:
1188 break;
1190 default:
1191 gcc_unreachable ();
1195 info->suppress_expansion = new_suppress;
1197 if (need_stmts)
1198 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1199 switch (OMP_CLAUSE_CODE (clause))
1201 case OMP_CLAUSE_REDUCTION:
1202 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1204 tree old_context
1205 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1206 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1207 = info->context;
1208 walk_body (convert_nonlocal_reference_stmt,
1209 convert_nonlocal_reference_op, info,
1210 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1211 walk_body (convert_nonlocal_reference_stmt,
1212 convert_nonlocal_reference_op, info,
1213 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1214 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1215 = old_context;
1217 break;
1219 case OMP_CLAUSE_LASTPRIVATE:
1220 walk_body (convert_nonlocal_reference_stmt,
1221 convert_nonlocal_reference_op, info,
1222 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1223 break;
1225 case OMP_CLAUSE_LINEAR:
1226 walk_body (convert_nonlocal_reference_stmt,
1227 convert_nonlocal_reference_op, info,
1228 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1229 break;
1231 default:
1232 break;
1235 return need_chain;
1238 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1240 static void
1241 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1243 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1244 type = TREE_TYPE (type);
1246 if (TYPE_NAME (type)
1247 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1248 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1249 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1251 while (POINTER_TYPE_P (type)
1252 || TREE_CODE (type) == VECTOR_TYPE
1253 || TREE_CODE (type) == FUNCTION_TYPE
1254 || TREE_CODE (type) == METHOD_TYPE)
1255 type = TREE_TYPE (type);
1257 if (TREE_CODE (type) == ARRAY_TYPE)
1259 tree domain, t;
1261 note_nonlocal_vla_type (info, TREE_TYPE (type));
1262 domain = TYPE_DOMAIN (type);
1263 if (domain)
1265 t = TYPE_MIN_VALUE (domain);
1266 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1267 && decl_function_context (t) != info->context)
1268 get_nonlocal_debug_decl (info, t);
1269 t = TYPE_MAX_VALUE (domain);
1270 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1271 && decl_function_context (t) != info->context)
1272 get_nonlocal_debug_decl (info, t);
1277 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1278 in BLOCK. */
1280 static void
1281 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1283 tree var;
1285 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1286 if (TREE_CODE (var) == VAR_DECL
1287 && variably_modified_type_p (TREE_TYPE (var), NULL)
1288 && DECL_HAS_VALUE_EXPR_P (var)
1289 && decl_function_context (var) != info->context)
1290 note_nonlocal_vla_type (info, TREE_TYPE (var));
1293 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1294 PARM_DECLs that belong to outer functions. This handles statements
1295 that are not handled via the standard recursion done in
1296 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1297 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1298 operands of STMT have been handled by this function. */
1300 static tree
1301 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1302 struct walk_stmt_info *wi)
1304 struct nesting_info *info = (struct nesting_info *) wi->info;
1305 tree save_local_var_chain;
1306 bitmap save_suppress;
1307 gimple stmt = gsi_stmt (*gsi);
1309 switch (gimple_code (stmt))
1311 case GIMPLE_GOTO:
1312 /* Don't walk non-local gotos for now. */
1313 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1315 wi->val_only = true;
1316 wi->is_lhs = false;
1317 *handled_ops_p = true;
1318 return NULL_TREE;
1320 break;
1322 case GIMPLE_OMP_PARALLEL:
1323 case GIMPLE_OMP_TASK:
1324 save_suppress = info->suppress_expansion;
1325 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1326 wi))
1328 tree c, decl;
1329 decl = get_chain_decl (info);
1330 c = build_omp_clause (gimple_location (stmt),
1331 OMP_CLAUSE_FIRSTPRIVATE);
1332 OMP_CLAUSE_DECL (c) = decl;
1333 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1334 gimple_omp_taskreg_set_clauses (stmt, c);
1337 save_local_var_chain = info->new_local_var_chain;
1338 info->new_local_var_chain = NULL;
1340 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1341 info, gimple_omp_body_ptr (stmt));
1343 if (info->new_local_var_chain)
1344 declare_vars (info->new_local_var_chain,
1345 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1346 false);
1347 info->new_local_var_chain = save_local_var_chain;
1348 info->suppress_expansion = save_suppress;
1349 break;
1351 case GIMPLE_OMP_FOR:
1352 save_suppress = info->suppress_expansion;
1353 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1354 walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
1355 convert_nonlocal_reference_op, info);
1356 walk_body (convert_nonlocal_reference_stmt,
1357 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1358 info->suppress_expansion = save_suppress;
1359 break;
1361 case GIMPLE_OMP_SECTIONS:
1362 save_suppress = info->suppress_expansion;
1363 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1364 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1365 info, gimple_omp_body_ptr (stmt));
1366 info->suppress_expansion = save_suppress;
1367 break;
1369 case GIMPLE_OMP_SINGLE:
1370 save_suppress = info->suppress_expansion;
1371 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1372 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1373 info, gimple_omp_body_ptr (stmt));
1374 info->suppress_expansion = save_suppress;
1375 break;
1377 case GIMPLE_OMP_TARGET:
1378 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
1380 save_suppress = info->suppress_expansion;
1381 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1382 wi);
1383 info->suppress_expansion = save_suppress;
1384 walk_body (convert_nonlocal_reference_stmt,
1385 convert_nonlocal_reference_op, info,
1386 gimple_omp_body_ptr (stmt));
1387 break;
1389 save_suppress = info->suppress_expansion;
1390 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1391 wi))
1393 tree c, decl;
1394 decl = get_chain_decl (info);
1395 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1396 OMP_CLAUSE_DECL (c) = decl;
1397 OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TO;
1398 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1399 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1400 gimple_omp_target_set_clauses (stmt, c);
1403 save_local_var_chain = info->new_local_var_chain;
1404 info->new_local_var_chain = NULL;
1406 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1407 info, gimple_omp_body_ptr (stmt));
1409 if (info->new_local_var_chain)
1410 declare_vars (info->new_local_var_chain,
1411 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1412 false);
1413 info->new_local_var_chain = save_local_var_chain;
1414 info->suppress_expansion = save_suppress;
1415 break;
1417 case GIMPLE_OMP_TEAMS:
1418 save_suppress = info->suppress_expansion;
1419 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1420 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1421 info, gimple_omp_body_ptr (stmt));
1422 info->suppress_expansion = save_suppress;
1423 break;
1425 case GIMPLE_OMP_SECTION:
1426 case GIMPLE_OMP_MASTER:
1427 case GIMPLE_OMP_TASKGROUP:
1428 case GIMPLE_OMP_ORDERED:
1429 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1430 info, gimple_omp_body_ptr (stmt));
1431 break;
1433 case GIMPLE_BIND:
1434 if (!optimize && gimple_bind_block (stmt))
1435 note_nonlocal_block_vlas (info, gimple_bind_block (stmt));
1437 for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
1438 if (TREE_CODE (var) == NAMELIST_DECL)
1440 /* Adjust decls mentioned in NAMELIST_DECL. */
1441 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1442 tree decl;
1443 unsigned int i;
1445 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1447 if (TREE_CODE (decl) == VAR_DECL
1448 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1449 continue;
1450 if (decl_function_context (decl) != info->context)
1451 CONSTRUCTOR_ELT (decls, i)->value
1452 = get_nonlocal_debug_decl (info, decl);
1456 *handled_ops_p = false;
1457 return NULL_TREE;
1459 case GIMPLE_COND:
1460 wi->val_only = true;
1461 wi->is_lhs = false;
1462 *handled_ops_p = false;
1463 return NULL_TREE;
1465 default:
1466 /* For every other statement that we are not interested in
1467 handling here, let the walker traverse the operands. */
1468 *handled_ops_p = false;
1469 return NULL_TREE;
1472 /* We have handled all of STMT's operands; there is no need to traverse them. */
1473 *handled_ops_p = true;
1474 return NULL_TREE;
1478 /* A subroutine of convert_local_reference. Create a local variable
1479 in the parent function with DECL_VALUE_EXPR set to reference the
1480 field in FRAME. This is used both for debug info and in OpenMP
1481 lowering. */
1483 static tree
1484 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1486 tree x, new_decl;
1488 tree *slot = &info->var_map->get_or_insert (decl);
1489 if (*slot)
1490 return *slot;
1492 /* Make sure frame_decl gets created. */
1493 (void) get_frame_type (info);
1494 x = info->frame_decl;
1495 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1497 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1498 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1499 DECL_CONTEXT (new_decl) = info->context;
1500 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1501 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1502 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1503 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1504 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1505 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1506 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1507 if ((TREE_CODE (decl) == PARM_DECL
1508 || TREE_CODE (decl) == RESULT_DECL
1509 || TREE_CODE (decl) == VAR_DECL)
1510 && DECL_BY_REFERENCE (decl))
1511 DECL_BY_REFERENCE (new_decl) = 1;
1513 SET_DECL_VALUE_EXPR (new_decl, x);
1514 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1515 *slot = new_decl;
1517 DECL_CHAIN (new_decl) = info->debug_var_chain;
1518 info->debug_var_chain = new_decl;
1520 /* Do not emit debug info twice. */
1521 DECL_IGNORED_P (decl) = 1;
1523 return new_decl;
1527 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1528 and PARM_DECLs that were referenced by inner nested functions.
1529 The rewrite will be a structure reference to the local frame variable. */
1531 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1533 static tree
1534 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1536 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1537 struct nesting_info *const info = (struct nesting_info *) wi->info;
1538 tree t = *tp, field, x;
1539 bool save_val_only;
1541 *walk_subtrees = 0;
1542 switch (TREE_CODE (t))
1544 case VAR_DECL:
1545 /* Non-automatic variables are never processed. */
1546 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1547 break;
1548 /* FALLTHRU */
1550 case PARM_DECL:
1551 if (decl_function_context (t) == info->context)
1553 /* If we copied a pointer to the frame, then the original decl
1554 is used unchanged in the parent function. */
1555 if (use_pointer_in_frame (t))
1556 break;
1558 /* No need to transform anything if no child references the
1559 variable. */
1560 field = lookup_field_for_decl (info, t, NO_INSERT);
1561 if (!field)
1562 break;
1563 wi->changed = true;
1565 x = get_local_debug_decl (info, t, field);
1566 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1567 x = get_frame_field (info, info->context, field, &wi->gsi);
1569 if (wi->val_only)
1571 if (wi->is_lhs)
1572 x = save_tmp_var (info, x, &wi->gsi);
1573 else
1574 x = init_tmp_var (info, x, &wi->gsi);
1577 *tp = x;
1579 break;
1581 case ADDR_EXPR:
1582 save_val_only = wi->val_only;
1583 wi->val_only = false;
1584 wi->is_lhs = false;
1585 wi->changed = false;
1586 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1587 wi->val_only = save_val_only;
1589 /* If we converted anything ... */
1590 if (wi->changed)
1592 tree save_context;
1594 /* Then the frame decl is now addressable. */
1595 TREE_ADDRESSABLE (info->frame_decl) = 1;
1597 save_context = current_function_decl;
1598 current_function_decl = info->context;
1599 recompute_tree_invariant_for_addr_expr (t);
1600 current_function_decl = save_context;
1602 /* If we are in a context where we only accept values, then
1603 compute the address into a temporary. */
1604 if (save_val_only)
1605 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1606 t, &wi->gsi);
1608 break;
1610 case REALPART_EXPR:
1611 case IMAGPART_EXPR:
1612 case COMPONENT_REF:
1613 case ARRAY_REF:
1614 case ARRAY_RANGE_REF:
1615 case BIT_FIELD_REF:
1616 /* Go down this entire nest and just look at the final prefix and
1617 anything that describes the references. Otherwise, we lose track
1618 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1619 save_val_only = wi->val_only;
1620 wi->val_only = true;
1621 wi->is_lhs = false;
1622 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1624 if (TREE_CODE (t) == COMPONENT_REF)
1625 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1626 NULL);
1627 else if (TREE_CODE (t) == ARRAY_REF
1628 || TREE_CODE (t) == ARRAY_RANGE_REF)
1630 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1631 NULL);
1632 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1633 NULL);
1634 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1635 NULL);
1638 wi->val_only = false;
1639 walk_tree (tp, convert_local_reference_op, wi, NULL);
1640 wi->val_only = save_val_only;
1641 break;
1643 case MEM_REF:
1644 save_val_only = wi->val_only;
1645 wi->val_only = true;
1646 wi->is_lhs = false;
1647 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1648 wi, NULL);
1649 /* We need to re-fold the MEM_REF, as component references as
1650 part of an ADDR_EXPR address are not allowed. But we cannot
1651 fold here, as the chain record type is not yet finalized. */
1652 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1653 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1654 info->mem_refs->add (tp);
1655 wi->val_only = save_val_only;
1656 break;
1658 case VIEW_CONVERT_EXPR:
1659 /* Just request to look at the subtrees, leaving val_only and lhs
1660 untouched. This might actually be for !val_only + lhs, in which
1661 case we don't want to force a replacement by a temporary. */
1662 *walk_subtrees = 1;
1663 break;
1665 default:
1666 if (!IS_TYPE_OR_DECL_P (t))
1668 *walk_subtrees = 1;
1669 wi->val_only = true;
1670 wi->is_lhs = false;
1672 break;
1675 return NULL_TREE;
1678 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1679 struct walk_stmt_info *);
1681 /* Helper for convert_local_reference_stmt. Convert all the references in
1682 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference_stmt. */
1684 static bool
1685 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1687 struct nesting_info *const info = (struct nesting_info *) wi->info;
1688 bool need_frame = false, need_stmts = false;
1689 tree clause, decl;
1690 int dummy;
1691 bitmap new_suppress;
1693 new_suppress = BITMAP_GGC_ALLOC ();
1694 bitmap_copy (new_suppress, info->suppress_expansion);
1696 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1698 switch (OMP_CLAUSE_CODE (clause))
1700 case OMP_CLAUSE_REDUCTION:
1701 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1702 need_stmts = true;
1703 goto do_decl_clause;
1705 case OMP_CLAUSE_LASTPRIVATE:
1706 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1707 need_stmts = true;
1708 goto do_decl_clause;
1710 case OMP_CLAUSE_LINEAR:
1711 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1712 need_stmts = true;
1713 wi->val_only = true;
1714 wi->is_lhs = false;
1715 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1716 wi);
1717 goto do_decl_clause;
1719 case OMP_CLAUSE_PRIVATE:
1720 case OMP_CLAUSE_FIRSTPRIVATE:
1721 case OMP_CLAUSE_COPYPRIVATE:
1722 case OMP_CLAUSE_SHARED:
1723 do_decl_clause:
1724 decl = OMP_CLAUSE_DECL (clause);
1725 if (TREE_CODE (decl) == VAR_DECL
1726 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1727 break;
1728 if (decl_function_context (decl) == info->context
1729 && !use_pointer_in_frame (decl))
1731 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1732 if (field)
1734 bitmap_set_bit (new_suppress, DECL_UID (decl));
1735 OMP_CLAUSE_DECL (clause)
1736 = get_local_debug_decl (info, decl, field);
1737 need_frame = true;
1740 break;
1742 case OMP_CLAUSE_SCHEDULE:
1743 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1744 break;
1745 /* FALLTHRU */
1746 case OMP_CLAUSE_FINAL:
1747 case OMP_CLAUSE_IF:
1748 case OMP_CLAUSE_NUM_THREADS:
1749 case OMP_CLAUSE_DEPEND:
1750 case OMP_CLAUSE_DEVICE:
1751 case OMP_CLAUSE_NUM_TEAMS:
1752 case OMP_CLAUSE_THREAD_LIMIT:
1753 case OMP_CLAUSE_SAFELEN:
1754 case OMP_CLAUSE__CILK_FOR_COUNT_:
1755 wi->val_only = true;
1756 wi->is_lhs = false;
1757 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1758 wi);
1759 break;
1761 case OMP_CLAUSE_DIST_SCHEDULE:
1762 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1764 wi->val_only = true;
1765 wi->is_lhs = false;
1766 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1767 &dummy, wi);
1769 break;
1771 case OMP_CLAUSE_MAP:
1772 case OMP_CLAUSE_TO:
1773 case OMP_CLAUSE_FROM:
1774 if (OMP_CLAUSE_SIZE (clause))
1776 wi->val_only = true;
1777 wi->is_lhs = false;
1778 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1779 &dummy, wi);
1781 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1782 goto do_decl_clause;
1783 wi->val_only = true;
1784 wi->is_lhs = false;
1785 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1786 wi, NULL);
1787 break;
1789 case OMP_CLAUSE_ALIGNED:
1790 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1792 wi->val_only = true;
1793 wi->is_lhs = false;
1794 convert_local_reference_op
1795 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1797 /* Like do_decl_clause, but don't add any suppression. */
1798 decl = OMP_CLAUSE_DECL (clause);
1799 if (TREE_CODE (decl) == VAR_DECL
1800 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1801 break;
1802 if (decl_function_context (decl) == info->context
1803 && !use_pointer_in_frame (decl))
1805 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1806 if (field)
1808 OMP_CLAUSE_DECL (clause)
1809 = get_local_debug_decl (info, decl, field);
1810 need_frame = true;
1813 break;
1815 case OMP_CLAUSE_NOWAIT:
1816 case OMP_CLAUSE_ORDERED:
1817 case OMP_CLAUSE_DEFAULT:
1818 case OMP_CLAUSE_COPYIN:
1819 case OMP_CLAUSE_COLLAPSE:
1820 case OMP_CLAUSE_UNTIED:
1821 case OMP_CLAUSE_MERGEABLE:
1822 case OMP_CLAUSE_PROC_BIND:
1823 break;
1825 default:
1826 gcc_unreachable ();
1830 info->suppress_expansion = new_suppress;
1832 if (need_stmts)
1833 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1834 switch (OMP_CLAUSE_CODE (clause))
1836 case OMP_CLAUSE_REDUCTION:
1837 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1839 tree old_context
1840 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1841 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1842 = info->context;
1843 walk_body (convert_local_reference_stmt,
1844 convert_local_reference_op, info,
1845 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1846 walk_body (convert_local_reference_stmt,
1847 convert_local_reference_op, info,
1848 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1849 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1850 = old_context;
1852 break;
1854 case OMP_CLAUSE_LASTPRIVATE:
1855 walk_body (convert_local_reference_stmt,
1856 convert_local_reference_op, info,
1857 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1858 break;
1860 case OMP_CLAUSE_LINEAR:
1861 walk_body (convert_local_reference_stmt,
1862 convert_local_reference_op, info,
1863 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1864 break;
1866 default:
1867 break;
1870 return need_frame;
1874 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1875 and PARM_DECLs that were referenced by inner nested functions.
1876 The rewrite will be a structure reference to the local frame variable. */
1878 static tree
1879 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1880 struct walk_stmt_info *wi)
1882 struct nesting_info *info = (struct nesting_info *) wi->info;
1883 tree save_local_var_chain;
1884 bitmap save_suppress;
1885 gimple stmt = gsi_stmt (*gsi);
1887 switch (gimple_code (stmt))
1889 case GIMPLE_OMP_PARALLEL:
1890 case GIMPLE_OMP_TASK:
1891 save_suppress = info->suppress_expansion;
1892 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1893 wi))
1895 tree c;
1896 (void) get_frame_type (info);
1897 c = build_omp_clause (gimple_location (stmt),
1898 OMP_CLAUSE_SHARED);
1899 OMP_CLAUSE_DECL (c) = info->frame_decl;
1900 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1901 gimple_omp_taskreg_set_clauses (stmt, c);
1904 save_local_var_chain = info->new_local_var_chain;
1905 info->new_local_var_chain = NULL;
1907 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1908 gimple_omp_body_ptr (stmt));
1910 if (info->new_local_var_chain)
1911 declare_vars (info->new_local_var_chain,
1912 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1913 info->new_local_var_chain = save_local_var_chain;
1914 info->suppress_expansion = save_suppress;
1915 break;
1917 case GIMPLE_OMP_FOR:
1918 save_suppress = info->suppress_expansion;
1919 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1920 walk_gimple_omp_for (stmt, convert_local_reference_stmt,
1921 convert_local_reference_op, info);
1922 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1923 info, gimple_omp_body_ptr (stmt));
1924 info->suppress_expansion = save_suppress;
1925 break;
1927 case GIMPLE_OMP_SECTIONS:
1928 save_suppress = info->suppress_expansion;
1929 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1930 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1931 info, gimple_omp_body_ptr (stmt));
1932 info->suppress_expansion = save_suppress;
1933 break;
1935 case GIMPLE_OMP_SINGLE:
1936 save_suppress = info->suppress_expansion;
1937 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1938 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1939 info, gimple_omp_body_ptr (stmt));
1940 info->suppress_expansion = save_suppress;
1941 break;
1943 case GIMPLE_OMP_TARGET:
1944 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
1946 save_suppress = info->suppress_expansion;
1947 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1948 info->suppress_expansion = save_suppress;
1949 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1950 info, gimple_omp_body_ptr (stmt));
1951 break;
1953 save_suppress = info->suppress_expansion;
1954 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
1956 tree c;
1957 (void) get_frame_type (info);
1958 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1959 OMP_CLAUSE_DECL (c) = info->frame_decl;
1960 OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TOFROM;
1961 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
1962 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1963 gimple_omp_target_set_clauses (stmt, c);
1966 save_local_var_chain = info->new_local_var_chain;
1967 info->new_local_var_chain = NULL;
1969 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1970 gimple_omp_body_ptr (stmt));
1972 if (info->new_local_var_chain)
1973 declare_vars (info->new_local_var_chain,
1974 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1975 info->new_local_var_chain = save_local_var_chain;
1976 info->suppress_expansion = save_suppress;
1977 break;
1979 case GIMPLE_OMP_TEAMS:
1980 save_suppress = info->suppress_expansion;
1981 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1982 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1983 info, gimple_omp_body_ptr (stmt));
1984 info->suppress_expansion = save_suppress;
1985 break;
1987 case GIMPLE_OMP_SECTION:
1988 case GIMPLE_OMP_MASTER:
1989 case GIMPLE_OMP_TASKGROUP:
1990 case GIMPLE_OMP_ORDERED:
1991 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1992 info, gimple_omp_body_ptr (stmt));
1993 break;
1995 case GIMPLE_COND:
1996 wi->val_only = true;
1997 wi->is_lhs = false;
1998 *handled_ops_p = false;
1999 return NULL_TREE;
2001 case GIMPLE_ASSIGN:
2002 if (gimple_clobber_p (stmt))
2004 tree lhs = gimple_assign_lhs (stmt);
2005 if (!use_pointer_in_frame (lhs)
2006 && lookup_field_for_decl (info, lhs, NO_INSERT))
2008 gsi_replace (gsi, gimple_build_nop (), true);
2009 break;
2012 *handled_ops_p = false;
2013 return NULL_TREE;
2015 case GIMPLE_BIND:
2016 for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
2017 if (TREE_CODE (var) == NAMELIST_DECL)
2019 /* Adjust decls mentioned in NAMELIST_DECL. */
2020 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2021 tree decl;
2022 unsigned int i;
2024 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2026 if (TREE_CODE (decl) == VAR_DECL
2027 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2028 continue;
2029 if (decl_function_context (decl) == info->context
2030 && !use_pointer_in_frame (decl))
2032 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2033 if (field)
2035 CONSTRUCTOR_ELT (decls, i)->value
2036 = get_local_debug_decl (info, decl, field);
2042 *handled_ops_p = false;
2043 return NULL_TREE;
2045 default:
2046 /* For every other statement that we are not interested in
2047 handling here, let the walker traverse the operands. */
2048 *handled_ops_p = false;
2049 return NULL_TREE;
2052 /* Indicate that we have handled all the operands ourselves. */
2053 *handled_ops_p = true;
2054 return NULL_TREE;
2058 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2059 that reference labels from outer functions. The rewrite will be a
2060 call to __builtin_nonlocal_goto. */
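/* A sketch of the situation this handles (identifiers are illustrative):

     void outer (void)
     {
       void inner (int bad) { if (bad) goto fail; }
       inner (check ());
       return;
     fail:
       report ();
     }

   The "goto fail" inside INNER cannot be an ordinary branch because FAIL
   lives in OUTER's body; it is replaced by a builtin call that restores
   OUTER's saved frame state and transfers control non-locally.  */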
2062 static tree
2063 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2064 struct walk_stmt_info *wi)
2066 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2067 tree label, new_label, target_context, x, field;
2068 gimple call;
2069 gimple stmt = gsi_stmt (*gsi);
2071 if (gimple_code (stmt) != GIMPLE_GOTO)
2073 *handled_ops_p = false;
2074 return NULL_TREE;
2077 label = gimple_goto_dest (stmt);
2078 if (TREE_CODE (label) != LABEL_DECL)
2080 *handled_ops_p = false;
2081 return NULL_TREE;
2084 target_context = decl_function_context (label);
2085 if (target_context == info->context)
2087 *handled_ops_p = false;
2088 return NULL_TREE;
2091 for (i = info->outer; target_context != i->context; i = i->outer)
2092 continue;
2094 /* The original user label may also be used for a normal goto, therefore
2095 we must create a new label that will actually receive the abnormal
2096 control transfer. This new label will be marked LABEL_NONLOCAL; this
2097 mark will trigger proper behavior in the cfg, as well as cause the
2098 (hairy target-specific) non-local goto receiver code to be generated
2099 when we expand rtl. Enter this association into var_map so that we
2100 can insert the new label into the IL during a second pass. */
2101 tree *slot = &i->var_map->get_or_insert (label);
2102 if (*slot == NULL)
2104 new_label = create_artificial_label (UNKNOWN_LOCATION);
2105 DECL_NONLOCAL (new_label) = 1;
2106 *slot = new_label;
2108 else
2109 new_label = *slot;
2111 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2112 field = get_nl_goto_field (i);
2113 x = get_frame_field (info, target_context, field, gsi);
2114 x = build_addr (x, target_context);
2115 x = gsi_gimplify_val (info, x, gsi);
2116 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2117 2, build_addr (new_label, target_context), x);
2118 gsi_replace (gsi, call, false);
2120 /* We have handled all of STMT's operands; no need to keep going. */
2121 *handled_ops_p = true;
2122 return NULL_TREE;
2126 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2127 are referenced via nonlocal goto from a nested function. The rewrite
2128 will involve installing a newly generated DECL_NONLOCAL label, and
2129 (potentially) a branch around the rtl gunk that is assumed to be
2130 attached to such a label. */
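/* Continuing the sketch above, inside OUTER the original "fail:" label
   gains an artificial companion that is marked DECL_NONLOCAL and receives
   the abnormal edge, roughly

     goto fail;          <- emitted only if the previous statement
     fail.nl:               may fall through
     fail:
       report ();

   where "fail.nl" stands in for the compiler-generated label name.  */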
2132 static tree
2133 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2134 struct walk_stmt_info *wi)
2136 struct nesting_info *const info = (struct nesting_info *) wi->info;
2137 tree label, new_label;
2138 gimple_stmt_iterator tmp_gsi;
2139 gimple stmt = gsi_stmt (*gsi);
2141 if (gimple_code (stmt) != GIMPLE_LABEL)
2143 *handled_ops_p = false;
2144 return NULL_TREE;
2147 label = gimple_label_label (stmt);
2149 tree *slot = info->var_map->get (label);
2150 if (!slot)
2152 *handled_ops_p = false;
2153 return NULL_TREE;
2156 /* If there's any possibility that the previous statement falls through,
2157 then we must branch around the new non-local label. */
2158 tmp_gsi = wi->gsi;
2159 gsi_prev (&tmp_gsi);
2160 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2162 gimple stmt = gimple_build_goto (label);
2163 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2166 new_label = (tree) *slot;
2167 stmt = gimple_build_label (new_label);
2168 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2170 *handled_ops_p = true;
2171 return NULL_TREE;
2175 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2176 of nested functions that require the use of trampolines. The rewrite
2177 will involve a reference to a trampoline generated for the occasion. */
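/* For instance (a sketch), taking the address of a chain-using nested
   function:

     void outer (void)
     {
       int x;
       int inner (void) { return x; }
       int (*fp) (void) = inner;
       use (fp);
     }

   cannot hand out INNER's bare code address, since a later indirect call
   through FP would arrive without a static chain.  Instead, the address
   of a trampoline field in OUTER's frame is taken, passed through
   __builtin_adjust_trampoline, and the result is what FP receives.  */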
2179 static tree
2180 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2182 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2183 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2184 tree t = *tp, decl, target_context, x, builtin;
2185 gimple call;
2187 *walk_subtrees = 0;
2188 switch (TREE_CODE (t))
2190 case ADDR_EXPR:
2191 /* Build
2192 T.1 = &CHAIN->tramp;
2193 T.2 = __builtin_adjust_trampoline (T.1);
2194 T.3 = (func_type)T.2;
2197 decl = TREE_OPERAND (t, 0);
2198 if (TREE_CODE (decl) != FUNCTION_DECL)
2199 break;
2201 /* Only need to process nested functions. */
2202 target_context = decl_function_context (decl);
2203 if (!target_context)
2204 break;
2206 /* If the nested function doesn't use a static chain, then
2207 it doesn't need a trampoline. */
2208 if (!DECL_STATIC_CHAIN (decl))
2209 break;
2211 /* If we don't want a trampoline, then don't build one. */
2212 if (TREE_NO_TRAMPOLINE (t))
2213 break;
2215 /* Look up the immediate parent of the callee, as that's where
2216 we need to insert the trampoline. */
2217 for (i = info; i->context != target_context; i = i->outer)
2218 continue;
2219 x = lookup_tramp_for_decl (i, decl, INSERT);
2221 /* Compute the address of the field holding the trampoline. */
2222 x = get_frame_field (info, target_context, x, &wi->gsi);
2223 x = build_addr (x, target_context);
2224 x = gsi_gimplify_val (info, x, &wi->gsi);
2226 /* Do machine-specific ugliness. Normally this will involve
2227 computing extra alignment, but it can really be anything. */
2228 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2229 call = gimple_build_call (builtin, 1, x);
2230 x = init_tmp_var_with_call (info, &wi->gsi, call);
2232 /* Cast back to the proper function type. */
2233 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2234 x = init_tmp_var (info, x, &wi->gsi);
2236 *tp = x;
2237 break;
2239 default:
2240 if (!IS_TYPE_OR_DECL_P (t))
2241 *walk_subtrees = 1;
2242 break;
2245 return NULL_TREE;
2249 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2250 to addresses of nested functions that require the use of
2251 trampolines. The rewrite will involve a reference to a trampoline
2252 generated for the occasion. */
2254 static tree
2255 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2256 struct walk_stmt_info *wi)
2258 struct nesting_info *info = (struct nesting_info *) wi->info;
2259 gimple stmt = gsi_stmt (*gsi);
2261 switch (gimple_code (stmt))
2263 case GIMPLE_CALL:
2265 /* Only walk call arguments, lest we generate trampolines for
2266 direct calls. */
2267 unsigned long i, nargs = gimple_call_num_args (stmt);
2268 for (i = 0; i < nargs; i++)
2269 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2270 wi, NULL);
2271 break;
2274 case GIMPLE_OMP_TARGET:
2275 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
2277 *handled_ops_p = false;
2278 return NULL_TREE;
2280 /* FALLTHRU */
2281 case GIMPLE_OMP_PARALLEL:
2282 case GIMPLE_OMP_TASK:
2284 tree save_local_var_chain;
2285 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2286 save_local_var_chain = info->new_local_var_chain;
2287 info->new_local_var_chain = NULL;
2288 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2289 info, gimple_omp_body_ptr (stmt));
2290 if (info->new_local_var_chain)
2291 declare_vars (info->new_local_var_chain,
2292 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2293 false);
2294 info->new_local_var_chain = save_local_var_chain;
2296 break;
2298 default:
2299 *handled_ops_p = false;
2300 return NULL_TREE;
2303 *handled_ops_p = true;
2304 return NULL_TREE;
2309 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2310 that reference nested functions to make sure that the static chain
2311 is set up properly for the call. */
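/* Sketch: a direct call such as "inner (42)" made from INNER's parent, or
   from a sibling nested function, is given an explicit static chain
   operand, conceptually "inner (42) [with chain = address of the parent's
   FRAME]".  The chain value comes from get_static_chain, which may have
   to walk up through several frame links when the callee's parent is a
   more distant ancestor.  */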
2313 static tree
2314 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2315 struct walk_stmt_info *wi)
2317 struct nesting_info *const info = (struct nesting_info *) wi->info;
2318 tree decl, target_context;
2319 char save_static_chain_added;
2320 int i;
2321 gimple stmt = gsi_stmt (*gsi);
2323 switch (gimple_code (stmt))
2325 case GIMPLE_CALL:
2326 if (gimple_call_chain (stmt))
2327 break;
2328 decl = gimple_call_fndecl (stmt);
2329 if (!decl)
2330 break;
2331 target_context = decl_function_context (decl);
2332 if (target_context && DECL_STATIC_CHAIN (decl))
2334 gimple_call_set_chain (stmt, get_static_chain (info, target_context,
2335 &wi->gsi));
2336 info->static_chain_added |= (1 << (info->context != target_context));
2338 break;
2340 case GIMPLE_OMP_PARALLEL:
2341 case GIMPLE_OMP_TASK:
2342 save_static_chain_added = info->static_chain_added;
2343 info->static_chain_added = 0;
2344 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2345 for (i = 0; i < 2; i++)
2347 tree c, decl;
2348 if ((info->static_chain_added & (1 << i)) == 0)
2349 continue;
2350 decl = i ? get_chain_decl (info) : info->frame_decl;
2351 /* Don't add CHAIN.* or FRAME.* twice. */
2352 for (c = gimple_omp_taskreg_clauses (stmt);
2354 c = OMP_CLAUSE_CHAIN (c))
2355 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2356 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2357 && OMP_CLAUSE_DECL (c) == decl)
2358 break;
2359 if (c == NULL)
2361 c = build_omp_clause (gimple_location (stmt),
2362 i ? OMP_CLAUSE_FIRSTPRIVATE
2363 : OMP_CLAUSE_SHARED);
2364 OMP_CLAUSE_DECL (c) = decl;
2365 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2366 gimple_omp_taskreg_set_clauses (stmt, c);
2369 info->static_chain_added |= save_static_chain_added;
2370 break;
2372 case GIMPLE_OMP_TARGET:
2373 if (gimple_omp_target_kind (stmt) != GF_OMP_TARGET_KIND_REGION)
2375 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2376 break;
2378 save_static_chain_added = info->static_chain_added;
2379 info->static_chain_added = 0;
2380 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2381 for (i = 0; i < 2; i++)
2383 tree c, decl;
2384 if ((info->static_chain_added & (1 << i)) == 0)
2385 continue;
2386 decl = i ? get_chain_decl (info) : info->frame_decl;
2387 /* Don't add CHAIN.* or FRAME.* twice. */
2388 for (c = gimple_omp_target_clauses (stmt);
2390 c = OMP_CLAUSE_CHAIN (c))
2391 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2392 && OMP_CLAUSE_DECL (c) == decl)
2393 break;
2394 if (c == NULL)
2396 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2397 OMP_CLAUSE_DECL (c) = decl;
2398 OMP_CLAUSE_MAP_KIND (c)
2399 = i ? OMP_CLAUSE_MAP_TO : OMP_CLAUSE_MAP_TOFROM;
2400 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2401 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2402 gimple_omp_target_set_clauses (stmt, c);
2405 info->static_chain_added |= save_static_chain_added;
2406 break;
2408 case GIMPLE_OMP_FOR:
2409 walk_body (convert_gimple_call, NULL, info,
2410 gimple_omp_for_pre_body_ptr (stmt));
2411 /* FALLTHRU */
2412 case GIMPLE_OMP_SECTIONS:
2413 case GIMPLE_OMP_SECTION:
2414 case GIMPLE_OMP_SINGLE:
2415 case GIMPLE_OMP_TEAMS:
2416 case GIMPLE_OMP_MASTER:
2417 case GIMPLE_OMP_TASKGROUP:
2418 case GIMPLE_OMP_ORDERED:
2419 case GIMPLE_OMP_CRITICAL:
2420 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2421 break;
2423 default:
2424 /* Keep looking for other operands. */
2425 *handled_ops_p = false;
2426 return NULL_TREE;
2429 *handled_ops_p = true;
2430 return NULL_TREE;
2433 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2434 call expressions. At the same time, determine if a nested function
2435 actually uses its static chain; if not, remember that. */
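/* A sketch of why the walk below must iterate: in

     void a (void)
     {
       int x;
       void g (void) { x = 1; }
       void f (void) { g (); }
       f ();
     }

   F touches nothing of A's directly, so its static chain is cleared
   optimistically.  Converting the call "g ()" inside F then materializes
   F's chain (G needs A's frame, which F can only reach through its own
   chain), so the static chain is switched back on for F and every caller
   of F has to be re-examined on the next iteration.  */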
2437 static void
2438 convert_all_function_calls (struct nesting_info *root)
2440 unsigned int chain_count = 0, old_chain_count, iter_count;
2441 struct nesting_info *n;
2443 /* First, optimistically clear static_chain for all decls that haven't
2444 used the static chain already for variable access. But always create
2445 it if not optimizing. This makes it possible to reconstruct the static
2446 nesting tree at run time and thus to resolve up-level references from
2447 within the debugger. */
2448 FOR_EACH_NEST_INFO (n, root)
2450 tree decl = n->context;
2451 if (!optimize)
2453 if (n->inner)
2454 (void) get_frame_type (n);
2455 if (n->outer)
2456 (void) get_chain_decl (n);
2458 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2460 DECL_STATIC_CHAIN (decl) = 0;
2461 if (dump_file && (dump_flags & TDF_DETAILS))
2462 fprintf (dump_file, "Guessing no static-chain for %s\n",
2463 lang_hooks.decl_printable_name (decl, 2));
2465 else
2466 DECL_STATIC_CHAIN (decl) = 1;
2467 chain_count += DECL_STATIC_CHAIN (decl);
2470 /* Walk the functions and perform transformations. Note that these
2471 transformations can induce new uses of the static chain, which in turn
2472 require re-examining all users of the decl. */
2473 /* ??? It would make sense to try to use the call graph to speed this up,
2474 but the call graph hasn't really been built yet. Even if it had been, we
2475 would still need to iterate in this loop since address-of references
2476 wouldn't show up in the callgraph anyway. */
2477 iter_count = 0;
2480 old_chain_count = chain_count;
2481 chain_count = 0;
2482 iter_count++;
2484 if (dump_file && (dump_flags & TDF_DETAILS))
2485 fputc ('\n', dump_file);
2487 FOR_EACH_NEST_INFO (n, root)
2489 tree decl = n->context;
2490 walk_function (convert_tramp_reference_stmt,
2491 convert_tramp_reference_op, n);
2492 walk_function (convert_gimple_call, NULL, n);
2493 chain_count += DECL_STATIC_CHAIN (decl);
2496 while (chain_count != old_chain_count);
2498 if (dump_file && (dump_flags & TDF_DETAILS))
2499 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2500 iter_count);
2503 struct nesting_copy_body_data
2505 copy_body_data cb;
2506 struct nesting_info *root;
2509 /* A helper subroutine for debug_var_chain type remapping. */
2511 static tree
2512 nesting_copy_decl (tree decl, copy_body_data *id)
2514 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2515 tree *slot = nid->root->var_map->get (decl);
2517 if (slot)
2518 return (tree) *slot;
2520 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2522 tree new_decl = copy_decl_no_change (decl, id);
2523 DECL_ORIGINAL_TYPE (new_decl)
2524 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2525 return new_decl;
2528 if (TREE_CODE (decl) == VAR_DECL
2529 || TREE_CODE (decl) == PARM_DECL
2530 || TREE_CODE (decl) == RESULT_DECL)
2531 return decl;
2533 return copy_decl_no_change (decl, id);
2536 /* A helper function for remap_vla_decls. See if *TP contains
2537 some remapped variables. */
2539 static tree
2540 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2542 struct nesting_info *root = (struct nesting_info *) data;
2543 tree t = *tp;
2545 if (DECL_P (t))
2547 *walk_subtrees = 0;
2548 tree *slot = root->var_map->get (t);
2550 if (slot)
2551 return *slot;
2553 return NULL;
2556 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2557 involved. */
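/* Sketch of when this matters: for

     void outer (int n)
     {
       char buf[n];
       void inner (void) { buf[0] = 0; }
       inner ();
     }

   BUF's DECL_VALUE_EXPR is an INDIRECT_REF of an artificial pointer
   variable and its type is variably modified; once that pointer or any
   variable mentioned by the type has been remapped into the frame, the
   value expression and the type itself must be remapped to match, which
   is what the walk below does block by block.  */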
2559 static void
2560 remap_vla_decls (tree block, struct nesting_info *root)
2562 tree var, subblock, val, type;
2563 struct nesting_copy_body_data id;
2565 for (subblock = BLOCK_SUBBLOCKS (block);
2566 subblock;
2567 subblock = BLOCK_CHAIN (subblock))
2568 remap_vla_decls (subblock, root);
2570 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2571 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2573 val = DECL_VALUE_EXPR (var);
2574 type = TREE_TYPE (var);
2576 if (!(TREE_CODE (val) == INDIRECT_REF
2577 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2578 && variably_modified_type_p (type, NULL)))
2579 continue;
2581 if (root->var_map->get (TREE_OPERAND (val, 0))
2582 || walk_tree (&type, contains_remapped_vars, root, NULL))
2583 break;
2586 if (var == NULL_TREE)
2587 return;
2589 memset (&id, 0, sizeof (id));
2590 id.cb.copy_decl = nesting_copy_decl;
2591 id.cb.decl_map = new hash_map<tree, tree>;
2592 id.root = root;
2594 for (; var; var = DECL_CHAIN (var))
2595 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2597 struct nesting_info *i;
2598 tree newt, context;
2600 val = DECL_VALUE_EXPR (var);
2601 type = TREE_TYPE (var);
2603 if (!(TREE_CODE (val) == INDIRECT_REF
2604 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2605 && variably_modified_type_p (type, NULL)))
2606 continue;
2608 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2609 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2610 continue;
2612 context = decl_function_context (var);
2613 for (i = root; i; i = i->outer)
2614 if (i->context == context)
2615 break;
2617 if (i == NULL)
2618 continue;
2620 /* Fully expand value expressions. This avoids having debug variables
2621 that are only referenced from value expressions and could be swept during GC. */
2622 if (slot)
2624 tree t = (tree) *slot;
2625 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2626 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2629 id.cb.src_fn = i->context;
2630 id.cb.dst_fn = i->context;
2631 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2633 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2634 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2636 newt = TREE_TYPE (newt);
2637 type = TREE_TYPE (type);
2639 if (TYPE_NAME (newt)
2640 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2641 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2642 && newt != type
2643 && TYPE_NAME (newt) == TYPE_NAME (type))
2644 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2646 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2647 if (val != DECL_VALUE_EXPR (var))
2648 SET_DECL_VALUE_EXPR (var, val);
2651 delete id.cb.decl_map;
2654 /* Fold the MEM_REF *E. */
2655 bool
2656 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2658 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2659 *ref_p = fold (*ref_p);
2660 return true;
2663 /* Do "everything else" to clean up or complete state collected by the
2664 various walking passes -- lay out the types and decls, generate code
2665 to initialize the frame decl, store critical expressions in the
2666 struct function for rtl to find. */
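/* The initialization sequence built below is, schematically,

     FRAME.param = param;      (one store per remapped parameter)
     FRAME.chain = CHAIN;      (if a chain field was created)
     __builtin_init_trampoline (&FRAME.tramp_f, &f, &FRAME);
                               (one call per trampoline)

   with illustrative field spellings; the whole list is then spliced in
   front of the body of the function's outermost GIMPLE_BIND.  */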
2668 static void
2669 finalize_nesting_tree_1 (struct nesting_info *root)
2671 gimple_seq stmt_list;
2672 gimple stmt;
2673 tree context = root->context;
2674 struct function *sf;
2676 stmt_list = NULL;
2678 /* If we created a non-local frame type or decl, we need to lay them
2679 out at this time. */
2680 if (root->frame_type)
2682 /* In some cases the frame type will trigger the -Wpadded warning.
2683 This is not helpful; suppress it. */
2684 int save_warn_padded = warn_padded;
2685 tree *adjust;
2687 warn_padded = 0;
2688 layout_type (root->frame_type);
2689 warn_padded = save_warn_padded;
2690 layout_decl (root->frame_decl, 0);
2692 /* Remove root->frame_decl from root->new_local_var_chain, so
2693 that we can declare it also in the lexical blocks, which
2694 helps ensure virtual regs that end up appearing in its RTL
2695 expression get substituted in instantiate_virtual_regs(). */
2696 for (adjust = &root->new_local_var_chain;
2697 *adjust != root->frame_decl;
2698 adjust = &DECL_CHAIN (*adjust))
2699 gcc_assert (DECL_CHAIN (*adjust));
2700 *adjust = DECL_CHAIN (*adjust);
2702 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2703 declare_vars (root->frame_decl,
2704 gimple_seq_first_stmt (gimple_body (context)), true);
2707 /* If any parameters were referenced non-locally, then we need to
2708 insert a copy. Likewise, if any variables were referenced by
2709 pointer, we need to initialize the address. */
2710 if (root->any_parm_remapped)
2712 tree p;
2713 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2715 tree field, x, y;
2717 field = lookup_field_for_decl (root, p, NO_INSERT);
2718 if (!field)
2719 continue;
2721 if (use_pointer_in_frame (p))
2722 x = build_addr (p, context);
2723 else
2724 x = p;
2726 /* If the assignment is from a non-register the stmt is
2727 not valid gimple. Make it so by using a temporary instead. */
2728 if (!is_gimple_reg (x)
2729 && is_gimple_reg_type (TREE_TYPE (x)))
2731 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2732 x = init_tmp_var (root, x, &gsi);
2735 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2736 root->frame_decl, field, NULL_TREE);
2737 stmt = gimple_build_assign (y, x);
2738 gimple_seq_add_stmt (&stmt_list, stmt);
2742 /* If a chain_field was created, then it needs to be initialized
2743 from chain_decl. */
2744 if (root->chain_field)
2746 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2747 root->frame_decl, root->chain_field, NULL_TREE);
2748 stmt = gimple_build_assign (x, get_chain_decl (root));
2749 gimple_seq_add_stmt (&stmt_list, stmt);
2752 /* If trampolines were created, then we need to initialize them. */
2753 if (root->any_tramp_created)
2755 struct nesting_info *i;
2756 for (i = root->inner; i ; i = i->next)
2758 tree arg1, arg2, arg3, x, field;
2760 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2761 if (!field)
2762 continue;
2764 gcc_assert (DECL_STATIC_CHAIN (i->context));
2765 arg3 = build_addr (root->frame_decl, context);
2767 arg2 = build_addr (i->context, context);
2769 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2770 root->frame_decl, field, NULL_TREE);
2771 arg1 = build_addr (x, context);
2773 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2774 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2775 gimple_seq_add_stmt (&stmt_list, stmt);
2779 /* If we created initialization statements, insert them. */
2780 if (stmt_list)
2782 gimple bind;
2783 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2784 bind = gimple_seq_first_stmt (gimple_body (context));
2785 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2786 gimple_bind_set_body (bind, stmt_list);
2789 /* If a chain_decl was created, then it needs to be registered with
2790 struct function so that it gets initialized from the static chain
2791 register at the beginning of the function. */
2792 sf = DECL_STRUCT_FUNCTION (root->context);
2793 sf->static_chain_decl = root->chain_decl;
2795 /* Similarly for the non-local goto save area. */
2796 if (root->nl_goto_field)
2798 sf->nonlocal_goto_save_area
2799 = get_frame_field (root, context, root->nl_goto_field, NULL);
2800 sf->has_nonlocal_label = 1;
2803 /* Make sure all new local variables get inserted into the
2804 proper BIND_EXPR. */
2805 if (root->new_local_var_chain)
2806 declare_vars (root->new_local_var_chain,
2807 gimple_seq_first_stmt (gimple_body (root->context)),
2808 false);
2810 if (root->debug_var_chain)
2812 tree debug_var;
2813 gimple scope;
2815 remap_vla_decls (DECL_INITIAL (root->context), root);
2817 for (debug_var = root->debug_var_chain; debug_var;
2818 debug_var = DECL_CHAIN (debug_var))
2819 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2820 break;
2822 /* If there are any debug decls with variable length types,
2823 remap those types using other debug_var_chain variables. */
2824 if (debug_var)
2826 struct nesting_copy_body_data id;
2828 memset (&id, 0, sizeof (id));
2829 id.cb.copy_decl = nesting_copy_decl;
2830 id.cb.decl_map = new hash_map<tree, tree>;
2831 id.root = root;
2833 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2834 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2836 tree type = TREE_TYPE (debug_var);
2837 tree newt, t = type;
2838 struct nesting_info *i;
2840 for (i = root; i; i = i->outer)
2841 if (variably_modified_type_p (type, i->context))
2842 break;
2844 if (i == NULL)
2845 continue;
2847 id.cb.src_fn = i->context;
2848 id.cb.dst_fn = i->context;
2849 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2851 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2852 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2854 newt = TREE_TYPE (newt);
2855 t = TREE_TYPE (t);
2857 if (TYPE_NAME (newt)
2858 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2859 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2860 && newt != t
2861 && TYPE_NAME (newt) == TYPE_NAME (t))
2862 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2865 delete id.cb.decl_map;
2868 scope = gimple_seq_first_stmt (gimple_body (root->context));
2869 if (gimple_bind_block (scope))
2870 declare_vars (root->debug_var_chain, scope, true);
2871 else
2872 BLOCK_VARS (DECL_INITIAL (root->context))
2873 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2874 root->debug_var_chain);
2877 /* Fold the rewritten MEM_REF trees. */
2878 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
2880 /* Dump the translated tree function. */
2881 if (dump_file)
2883 fputs ("\n\n", dump_file);
2884 dump_function_to_file (root->context, dump_file, dump_flags);
2888 static void
2889 finalize_nesting_tree (struct nesting_info *root)
2891 struct nesting_info *n;
2892 FOR_EACH_NEST_INFO (n, root)
2893 finalize_nesting_tree_1 (n);
2896 /* Unnest the nodes and pass them to cgraph. */
2898 static void
2899 unnest_nesting_tree_1 (struct nesting_info *root)
2901 struct cgraph_node *node = cgraph_node::get (root->context);
2903 /* For nested functions, update the cgraph to reflect unnesting.
2904 We also delay finalizing these functions until this point. */
2905 if (node->origin)
2907 node->unnest ();
2908 cgraph_node::finalize_function (root->context, true);
2912 static void
2913 unnest_nesting_tree (struct nesting_info *root)
2915 struct nesting_info *n;
2916 FOR_EACH_NEST_INFO (n, root)
2917 unnest_nesting_tree_1 (n);
2920 /* Free the data structures allocated during this pass. */
2922 static void
2923 free_nesting_tree (struct nesting_info *root)
2925 struct nesting_info *node, *next;
2927 node = iter_nestinfo_start (root);
2930 next = iter_nestinfo_next (node);
2931 delete node->var_map;
2932 delete node->field_map;
2933 delete node->mem_refs;
2934 free (node);
2935 node = next;
2937 while (node);
2940 /* Gimplify a function and all its nested functions. */
2941 static void
2942 gimplify_all_functions (struct cgraph_node *root)
2944 struct cgraph_node *iter;
2945 if (!gimple_body (root->decl))
2946 gimplify_function_tree (root->decl);
2947 for (iter = root->nested; iter; iter = iter->next_nested)
2948 gimplify_all_functions (iter);
2951 /* Main entry point for this pass. Process FNDECL and all of its nested
2952 subroutines and turn them into something less tightly bound. */
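/* End-to-end, the sketches above combine roughly as

     struct FRAME_outer { int x; };

     static int inner (struct FRAME_outer *chain) { return chain->x; }

     void outer (void)
     {
       struct FRAME_outer frame;
       frame.x = 1;
       inner (&frame);
     }

   except that the identifier spellings are illustrative and the chain is
   passed through the target's static chain convention rather than as a
   visible parameter.  */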
2954 void
2955 lower_nested_functions (tree fndecl)
2957 struct cgraph_node *cgn;
2958 struct nesting_info *root;
2960 /* If there are no nested functions, there's nothing to do. */
2961 cgn = cgraph_node::get (fndecl);
2962 if (!cgn->nested)
2963 return;
2965 gimplify_all_functions (cgn);
2967 dump_file = dump_begin (TDI_nested, &dump_flags);
2968 if (dump_file)
2969 fprintf (dump_file, "\n;; Function %s\n\n",
2970 lang_hooks.decl_printable_name (fndecl, 2));
2972 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2973 root = create_nesting_tree (cgn);
2975 walk_all_functions (convert_nonlocal_reference_stmt,
2976 convert_nonlocal_reference_op,
2977 root);
2978 walk_all_functions (convert_local_reference_stmt,
2979 convert_local_reference_op,
2980 root);
2981 walk_all_functions (convert_nl_goto_reference, NULL, root);
2982 walk_all_functions (convert_nl_goto_receiver, NULL, root);
2984 convert_all_function_calls (root);
2985 finalize_nesting_tree (root);
2986 unnest_nesting_tree (root);
2988 free_nesting_tree (root);
2989 bitmap_obstack_release (&nesting_info_bitmap_obstack);
2991 if (dump_file)
2993 dump_end (TDI_nested, dump_file);
2994 dump_file = NULL;
2998 #include "gt-tree-nested.h"