/* gcc/tree-nested.c -- from the official GCC repository.  */
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tm_p.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-cfg.h"
38 #include "explow.h"
39 #include "langhooks.h"
40 #include "gimple-low.h"
41 #include "gomp-constants.h"
44 /* The object of this pass is to lower the representation of a set of nested
45 functions in order to expose all of the gory details of the various
46 nonlocal references. We want to do this sooner rather than later, in
47 order to give us more freedom in emitting all of the functions in question.
49 Back in olden times, when gcc was young, we developed an insanely
50 complicated scheme whereby variables which were referenced nonlocally
51 were forced to live in the stack of the declaring function, and then
52 the nested functions magically discovered where these variables were
53 placed. In order for this scheme to function properly, it required
54 that the outer function be partially expanded, then we switch to
55 compiling the inner function, and once done with those we switch back
56 to compiling the outer function. Such delicate ordering requirements
57 makes it difficult to do whole translation unit optimizations
58 involving such functions.
60 The implementation here is much more direct. Everything that can be
61 referenced by an inner function is a member of an explicitly created
62 structure herein called the "nonlocal frame struct". The incoming
63 static chain for a nested function is a pointer to this struct in
64 the parent. In this way, we settle on known offsets from a known
65 base, and so are decoupled from the logic that places objects in the
66 function's stack frame. More importantly, we don't have to wait for
67 that to happen -- since the compilation of the inner function is no
68 longer tied to a real stack frame, the nonlocal frame struct can be
69 allocated anywhere. Which means that the outer function is now
70 inlinable.
72 Theory of operation here is very simple. Iterate over all the
73 statements in all the functions (depth first) several times,
74 allocating structures and fields on demand. In general we want to
75 examine inner functions first, so that we can avoid making changes
76 to outer functions which are unnecessary.
78 The order of the passes matters a bit, in that later passes will be
79 skipped if it is discovered that the functions don't actually interact
80 at all. That is, they're nested in the lexical sense but could have
81 been written as independent functions without change. */
84 struct nesting_info
86 struct nesting_info *outer;
87 struct nesting_info *inner;
88 struct nesting_info *next;
90 hash_map<tree, tree> *field_map;
91 hash_map<tree, tree> *var_map;
92 hash_set<tree *> *mem_refs;
93 bitmap suppress_expansion;
95 tree context;
96 tree new_local_var_chain;
97 tree debug_var_chain;
98 tree frame_type;
99 tree frame_decl;
100 tree chain_field;
101 tree chain_decl;
102 tree nl_goto_field;
104 bool any_parm_remapped;
105 bool any_tramp_created;
106 char static_chain_added;
110 /* Iterate over the nesting tree, starting with ROOT, depth first. */
112 static inline struct nesting_info *
113 iter_nestinfo_start (struct nesting_info *root)
115 while (root->inner)
116 root = root->inner;
117 return root;
120 static inline struct nesting_info *
121 iter_nestinfo_next (struct nesting_info *node)
123 if (node->next)
124 return iter_nestinfo_start (node->next);
125 return node->outer;
128 #define FOR_EACH_NEST_INFO(I, ROOT) \
129 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
131 /* Obstack used for the bitmaps in the struct above. */
132 static struct bitmap_obstack nesting_info_bitmap_obstack;
135 /* We're working in so many different function contexts simultaneously,
136 that create_tmp_var is dangerous. Prevent mishap. */
137 #define create_tmp_var cant_use_create_tmp_var_here_dummy
139 /* Like create_tmp_var, except record the variable for registration at
140 the given nesting level. */
142 static tree
143 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
145 tree tmp_var;
147 /* If the type is of variable size or a type which must be created by the
148 frontend, something is wrong. Note that we explicitly allow
149 incomplete types here, since we create them ourselves here. */
150 gcc_assert (!TREE_ADDRESSABLE (type));
151 gcc_assert (!TYPE_SIZE_UNIT (type)
152 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
154 tmp_var = create_tmp_var_raw (type, prefix);
155 DECL_CONTEXT (tmp_var) = info->context;
156 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
157 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
158 if (TREE_CODE (type) == COMPLEX_TYPE
159 || TREE_CODE (type) == VECTOR_TYPE)
160 DECL_GIMPLE_REG_P (tmp_var) = 1;
162 info->new_local_var_chain = tmp_var;
164 return tmp_var;
167 /* Take the address of EXP to be used within function CONTEXT.
168 Mark it for addressability as necessary. */
170 tree
171 build_addr (tree exp)
173 mark_addressable (exp);
174 return build_fold_addr_expr (exp);
177 /* Insert FIELD into TYPE, sorted by alignment requirements. */
179 void
180 insert_field_into_struct (tree type, tree field)
182 tree *p;
184 DECL_CONTEXT (field) = type;
186 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
187 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
188 break;
190 DECL_CHAIN (field) = *p;
191 *p = field;
193 /* Set correct alignment for frame struct type. */
194 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
195 TYPE_ALIGN (type) = DECL_ALIGN (field);
198 /* Build or return the RECORD_TYPE that describes the frame state that is
199 shared between INFO->CONTEXT and its nested functions. This record will
200 not be complete until finalize_nesting_tree; up until that point we'll
201 be adding fields as necessary.
203 We also build the DECL that represents this frame in the function. */
205 static tree
206 get_frame_type (struct nesting_info *info)
208 tree type = info->frame_type;
209 if (!type)
211 char *name;
213 type = make_node (RECORD_TYPE);
215 name = concat ("FRAME.",
216 IDENTIFIER_POINTER (DECL_NAME (info->context)),
217 NULL);
218 TYPE_NAME (type) = get_identifier (name);
219 free (name);
221 info->frame_type = type;
222 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
223 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
225 /* ??? Always make it addressable for now, since it is meant to
226 be pointed to by the static chain pointer. This pessimizes
227 when it turns out that no static chains are needed because
228 the nested functions referencing non-local variables are not
229 reachable, but the true pessimization is to create the non-
230 local frame structure in the first place. */
231 TREE_ADDRESSABLE (info->frame_decl) = 1;
233 return type;
236 /* Return true if DECL should be referenced by pointer in the non-local
237 frame structure. */
239 static bool
240 use_pointer_in_frame (tree decl)
242 if (TREE_CODE (decl) == PARM_DECL)
244 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
245 sized decls, and inefficient to copy large aggregates. Don't bother
246 moving anything but scalar variables. */
247 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
249 else
251 /* Variable sized types make things "interesting" in the frame. */
252 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
256 /* Given DECL, a non-locally accessed variable, find or create a field
257 in the non-local frame structure for the given nesting context. */
259 static tree
260 lookup_field_for_decl (struct nesting_info *info, tree decl,
261 enum insert_option insert)
263 if (insert == NO_INSERT)
265 tree *slot = info->field_map->get (decl);
266 return slot ? *slot : NULL_TREE;
269 tree *slot = &info->field_map->get_or_insert (decl);
270 if (!*slot)
272 tree field = make_node (FIELD_DECL);
273 DECL_NAME (field) = DECL_NAME (decl);
275 if (use_pointer_in_frame (decl))
277 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
278 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
279 DECL_NONADDRESSABLE_P (field) = 1;
281 else
283 TREE_TYPE (field) = TREE_TYPE (decl);
284 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
285 DECL_ALIGN (field) = DECL_ALIGN (decl);
286 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
287 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
288 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
289 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
292 insert_field_into_struct (get_frame_type (info), field);
293 *slot = field;
295 if (TREE_CODE (decl) == PARM_DECL)
296 info->any_parm_remapped = true;
299 return *slot;
302 /* Build or return the variable that holds the static chain within
303 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
305 static tree
306 get_chain_decl (struct nesting_info *info)
308 tree decl = info->chain_decl;
310 if (!decl)
312 tree type;
314 type = get_frame_type (info->outer);
315 type = build_pointer_type (type);
317 /* Note that this variable is *not* entered into any BIND_EXPR;
318 the construction of this variable is handled specially in
319 expand_function_start and initialize_inlined_parameters.
320 Note also that it's represented as a parameter. This is more
321 close to the truth, since the initial value does come from
322 the caller. */
323 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
324 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
325 DECL_ARTIFICIAL (decl) = 1;
326 DECL_IGNORED_P (decl) = 1;
327 TREE_USED (decl) = 1;
328 DECL_CONTEXT (decl) = info->context;
329 DECL_ARG_TYPE (decl) = type;
331 /* Tell tree-inline.c that we never write to this variable, so
332 it can copy-prop the replacement value immediately. */
333 TREE_READONLY (decl) = 1;
335 info->chain_decl = decl;
337 if (dump_file
338 && (dump_flags & TDF_DETAILS)
339 && !DECL_STATIC_CHAIN (info->context))
340 fprintf (dump_file, "Setting static-chain for %s\n",
341 lang_hooks.decl_printable_name (info->context, 2));
343 DECL_STATIC_CHAIN (info->context) = 1;
345 return decl;
348 /* Build or return the field within the non-local frame state that holds
349 the static chain for INFO->CONTEXT. This is the way to walk back up
350 multiple nesting levels. */
352 static tree
353 get_chain_field (struct nesting_info *info)
355 tree field = info->chain_field;
357 if (!field)
359 tree type = build_pointer_type (get_frame_type (info->outer));
361 field = make_node (FIELD_DECL);
362 DECL_NAME (field) = get_identifier ("__chain");
363 TREE_TYPE (field) = type;
364 DECL_ALIGN (field) = TYPE_ALIGN (type);
365 DECL_NONADDRESSABLE_P (field) = 1;
367 insert_field_into_struct (get_frame_type (info), field);
369 info->chain_field = field;
371 if (dump_file
372 && (dump_flags & TDF_DETAILS)
373 && !DECL_STATIC_CHAIN (info->context))
374 fprintf (dump_file, "Setting static-chain for %s\n",
375 lang_hooks.decl_printable_name (info->context, 2));
377 DECL_STATIC_CHAIN (info->context) = 1;
379 return field;
382 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
384 static tree
385 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
386 gcall *call)
388 tree t;
390 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
391 gimple_call_set_lhs (call, t);
392 if (! gsi_end_p (*gsi))
393 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
394 gsi_insert_before (gsi, call, GSI_SAME_STMT);
396 return t;
400 /* Copy EXP into a temporary. Allocate the temporary in the context of
401 INFO and insert the initialization statement before GSI. */
403 static tree
404 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
406 tree t;
407 gimple *stmt;
409 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
410 stmt = gimple_build_assign (t, exp);
411 if (! gsi_end_p (*gsi))
412 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
413 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
415 return t;
419 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
421 static tree
422 gsi_gimplify_val (struct nesting_info *info, tree exp,
423 gimple_stmt_iterator *gsi)
425 if (is_gimple_val (exp))
426 return exp;
427 else
428 return init_tmp_var (info, exp, gsi);
431 /* Similarly, but copy from the temporary and insert the statement
432 after the iterator. */
434 static tree
435 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
437 tree t;
438 gimple *stmt;
440 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
441 stmt = gimple_build_assign (exp, t);
442 if (! gsi_end_p (*gsi))
443 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
444 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
446 return t;
449 /* Build or return the type used to represent a nested function trampoline. */
451 static GTY(()) tree trampoline_type;
453 static tree
454 get_trampoline_type (struct nesting_info *info)
456 unsigned align, size;
457 tree t;
459 if (trampoline_type)
460 return trampoline_type;
462 align = TRAMPOLINE_ALIGNMENT;
463 size = TRAMPOLINE_SIZE;
465 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
466 then allocate extra space so that we can do dynamic alignment. */
467 if (align > STACK_BOUNDARY)
469 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
470 align = STACK_BOUNDARY;
473 t = build_index_type (size_int (size - 1));
474 t = build_array_type (char_type_node, t);
475 t = build_decl (DECL_SOURCE_LOCATION (info->context),
476 FIELD_DECL, get_identifier ("__data"), t);
477 DECL_ALIGN (t) = align;
478 DECL_USER_ALIGN (t) = 1;
480 trampoline_type = make_node (RECORD_TYPE);
481 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
482 TYPE_FIELDS (trampoline_type) = t;
483 layout_type (trampoline_type);
484 DECL_CONTEXT (t) = trampoline_type;
486 return trampoline_type;
489 /* Given DECL, a nested function, find or create a field in the non-local
490 frame structure for a trampoline for this function. */
492 static tree
493 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
494 enum insert_option insert)
496 if (insert == NO_INSERT)
498 tree *slot = info->var_map->get (decl);
499 return slot ? *slot : NULL_TREE;
502 tree *slot = &info->var_map->get_or_insert (decl);
503 if (!*slot)
505 tree field = make_node (FIELD_DECL);
506 DECL_NAME (field) = DECL_NAME (decl);
507 TREE_TYPE (field) = get_trampoline_type (info);
508 TREE_ADDRESSABLE (field) = 1;
510 insert_field_into_struct (get_frame_type (info), field);
511 *slot = field;
513 info->any_tramp_created = true;
516 return *slot;
519 /* Build or return the field within the non-local frame state that holds
520 the non-local goto "jmp_buf". The buffer itself is maintained by the
521 rtl middle-end as dynamic stack space is allocated. */
523 static tree
524 get_nl_goto_field (struct nesting_info *info)
526 tree field = info->nl_goto_field;
527 if (!field)
529 unsigned size;
530 tree type;
532 /* For __builtin_nonlocal_goto, we need N words. The first is the
533 frame pointer, the rest is for the target's stack pointer save
534 area. The number of words is controlled by STACK_SAVEAREA_MODE;
535 not the best interface, but it'll do for now. */
536 if (Pmode == ptr_mode)
537 type = ptr_type_node;
538 else
539 type = lang_hooks.types.type_for_mode (Pmode, 1);
541 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
542 size = size / GET_MODE_SIZE (Pmode);
543 size = size + 1;
545 type = build_array_type
546 (type, build_index_type (size_int (size)));
548 field = make_node (FIELD_DECL);
549 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
550 TREE_TYPE (field) = type;
551 DECL_ALIGN (field) = TYPE_ALIGN (type);
552 TREE_ADDRESSABLE (field) = 1;
554 insert_field_into_struct (get_frame_type (info), field);
556 info->nl_goto_field = field;
559 return field;
562 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
564 static void
565 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
566 struct nesting_info *info, gimple_seq *pseq)
568 struct walk_stmt_info wi;
570 memset (&wi, 0, sizeof (wi));
571 wi.info = info;
572 wi.val_only = true;
573 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
577 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
579 static inline void
580 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
581 struct nesting_info *info)
583 gimple_seq body = gimple_body (info->context);
584 walk_body (callback_stmt, callback_op, info, &body);
585 gimple_set_body (info->context, body);
588 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
590 static void
591 walk_gimple_omp_for (gomp_for *for_stmt,
592 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
593 struct nesting_info *info)
595 struct walk_stmt_info wi;
596 gimple_seq seq;
597 tree t;
598 size_t i;
600 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
602 seq = NULL;
603 memset (&wi, 0, sizeof (wi));
604 wi.info = info;
605 wi.gsi = gsi_last (seq);
607 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
609 wi.val_only = false;
610 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
611 &wi, NULL);
612 wi.val_only = true;
613 wi.is_lhs = false;
614 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
615 &wi, NULL);
617 wi.val_only = true;
618 wi.is_lhs = false;
619 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
620 &wi, NULL);
622 t = gimple_omp_for_incr (for_stmt, i);
623 gcc_assert (BINARY_CLASS_P (t));
624 wi.val_only = false;
625 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
626 wi.val_only = true;
627 wi.is_lhs = false;
628 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
631 seq = gsi_seq (wi.gsi);
632 if (!gimple_seq_empty_p (seq))
634 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
635 annotate_all_with_location (seq, gimple_location (for_stmt));
636 gimple_seq_add_seq (&pre_body, seq);
637 gimple_omp_for_set_pre_body (for_stmt, pre_body);
641 /* Similarly for ROOT and all functions nested underneath, depth first. */
643 static void
644 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
645 struct nesting_info *root)
647 struct nesting_info *n;
648 FOR_EACH_NEST_INFO (n, root)
649 walk_function (callback_stmt, callback_op, n);
653 /* We have to check for a fairly pathological case. The operands of function
654 nested function are to be interpreted in the context of the enclosing
655 function. So if any are variably-sized, they will get remapped when the
656 enclosing function is inlined. But that remapping would also have to be
657 done in the types of the PARM_DECLs of the nested function, meaning the
658 argument types of that function will disagree with the arguments in the
659 calls to that function. So we'd either have to make a copy of the nested
660 function corresponding to each time the enclosing function was inlined or
661 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
662 function. The former is not practical. The latter would still require
663 detecting this case to know when to add the conversions. So, for now at
664 least, we don't inline such an enclosing function.
666 We have to do that check recursively, so here return indicating whether
667 FNDECL has such a nested function. ORIG_FN is the function we were
668 trying to inline to use for checking whether any argument is variably
669 modified by anything in it.
671 It would be better to do this in tree-inline.c so that we could give
672 the appropriate warning for why a function can't be inlined, but that's
673 too late since the nesting structure has already been flattened and
674 adding a flag just to record this fact seems a waste of a flag. */
676 static bool
677 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
679 struct cgraph_node *cgn = cgraph_node::get (fndecl);
680 tree arg;
682 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
684 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
685 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
686 return true;
688 if (check_for_nested_with_variably_modified (cgn->decl,
689 orig_fndecl))
690 return true;
693 return false;
696 /* Construct our local datastructure describing the function nesting
697 tree rooted by CGN. */
699 static struct nesting_info *
700 create_nesting_tree (struct cgraph_node *cgn)
702 struct nesting_info *info = XCNEW (struct nesting_info);
703 info->field_map = new hash_map<tree, tree>;
704 info->var_map = new hash_map<tree, tree>;
705 info->mem_refs = new hash_set<tree *>;
706 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
707 info->context = cgn->decl;
709 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
711 struct nesting_info *sub = create_nesting_tree (cgn);
712 sub->outer = info;
713 sub->next = info->inner;
714 info->inner = sub;
717 /* See discussion at check_for_nested_with_variably_modified for a
718 discussion of why this has to be here. */
719 if (check_for_nested_with_variably_modified (info->context, info->context))
720 DECL_UNINLINABLE (info->context) = true;
722 return info;
725 /* Return an expression computing the static chain for TARGET_CONTEXT
726 from INFO->CONTEXT. Insert any necessary computations before TSI. */
728 static tree
729 get_static_chain (struct nesting_info *info, tree target_context,
730 gimple_stmt_iterator *gsi)
732 struct nesting_info *i;
733 tree x;
735 if (info->context == target_context)
737 x = build_addr (info->frame_decl);
738 info->static_chain_added |= 1;
740 else
742 x = get_chain_decl (info);
743 info->static_chain_added |= 2;
745 for (i = info->outer; i->context != target_context; i = i->outer)
747 tree field = get_chain_field (i);
749 x = build_simple_mem_ref (x);
750 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
751 x = init_tmp_var (info, x, gsi);
755 return x;
759 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
760 frame as seen from INFO->CONTEXT. Insert any necessary computations
761 before GSI. */
763 static tree
764 get_frame_field (struct nesting_info *info, tree target_context,
765 tree field, gimple_stmt_iterator *gsi)
767 struct nesting_info *i;
768 tree x;
770 if (info->context == target_context)
772 /* Make sure frame_decl gets created. */
773 (void) get_frame_type (info);
774 x = info->frame_decl;
775 info->static_chain_added |= 1;
777 else
779 x = get_chain_decl (info);
780 info->static_chain_added |= 2;
782 for (i = info->outer; i->context != target_context; i = i->outer)
784 tree field = get_chain_field (i);
786 x = build_simple_mem_ref (x);
787 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
788 x = init_tmp_var (info, x, gsi);
791 x = build_simple_mem_ref (x);
794 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
795 return x;
798 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
800 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
801 in the nested function with DECL_VALUE_EXPR set to reference the true
802 variable in the parent function. This is used both for debug info
803 and in OMP lowering. */
805 static tree
806 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
808 tree target_context;
809 struct nesting_info *i;
810 tree x, field, new_decl;
812 tree *slot = &info->var_map->get_or_insert (decl);
814 if (*slot)
815 return *slot;
817 target_context = decl_function_context (decl);
819 /* A copy of the code in get_frame_field, but without the temporaries. */
820 if (info->context == target_context)
822 /* Make sure frame_decl gets created. */
823 (void) get_frame_type (info);
824 x = info->frame_decl;
825 i = info;
826 info->static_chain_added |= 1;
828 else
830 x = get_chain_decl (info);
831 info->static_chain_added |= 2;
832 for (i = info->outer; i->context != target_context; i = i->outer)
834 field = get_chain_field (i);
835 x = build_simple_mem_ref (x);
836 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
838 x = build_simple_mem_ref (x);
841 field = lookup_field_for_decl (i, decl, INSERT);
842 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
843 if (use_pointer_in_frame (decl))
844 x = build_simple_mem_ref (x);
846 /* ??? We should be remapping types as well, surely. */
847 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
848 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
849 DECL_CONTEXT (new_decl) = info->context;
850 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
851 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
852 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
853 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
854 TREE_READONLY (new_decl) = TREE_READONLY (decl);
855 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
856 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
857 if ((TREE_CODE (decl) == PARM_DECL
858 || TREE_CODE (decl) == RESULT_DECL
859 || TREE_CODE (decl) == VAR_DECL)
860 && DECL_BY_REFERENCE (decl))
861 DECL_BY_REFERENCE (new_decl) = 1;
863 SET_DECL_VALUE_EXPR (new_decl, x);
864 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
866 *slot = new_decl;
867 DECL_CHAIN (new_decl) = info->debug_var_chain;
868 info->debug_var_chain = new_decl;
870 if (!optimize
871 && info->context != target_context
872 && variably_modified_type_p (TREE_TYPE (decl), NULL))
873 note_nonlocal_vla_type (info, TREE_TYPE (decl));
875 return new_decl;
879 /* Callback for walk_gimple_stmt, rewrite all references to VAR
880 and PARM_DECLs that belong to outer functions.
882 The rewrite will involve some number of structure accesses back up
883 the static chain. E.g. for a variable FOO up one nesting level it'll
884 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
885 indirections apply to decls for which use_pointer_in_frame is true. */
887 static tree
888 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
890 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
891 struct nesting_info *const info = (struct nesting_info *) wi->info;
892 tree t = *tp;
894 *walk_subtrees = 0;
895 switch (TREE_CODE (t))
897 case VAR_DECL:
898 /* Non-automatic variables are never processed. */
899 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
900 break;
901 /* FALLTHRU */
903 case PARM_DECL:
904 if (decl_function_context (t) != info->context)
906 tree x;
907 wi->changed = true;
909 x = get_nonlocal_debug_decl (info, t);
910 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
912 tree target_context = decl_function_context (t);
913 struct nesting_info *i;
914 for (i = info->outer; i->context != target_context; i = i->outer)
915 continue;
916 x = lookup_field_for_decl (i, t, INSERT);
917 x = get_frame_field (info, target_context, x, &wi->gsi);
918 if (use_pointer_in_frame (t))
920 x = init_tmp_var (info, x, &wi->gsi);
921 x = build_simple_mem_ref (x);
925 if (wi->val_only)
927 if (wi->is_lhs)
928 x = save_tmp_var (info, x, &wi->gsi);
929 else
930 x = init_tmp_var (info, x, &wi->gsi);
933 *tp = x;
935 break;
937 case LABEL_DECL:
938 /* We're taking the address of a label from a parent function, but
939 this is not itself a non-local goto. Mark the label such that it
940 will not be deleted, much as we would with a label address in
941 static storage. */
942 if (decl_function_context (t) != info->context)
943 FORCED_LABEL (t) = 1;
944 break;
946 case ADDR_EXPR:
948 bool save_val_only = wi->val_only;
950 wi->val_only = false;
951 wi->is_lhs = false;
952 wi->changed = false;
953 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
954 wi->val_only = true;
956 if (wi->changed)
958 tree save_context;
960 /* If we changed anything, we might no longer be directly
961 referencing a decl. */
962 save_context = current_function_decl;
963 current_function_decl = info->context;
964 recompute_tree_invariant_for_addr_expr (t);
965 current_function_decl = save_context;
967 /* If the callback converted the address argument in a context
968 where we only accept variables (and min_invariant, presumably),
969 then compute the address into a temporary. */
970 if (save_val_only)
971 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
972 t, &wi->gsi);
975 break;
977 case REALPART_EXPR:
978 case IMAGPART_EXPR:
979 case COMPONENT_REF:
980 case ARRAY_REF:
981 case ARRAY_RANGE_REF:
982 case BIT_FIELD_REF:
983 /* Go down this entire nest and just look at the final prefix and
984 anything that describes the references. Otherwise, we lose track
985 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
986 wi->val_only = true;
987 wi->is_lhs = false;
988 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
990 if (TREE_CODE (t) == COMPONENT_REF)
991 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
992 NULL);
993 else if (TREE_CODE (t) == ARRAY_REF
994 || TREE_CODE (t) == ARRAY_RANGE_REF)
996 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
997 wi, NULL);
998 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
999 wi, NULL);
1000 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1001 wi, NULL);
1004 wi->val_only = false;
1005 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1006 break;
1008 case VIEW_CONVERT_EXPR:
1009 /* Just request to look at the subtrees, leaving val_only and lhs
1010 untouched. This might actually be for !val_only + lhs, in which
1011 case we don't want to force a replacement by a temporary. */
1012 *walk_subtrees = 1;
1013 break;
1015 default:
1016 if (!IS_TYPE_OR_DECL_P (t))
1018 *walk_subtrees = 1;
1019 wi->val_only = true;
1020 wi->is_lhs = false;
1022 break;
1025 return NULL_TREE;
static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
					     struct walk_stmt_info *);

/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   PCLAUSES points to the head of an OMP clause chain; WI carries the
   current nesting_info in WI->info.  Returns true if any rewritten
   clause requires the static chain to be passed into the region (so the
   caller must add a FIRSTPRIVATE/MAP clause for the chain decl).  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; the caller saves and
     restores info->suppress_expansion around the region body.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* A placeholder means there are init/merge GIMPLE sequences to
	     walk in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The linear step is an expression evaluated in this context;
	     rewrite any nonlocal references in it first.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables need no rewriting.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      /* Replace the decl with its nonlocal debug decl and
		 suppress the usual frame-field expansion for it inside
		 the region.  PRIVATE clauses don't read the outer value,
		 so they alone don't force the chain in.  */
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* These clauses carry a single expression operand; rewrite it.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					 &dummy, wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  /* The mapped entity may be a bare decl (handle like the decl
	     clauses) or an arbitrary reference expression.  */
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	  /* No operands that could contain nonlocal references.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to clauses that
     have them, temporarily re-parenting reduction placeholders into
     this context so the walker treats their references as local.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
/* Create nonlocal debug decls for nonlocal VLA array bounds.

   Peels pointers and other wrapper types off TYPE looking for an
   ARRAY_TYPE whose domain bounds are VAR_DECLs/PARM_DECLs declared in
   an outer function, and registers nonlocal debug decls for those
   bounds so debug info for the VLA remains usable in INFO->context.  */

static void
note_nonlocal_vla_type (struct nesting_info *info, tree type)
{
  /* Skip unnamed pointer wrappers to find a possibly named type.  */
  while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
    type = TREE_TYPE (type);

  /* If the type has a typedef name, look through to the original type
     where the VLA bounds actually live.  */
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));

  /* Strip remaining indirection: pointers, vectors, and function or
     method types, whose element/return type may be the VLA.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == VECTOR_TYPE
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE)
    type = TREE_TYPE (type);

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree domain, t;

      /* Element type may itself be a VLA (multi-dimensional case).  */
      note_nonlocal_vla_type (info, TREE_TYPE (type));
      domain = TYPE_DOMAIN (type);
      if (domain)
	{
	  t = TYPE_MIN_VALUE (domain);
	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	  t = TYPE_MAX_VALUE (domain);
	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	}
    }
}
/* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
   in BLOCK.

   Scans BLOCK's variable chain for variably-modified variables that
   come from an outer function (and already carry a DECL_VALUE_EXPR)
   and notes their types via note_nonlocal_vla_type.  */

static void
note_nonlocal_block_vlas (struct nesting_info *info, tree block)
{
  tree var;

  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL
	&& variably_modified_type_p (TREE_TYPE (var), NULL)
	&& DECL_HAS_VALUE_EXPR_P (var)
	&& decl_function_context (var) != info->context)
      note_nonlocal_vla_type (info, TREE_TYPE (var));
}
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = true;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If any clause needs the static chain inside the region, pass it
	 in explicitly as a FIRSTPRIVATE clause.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect new temporaries created while walking the body so they
	 can be declared inside the region, not the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
			   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
		 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions execute on the host; the clause
	 suppression must not leak into the body walk.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      /* Offloaded region: map the static chain to the device.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* No clauses to rewrite; just walk the body.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
	gbind *bind_stmt = as_a <gbind *> (stmt);
	/* At -O0 the later VLA-bound discovery doesn't run; note the
	   nonlocal VLA bounds of this block's variables here.  */
	if (!optimize && gimple_bind_block (bind_stmt))
	  note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));

	for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	  if (TREE_CODE (var) == NAMELIST_DECL)
	    {
	      /* Adjust decls mentioned in NAMELIST_DECL.  */
	      tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	      tree decl;
	      unsigned int i;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
		{
		  if (TREE_CODE (decl) == VAR_DECL
		      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		    continue;
		  if (decl_function_context (decl) != info->context)
		    CONSTRUCTOR_ELT (decls, i)->value
		      = get_nonlocal_debug_decl (info, decl);
		}
	    }

	*handled_ops_p = false;
	return NULL_TREE;
      }

    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
/* A subroutine of convert_local_reference.  Create a local variable
   in the parent function with DECL_VALUE_EXPR set to reference the
   field in FRAME.  This is used both for debug info and in OMP
   lowering.

   Results are cached in INFO->var_map, so at most one debug decl is
   created per DECL.  The returned decl mirrors DECL's source-visible
   properties but lives in INFO->context.  */

static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);
  if (*slot)
    return *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  /* The value expr is FRAME.FIELD.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  /* Copy over the flags that affect debugging and semantics of the
     original decl.  */
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.

   TP points at the tree being examined; WALK_SUBTREES is set when the
   generic walker should recurse; DATA is the walk_stmt_info whose
   val_only/is_lhs fields describe the context of *TP.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Default to the debug decl; use a real frame access unless
	     expansion is suppressed for this decl (e.g. OMP clauses).  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
					  struct walk_stmt_info *);

/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.

   Returns true if any clause referenced a variable that lives in the
   local frame, in which case the caller must make the frame itself
   available inside the OMP region (SHARED/MAP clause).  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; the caller saves and
     restores info->suppress_expansion around the region body.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* A placeholder means there are init/merge GIMPLE sequences to
	     walk in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
				      wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables need no rewriting.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  /* Refer to the debug decl inside the clause and stop
		     the walker from expanding the decl into a frame
		     access within the region.  */
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* These clauses carry a single expression operand; rewrite it.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					  &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
					  &dummy, wi);
	    }
	  /* The mapped entity may be a bare decl (handle like the decl
	     clauses) or an arbitrary reference expression.  */
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	  /* No operands that could contain local frame references.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to clauses that
     have them, temporarily re-parenting reduction placeholders into
     this context.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.

   Statement-level counterpart of convert_local_reference_op: handles
   OMP regions (clause rewriting plus frame propagation), clobbers of
   frame variables, NAMELIST_DECLs in binds, and GIMPLE_CONDs.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If a clause references a frame variable, share the whole frame
	 with the region.  */
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
				     wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect new temporaries created while walking the body so they
	 can be declared inside the region.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions execute on the host; the clause
	 suppression must not leak into the body walk.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      /* Offloaded region: map the whole frame to and from the device.  */
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* No clauses to rewrite; just walk the body.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* A clobber of a variable that moved into the frame would clobber
	 the frame field; drop such clobbers entirely.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (!use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (TREE_CODE (decl) == VAR_DECL
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      CONSTRUCTOR_ELT (decls, i)->value
			= get_local_debug_decl (info, decl, field);
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple *stmt = gsi_stmt (*gsi);

  /* Only GIMPLE_GOTOs to a LABEL_DECL declared in an enclosing function
     are rewritten; anything else is left to the generic walker.  */
  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Find the nesting_info of the function that declares the label.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2145 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2146 are referenced via nonlocal goto from a nested function. The rewrite
2147 will involve installing a newly generated DECL_NONLOCAL label, and
2148 (potentially) a branch around the rtl gunk that is assumed to be
2149 attached to such a label. */
2151 static tree
2152 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2153 struct walk_stmt_info *wi)
2155 struct nesting_info *const info = (struct nesting_info *) wi->info;
2156 tree label, new_label;
2157 gimple_stmt_iterator tmp_gsi;
2158 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
  /* Only GIMPLE_LABEL statements are candidates.  */
2160 if (!stmt)
2162 *handled_ops_p = false;
2163 return NULL_TREE;
2166 label = gimple_label_label (stmt);
  /* Only labels recorded in var_map by convert_nl_goto_reference (i.e.
     actually targeted by a non-local goto) need a receiver.  */
2168 tree *slot = info->var_map->get (label);
2169 if (!slot)
2171 *handled_ops_p = false;
2172 return NULL_TREE;
2175 /* If there's any possibility that the previous statement falls through,
2176 then we must branch around the new non-local label. */
2177 tmp_gsi = wi->gsi;
2178 gsi_prev (&tmp_gsi);
2179 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2181 gimple *stmt = gimple_build_goto (label);
2182 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  /* Emit the DECL_NONLOCAL label immediately before the user label, so
     normal gotos still reach the original label unchanged.  */
2185 new_label = (tree) *slot;
2186 stmt = gimple_build_label (new_label);
2187 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2189 *handled_ops_p = true;
2190 return NULL_TREE;
2194 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2195 of nested functions that require the use of trampolines. The rewrite
2196 will involve a reference a trampoline generated for the occasion. */
2198 static tree
2199 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2201 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2202 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2203 tree t = *tp, decl, target_context, x, builtin;
2204 gcall *call;
  /* By default do not descend; the default case below re-enables the walk
     for non-type, non-decl operands.  */
2206 *walk_subtrees = 0;
2207 switch (TREE_CODE (t))
2209 case ADDR_EXPR:
2210 /* Build
2211 T.1 = &CHAIN->tramp;
2212 T.2 = __builtin_adjust_trampoline (T.1);
2213 T.3 = (func_type)T.2;
2216 decl = TREE_OPERAND (t, 0);
2217 if (TREE_CODE (decl) != FUNCTION_DECL)
2218 break;
2220 /* Only need to process nested functions. */
2221 target_context = decl_function_context (decl);
2222 if (!target_context)
2223 break;
2225 /* If the nested function doesn't use a static chain, then
2226 it doesn't need a trampoline. */
2227 if (!DECL_STATIC_CHAIN (decl))
2228 break;
2230 /* If we don't want a trampoline, then don't build one. */
2231 if (TREE_NO_TRAMPOLINE (t))
2232 break;
2234 /* Lookup the immediate parent of the callee, as that's where
2235 we need to insert the trampoline. */
2236 for (i = info; i->context != target_context; i = i->outer)
2237 continue;
2238 x = lookup_tramp_for_decl (i, decl, INSERT);
2240 /* Compute the address of the field holding the trampoline. */
2241 x = get_frame_field (info, target_context, x, &wi->gsi)ip;
2242 x = build_addr (x);
2243 x = gsi_gimplify_val (info, x, &wi->gsi);
2245 /* Do machine-specific ugliness. Normally this will involve
2246 computing extra alignment, but it can really be anything. */
2247 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2248 call = gimple_build_call (builtin, 1, x);
2249 x = init_tmp_var_with_call (info, &wi->gsi, call);
2251 /* Cast back to the proper function type. */
2252 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2253 x = init_tmp_var (info, x, &wi->gsi);
  /* Replace &nested_fn with the adjusted trampoline address.  */
2255 *tp = x;
2256 break;
2258 default:
2259 if (!IS_TYPE_OR_DECL_P (t))
2260 *walk_subtrees = 1;
2261 break;
2264 return NULL_TREE;
2268 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2269 to addresses of nested functions that require the use of
2270 trampolines. The rewrite will involve a reference a trampoline
2271 generated for the occasion. */
2273 static tree
2274 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2275 struct walk_stmt_info *wi)
2277 struct nesting_info *info = (struct nesting_info *) wi->info;
2278 gimple *stmt = gsi_stmt (*gsi);
2280 switch (gimple_code (stmt))
2282 case GIMPLE_CALL:
2284 /* Only walk call arguments, lest we generate trampolines for
2285 direct calls. */
2286 unsigned long i, nargs = gimple_call_num_args (stmt);
2287 for (i = 0; i < nargs; i++)
2288 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2289 wi, NULL);
2290 break;
2293 case GIMPLE_OMP_TARGET:
  /* Non-offloaded target regions need no special chain handling here.  */
2294 if (!is_gimple_omp_offloaded (stmt))
2296 *handled_ops_p = false;
2297 return NULL_TREE;
2299 /* FALLTHRU */
2300 case GIMPLE_OMP_PARALLEL:
2301 case GIMPLE_OMP_TASK:
  /* Walk the OMP region body with fresh new_local_var_chain and
     static_chain_added state, then restore/merge them afterwards, so
     that vars and chain clauses land on this region rather than the
     enclosing one.  */
2303 tree save_local_var_chain = info->new_local_var_chain;
2304 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2305 info->new_local_var_chain = NULL;
2306 char save_static_chain_added = info->static_chain_added;
2307 info->static_chain_added = 0;
2308 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2309 info, gimple_omp_body_ptr (stmt));
2310 if (info->new_local_var_chain)
2311 declare_vars (info->new_local_var_chain,
2312 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2313 false);
  /* Bit 0 of static_chain_added tracks FRAME.*, bit 1 tracks CHAIN.*.  */
2314 for (int i = 0; i < 2; i++)
2316 tree c, decl;
2317 if ((info->static_chain_added & (1 << i)) == 0)
2318 continue;
2319 decl = i ? get_chain_decl (info) : info->frame_decl;
2320 /* Don't add CHAIN.* or FRAME.* twice. */
2321 for (c = gimple_omp_taskreg_clauses (stmt);
2323 c = OMP_CLAUSE_CHAIN (c))
2324 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2325 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2326 && OMP_CLAUSE_DECL (c) == decl)
2327 break;
  /* parallel/task regions get FIRSTPRIVATE/SHARED clauses; offloaded
     target regions get MAP clauses instead.  */
2328 if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2330 c = build_omp_clause (gimple_location (stmt),
2331 i ? OMP_CLAUSE_FIRSTPRIVATE
2332 : OMP_CLAUSE_SHARED);
2333 OMP_CLAUSE_DECL (c) = decl;
2334 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2335 gimple_omp_taskreg_set_clauses (stmt, c);
2337 else if (c == NULL)
2339 c = build_omp_clause (gimple_location (stmt),
2340 OMP_CLAUSE_MAP);
2341 OMP_CLAUSE_DECL (c) = decl;
2342 OMP_CLAUSE_SET_MAP_KIND (c,
2343 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2344 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2345 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2346 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2350 info->new_local_var_chain = save_local_var_chain;
2351 info->static_chain_added |= save_static_chain_added;
2353 break;
2355 default:
2356 *handled_ops_p = false;
2357 return NULL_TREE;
2360 *handled_ops_p = true;
2361 return NULL_TREE;
2366 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2367 that reference nested functions to make sure that the static chain
2368 is set up properly for the call. */
2370 static tree
2371 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2372 struct walk_stmt_info *wi)
2374 struct nesting_info *const info = (struct nesting_info *) wi->info;
2375 tree decl, target_context;
2376 char save_static_chain_added;
2377 int i;
2378 gimple *stmt = gsi_stmt (*gsi);
2380 switch (gimple_code (stmt))
2382 case GIMPLE_CALL:
  /* If a static chain was already supplied, leave it alone.  */
2383 if (gimple_call_chain (stmt))
2384 break;
  /* Indirect calls have no fndecl; they were handled via trampolines.  */
2385 decl = gimple_call_fndecl (stmt);
2386 if (!decl)
2387 break;
2388 target_context = decl_function_context (decl);
2389 if (target_context && DECL_STATIC_CHAIN (decl))
2391 gimple_call_set_chain (as_a <gcall *> (stmt),
2392 get_static_chain (info, target_context,
2393 &wi->gsi));
  /* Record whether we needed FRAME.* (bit 0, callee is our own nested
     function) or CHAIN.* (bit 1, callee is in an outer context).  */
2394 info->static_chain_added |= (1 << (info->context != target_context));
2396 break;
2398 case GIMPLE_OMP_PARALLEL:
2399 case GIMPLE_OMP_TASK:
  /* Recurse into the region body with cleared static_chain_added, then
     add FIRSTPRIVATE/SHARED clauses for any CHAIN.*/FRAME.* the body
     turned out to need.  */
2400 save_static_chain_added = info->static_chain_added;
2401 info->static_chain_added = 0;
2402 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2403 for (i = 0; i < 2; i++)
2405 tree c, decl;
2406 if ((info->static_chain_added & (1 << i)) == 0)
2407 continue;
2408 decl = i ? get_chain_decl (info) : info->frame_decl;
2409 /* Don't add CHAIN.* or FRAME.* twice. */
2410 for (c = gimple_omp_taskreg_clauses (stmt);
2412 c = OMP_CLAUSE_CHAIN (c))
2413 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2414 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2415 && OMP_CLAUSE_DECL (c) == decl)
2416 break;
2417 if (c == NULL)
2419 c = build_omp_clause (gimple_location (stmt),
2420 i ? OMP_CLAUSE_FIRSTPRIVATE
2421 : OMP_CLAUSE_SHARED);
2422 OMP_CLAUSE_DECL (c) = decl;
2423 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2424 gimple_omp_taskreg_set_clauses (stmt, c);
2427 info->static_chain_added |= save_static_chain_added;
2428 break;
2430 case GIMPLE_OMP_TARGET:
  /* Non-offloaded target regions only need the body walked.  */
2431 if (!is_gimple_omp_offloaded (stmt))
2433 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2434 break;
  /* Offloaded regions: same dance as parallel/task, but the chain and
     frame must be mapped to the device via MAP clauses.  */
2436 save_static_chain_added = info->static_chain_added;
2437 info->static_chain_added = 0;
2438 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2439 for (i = 0; i < 2; i++)
2441 tree c, decl;
2442 if ((info->static_chain_added & (1 << i)) == 0)
2443 continue;
2444 decl = i ? get_chain_decl (info) : info->frame_decl;
2445 /* Don't add CHAIN.* or FRAME.* twice. */
2446 for (c = gimple_omp_target_clauses (stmt);
2448 c = OMP_CLAUSE_CHAIN (c))
2449 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2450 && OMP_CLAUSE_DECL (c) == decl)
2451 break;
2452 if (c == NULL)
2454 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2455 OMP_CLAUSE_DECL (c) = decl;
2456 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2457 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2458 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2459 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2463 info->static_chain_added |= save_static_chain_added;
2464 break;
2466 case GIMPLE_OMP_FOR:
2467 walk_body (convert_gimple_call, NULL, info,
2468 gimple_omp_for_pre_body_ptr (stmt));
2469 /* FALLTHRU */
2470 case GIMPLE_OMP_SECTIONS:
2471 case GIMPLE_OMP_SECTION:
2472 case GIMPLE_OMP_SINGLE:
2473 case GIMPLE_OMP_TEAMS:
2474 case GIMPLE_OMP_MASTER:
2475 case GIMPLE_OMP_TASKGROUP:
2476 case GIMPLE_OMP_ORDERED:
2477 case GIMPLE_OMP_CRITICAL:
  /* Other OMP constructs: just recurse into their bodies.  */
2478 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2479 break;
2481 default:
2482 /* Keep looking for other operands. */
2483 *handled_ops_p = false;
2484 return NULL_TREE;
2487 *handled_ops_p = true;
2488 return NULL_TREE;
2491 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2492 call expressions. At the same time, determine if a nested function
2493 actually uses its static chain; if not, remember that. */
2495 static void
2496 convert_all_function_calls (struct nesting_info *root)
2498 unsigned int chain_count = 0, old_chain_count, iter_count;
2499 struct nesting_info *n;
2501 /* First, optimistically clear static_chain for all decls that haven't
2502 used the static chain already for variable access. But always create
2503 it if not optimizing. This makes it possible to reconstruct the static
2504 nesting tree at run time and thus to resolve up-level references from
2505 within the debugger. */
2506 FOR_EACH_NEST_INFO (n, root)
2508 tree decl = n->context;
2509 if (!optimize)
2511 if (n->inner)
2512 (void) get_frame_type (n);
2513 if (n->outer)
2514 (void) get_chain_decl (n);
2516 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2518 DECL_STATIC_CHAIN (decl) = 0;
2519 if (dump_file && (dump_flags & TDF_DETAILS))
2520 fprintf (dump_file, "Guessing no static-chain for %s\n",
2521 lang_hooks.decl_printable_name (decl, 2));
2523 else
2524 DECL_STATIC_CHAIN (decl) = 1;
  /* chain_count tallies how many functions currently need a chain; the
     loop below iterates until this reaches a fixed point.  */
2525 chain_count += DECL_STATIC_CHAIN (decl);
2528 /* Walk the functions and perform transformations. Note that these
2529 transformations can induce new uses of the static chain, which in turn
2530 require re-examining all users of the decl. */
2531 /* ??? It would make sense to try to use the call graph to speed this up,
2532 but the call graph hasn't really been built yet. Even if it did, we
2533 would still need to iterate in this loop since address-of references
2534 wouldn't show up in the callgraph anyway. */
2535 iter_count = 0;
2538 old_chain_count = chain_count;
2539 chain_count = 0;
2540 iter_count++;
2542 if (dump_file && (dump_flags & TDF_DETAILS))
2543 fputc ('\n', dump_file);
2545 FOR_EACH_NEST_INFO (n, root)
2547 tree decl = n->context;
2548 walk_function (convert_tramp_reference_stmt,
2549 convert_tramp_reference_op, n);
2550 walk_function (convert_gimple_call, NULL, n);
2551 chain_count += DECL_STATIC_CHAIN (decl);
  /* Fixed point: no function changed its need for a static chain.  */
2554 while (chain_count != old_chain_count);
2556 if (dump_file && (dump_flags & TDF_DETAILS))
2557 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2558 iter_count);
/* copy_body_data subclass carrying the nesting_info root, used while
   remapping types of debug decls and VLA value expressions.  */
2561 struct nesting_copy_body_data
2563 copy_body_data cb;
2564 struct nesting_info *root;
2567 /* A helper subroutine for debug_var_chain type remapping. */
2569 static tree
2570 nesting_copy_decl (tree decl, copy_body_data *id)
2572 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
  /* Decls already remapped by the nesting pass map to their replacement.  */
2573 tree *slot = nid->root->var_map->get (decl);
2575 if (slot)
2576 return (tree) *slot;
2578 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2580 tree new_decl = copy_decl_no_change (decl, id);
2581 DECL_ORIGINAL_TYPE (new_decl)
2582 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2583 return new_decl;
  /* Variables, parameters and results not in var_map are left as-is.  */
2586 if (TREE_CODE (decl) == VAR_DECL
2587 || TREE_CODE (decl) == PARM_DECL
2588 || TREE_CODE (decl) == RESULT_DECL)
2589 return decl;
2591 return copy_decl_no_change (decl, id);
2594 /* A helper function for remap_vla_decls. See if *TP contains
2595 some remapped variables. */
2597 static tree
2598 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2600 struct nesting_info *root = (struct nesting_info *) data;
2601 tree t = *tp;
2603 if (DECL_P (t))
2605 *walk_subtrees = 0;
  /* Returning the non-NULL mapping terminates the walk_tree scan,
     signalling to the caller that a remapped variable was found.  */
2606 tree *slot = root->var_map->get (t);
2608 if (slot)
2609 return *slot;
2611 return NULL;
2614 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2615 involved. */
2617 static void
2618 remap_vla_decls (tree block, struct nesting_info *root)
2620 tree var, subblock, val, type;
2621 struct nesting_copy_body_data id;
  /* Depth-first over the lexical block tree.  */
2623 for (subblock = BLOCK_SUBBLOCKS (block);
2624 subblock;
2625 subblock = BLOCK_CHAIN (subblock))
2626 remap_vla_decls (subblock, root);
  /* First scan: bail out early (var == NULL at loop end) if no VLA in
     this block refers to a remapped variable, avoiding the copy_body
     setup below.  */
2628 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2629 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2631 val = DECL_VALUE_EXPR (var);
2632 type = TREE_TYPE (var);
2634 if (!(TREE_CODE (val) == INDIRECT_REF
2635 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2636 && variably_modified_type_p (type, NULL)))
2637 continue;
2639 if (root->var_map->get (TREE_OPERAND (val, 0))
2640 || walk_tree (&type, contains_remapped_vars, root, NULL))
2641 break;
2644 if (var == NULL_TREE)
2645 return;
2647 memset (&id, 0, sizeof (id));
2648 id.cb.copy_decl = nesting_copy_decl;
2649 id.cb.decl_map = new hash_map<tree, tree>;
2650 id.root = root;
  /* Second scan: starting at the first affected var, remap types and
     value expressions of all matching VLA decls.  */
2652 for (; var; var = DECL_CHAIN (var))
2653 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2655 struct nesting_info *i;
2656 tree newt, context;
2658 val = DECL_VALUE_EXPR (var);
2659 type = TREE_TYPE (var);
2661 if (!(TREE_CODE (val) == INDIRECT_REF
2662 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2663 && variably_modified_type_p (type, NULL)))
2664 continue;
2666 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2667 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2668 continue;
2670 context = decl_function_context (var);
2671 for (i = root; i; i = i->outer)
2672 if (i->context == context)
2673 break;
2675 if (i == NULL)
2676 continue;
2678 /* Fully expand value expressions. This avoids having debug variables
2679 only referenced from them and that can be swept during GC. */
2680 if (slot)
2682 tree t = (tree) *slot;
2683 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2684 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2687 id.cb.src_fn = i->context;
2688 id.cb.dst_fn = i->context;
2689 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2691 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
  /* Walk through unnamed pointer layers in parallel on the old and the
     remapped type, so TYPE_NAME comparison below lines up.  */
2692 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2694 newt = TREE_TYPE (newt);
2695 type = TREE_TYPE (type);
2697 if (TYPE_NAME (newt)
2698 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2699 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2700 && newt != type
2701 && TYPE_NAME (newt) == TYPE_NAME (type))
2702 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2704 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2705 if (val != DECL_VALUE_EXPR (var))
2706 SET_DECL_VALUE_EXPR (var, val);
2709 delete id.cb.decl_map;
2712 /* Fold the MEM_REF *E. */
  /* Traversal callback for root->mem_refs: fold each recorded MEM_REF
     in place.  Always returns true so the traversal continues.  */
2713 bool
2714 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2716 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2717 *ref_p = fold (*ref_p);
2718 return true;
2721 /* Do "everything else" to clean up or complete state collected by the
2722 various walking passes -- lay out the types and decls, generate code
2723 to initialize the frame decl, store critical expressions in the
2724 struct function for rtl to find. */
2726 static void
2727 finalize_nesting_tree_1 (struct nesting_info *root)
2729 gimple_seq stmt_list;
2730 gimple *stmt;
2731 tree context = root->context;
2732 struct function *sf;
2734 stmt_list = NULL;
2736 /* If we created a non-local frame type or decl, we need to lay them
2737 out at this time. */
2738 if (root->frame_type)
2740 /* In some cases the frame type will trigger the -Wpadded warning.
2741 This is not helpful; suppress it. */
2742 int save_warn_padded = warn_padded;
2743 tree *adjust;
2745 warn_padded = 0;
2746 layout_type (root->frame_type);
2747 warn_padded = save_warn_padded;
2748 layout_decl (root->frame_decl, 0);
2750 /* Remove root->frame_decl from root->new_local_var_chain, so
2751 that we can declare it also in the lexical blocks, which
2752 helps ensure virtual regs that end up appearing in its RTL
2753 expression get substituted in instantiate_virtual_regs(). */
2754 for (adjust = &root->new_local_var_chain;
2755 *adjust != root->frame_decl;
2756 adjust = &DECL_CHAIN (*adjust))
2757 gcc_assert (DECL_CHAIN (*adjust));
2758 *adjust = DECL_CHAIN (*adjust);
2760 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2761 declare_vars (root->frame_decl,
2762 gimple_seq_first_stmt (gimple_body (context)), true);
2765 /* If any parameters were referenced non-locally, then we need to
2766 insert a copy. Likewise, if any variables were referenced by
2767 pointer, we need to initialize the address. */
2768 if (root->any_parm_remapped)
2770 tree p;
2771 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2773 tree field, x, y;
2775 field = lookup_field_for_decl (root, p, NO_INSERT);
2776 if (!field)
2777 continue;
  /* Pointer-in-frame parms store their address; others store a copy
     of the value itself.  */
2779 if (use_pointer_in_frame (p))
2780 x = build_addr (p);
2781 else
2782 x = p;
2784 /* If the assignment is from a non-register the stmt is
2785 not valid gimple. Make it so by using a temporary instead. */
2786 if (!is_gimple_reg (x)
2787 && is_gimple_reg_type (TREE_TYPE (x)))
2789 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2790 x = init_tmp_var (root, x, &gsi);
  /* FRAME.field = x  */
2793 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2794 root->frame_decl, field, NULL_TREE);
2795 stmt = gimple_build_assign (y, x);
2796 gimple_seq_add_stmt (&stmt_list, stmt);
2800 /* If a chain_field was created, then it needs to be initialized
2801 from chain_decl. */
2802 if (root->chain_field)
2804 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2805 root->frame_decl, root->chain_field, NULL_TREE);
2806 stmt = gimple_build_assign (x, get_chain_decl (root));
2807 gimple_seq_add_stmt (&stmt_list, stmt);
2810 /* If trampolines were created, then we need to initialize them. */
2811 if (root->any_tramp_created)
2813 struct nesting_info *i;
2814 for (i = root->inner; i ; i = i->next)
2816 tree arg1, arg2, arg3, x, field;
2818 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2819 if (!field)
2820 continue;
2822 gcc_assert (DECL_STATIC_CHAIN (i->context));
2823 arg3 = build_addr (root->frame_decl);
2825 arg2 = build_addr (i->context);
2827 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2828 root->frame_decl, field, NULL_TREE);
2829 arg1 = build_addr (x);
  /* __builtin_init_trampoline (&FRAME.tramp, &nested_fn, &FRAME).  */
2831 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2832 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2833 gimple_seq_add_stmt (&stmt_list, stmt);
2837 /* If we created initialization statements, insert them. */
2838 if (stmt_list)
2840 gbind *bind;
2841 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2842 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
  /* Prepend the init statements to the outermost bind's body.  */
2843 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2844 gimple_bind_set_body (bind, stmt_list);
2847 /* If a chain_decl was created, then it needs to be registered with
2848 struct function so that it gets initialized from the static chain
2849 register at the beginning of the function. */
2850 sf = DECL_STRUCT_FUNCTION (root->context);
2851 sf->static_chain_decl = root->chain_decl;
2853 /* Similarly for the non-local goto save area. */
2854 if (root->nl_goto_field)
2856 sf->nonlocal_goto_save_area
2857 = get_frame_field (root, context, root->nl_goto_field, NULL);
2858 sf->has_nonlocal_label = 1;
2861 /* Make sure all new local variables get inserted into the
2862 proper BIND_EXPR. */
2863 if (root->new_local_var_chain)
2864 declare_vars (root->new_local_var_chain,
2865 gimple_seq_first_stmt (gimple_body (root->context)),
2866 false);
2868 if (root->debug_var_chain)
2870 tree debug_var;
2871 gbind *scope;
2873 remap_vla_decls (DECL_INITIAL (root->context), root);
2875 for (debug_var = root->debug_var_chain; debug_var;
2876 debug_var = DECL_CHAIN (debug_var))
2877 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2878 break;
2880 /* If there are any debug decls with variable length types,
2881 remap those types using other debug_var_chain variables. */
2882 if (debug_var)
2884 struct nesting_copy_body_data id;
2886 memset (&id, 0, sizeof (id));
2887 id.cb.copy_decl = nesting_copy_decl;
2888 id.cb.decl_map = new hash_map<tree, tree>;
2889 id.root = root;
2891 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2892 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2894 tree type = TREE_TYPE (debug_var);
2895 tree newt, t = type;
2896 struct nesting_info *i;
  /* Find the innermost enclosing context whose variables the type
     actually depends on.  */
2898 for (i = root; i; i = i->outer)
2899 if (variably_modified_type_p (type, i->context))
2900 break;
2902 if (i == NULL)
2903 continue;
2905 id.cb.src_fn = i->context;
2906 id.cb.dst_fn = i->context;
2907 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2909 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
  /* Same unnamed-pointer peeling as in remap_vla_decls.  */
2910 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2912 newt = TREE_TYPE (newt);
2913 t = TREE_TYPE (t);
2915 if (TYPE_NAME (newt)
2916 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2917 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2918 && newt != t
2919 && TYPE_NAME (newt) == TYPE_NAME (t))
2920 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2923 delete id.cb.decl_map;
2926 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
2927 if (gimple_bind_block (scope))
2928 declare_vars (root->debug_var_chain, scope, true);
2929 else
2930 BLOCK_VARS (DECL_INITIAL (root->context))
2931 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2932 root->debug_var_chain);
2935 /* Fold the rewritten MEM_REF trees. */
2936 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
2938 /* Dump the translated tree function. */
2939 if (dump_file)
2941 fputs ("\n\n", dump_file);
2942 dump_function_to_file (root->context, dump_file, dump_flags);
  /* Run finalize_nesting_tree_1 on every function in the nesting tree.  */
2946 static void
2947 finalize_nesting_tree (struct nesting_info *root)
2949 struct nesting_info *n;
2950 FOR_EACH_NEST_INFO (n, root)
2951 finalize_nesting_tree_1 (n);
2954 /* Unnest the nodes and pass them to cgraph. */
2956 static void
2957 unnest_nesting_tree_1 (struct nesting_info *root)
2959 struct cgraph_node *node = cgraph_node::get (root->context);
2961 /* For nested functions update the cgraph to reflect unnesting.
2962 We also delay finalizing of these functions up to this point. */
2963 if (node->origin)
2965 node->unnest ();
2966 cgraph_node::finalize_function (root->context, true);
  /* Run unnest_nesting_tree_1 on every function in the nesting tree.  */
2970 static void
2971 unnest_nesting_tree (struct nesting_info *root)
2973 struct nesting_info *n;
2974 FOR_EACH_NEST_INFO (n, root)
2975 unnest_nesting_tree_1 (n);
2978 /* Free the data structures allocated during this pass. */
2980 static void
2981 free_nesting_tree (struct nesting_info *root)
2983 struct nesting_info *node, *next;
  /* Iterate rather than recurse: fetch the next node before freeing the
     current one, since the iterator reads the node being released.  */
2985 node = iter_nestinfo_start (root);
2988 next = iter_nestinfo_next (node);
2989 delete node->var_map;
2990 delete node->field_map;
2991 delete node->mem_refs;
2992 free (node);
2993 node = next;
2995 while (node);
2998 /* Gimplify a function and all its nested functions. */
2999 static void
3000 gimplify_all_functions (struct cgraph_node *root)
3002 struct cgraph_node *iter;
  /* Skip functions that already have a GIMPLE body.  */
3003 if (!gimple_body (root->decl))
3004 gimplify_function_tree (root->decl);
  /* Recurse into each directly nested function.  */
3005 for (iter = root->nested; iter; iter = iter->next_nested)
3006 gimplify_all_functions (iter);
3009 /* Main entry point for this pass. Process FNDECL and all of its nested
3010 subroutines and turn them into something less tightly bound. */
3012 void
3013 lower_nested_functions (tree fndecl)
3015 struct cgraph_node *cgn;
3016 struct nesting_info *root;
3018 /* If there are no nested functions, there's nothing to do. */
3019 cgn = cgraph_node::get (fndecl);
3020 if (!cgn->nested)
3021 return;
3023 gimplify_all_functions (cgn);
3025 dump_file = dump_begin (TDI_nested, &dump_flags);
3026 if (dump_file)
3027 fprintf (dump_file, "\n;; Function %s\n\n",
3028 lang_hooks.decl_printable_name (fndecl, 2));
3030 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3031 root = create_nesting_tree (cgn);
  /* Pass 1: rewrite references from nested functions to outer decls.  */
3033 walk_all_functions (convert_nonlocal_reference_stmt,
3034 convert_nonlocal_reference_op,
3035 root);
  /* Pass 2: rewrite local references to decls that moved into FRAME.  */
3036 walk_all_functions (convert_local_reference_stmt,
3037 convert_local_reference_op,
3038 root);
  /* Passes 3-4: rewrite non-local gotos and install their receivers.  */
3039 walk_all_functions (convert_nl_goto_reference, NULL, root);
3040 walk_all_functions (convert_nl_goto_receiver, NULL, root);
  /* Iterate trampoline/static-chain conversion to a fixed point, then
     lay out frames and hand the unnested functions to the cgraph.  */
3042 convert_all_function_calls (root);
3043 finalize_nesting_tree (root);
3044 unnest_nesting_tree (root);
3046 free_nesting_tree (root);
3047 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3049 if (dump_file)
3051 dump_end (TDI_nested, dump_file);
3052 dump_file = NULL;
3056 #include "gt-tree-nested.h"