gcc/tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
47 /* The object of this pass is to lower the representation of a set of nested
48 functions in order to expose all of the gory details of the various
49 nonlocal references. We want to do this sooner rather than later, in
50 order to give us more freedom in emitting all of the functions in question.
52 Back in olden times, when gcc was young, we developed an insanely
53 complicated scheme whereby variables which were referenced nonlocally
54 were forced to live in the stack of the declaring function, and then
55 the nested functions magically discovered where these variables were
56 placed. In order for this scheme to function properly, it required
57 that the outer function be partially expanded, then we switch to
58 compiling the inner function, and once done with those we switch back
59 to compiling the outer function. Such delicate ordering requirements
60    make it difficult to do whole translation unit optimizations
61 involving such functions.
63 The implementation here is much more direct. Everything that can be
64 referenced by an inner function is a member of an explicitly created
65 structure herein called the "nonlocal frame struct". The incoming
66 static chain for a nested function is a pointer to this struct in
67 the parent. In this way, we settle on known offsets from a known
68 base, and so are decoupled from the logic that places objects in the
69 function's stack frame. More importantly, we don't have to wait for
70 that to happen -- since the compilation of the inner function is no
71 longer tied to a real stack frame, the nonlocal frame struct can be
72 allocated anywhere. Which means that the outer function is now
73 inlinable.
75 Theory of operation here is very simple. Iterate over all the
76 statements in all the functions (depth first) several times,
77 allocating structures and fields on demand. In general we want to
78 examine inner functions first, so that we can avoid making changes
79 to outer functions which are unnecessary.
81 The order of the passes matters a bit, in that later passes will be
82 skipped if it is discovered that the functions don't actually interact
83 at all. That is, they're nested in the lexical sense but could have
84 been written as independent functions without change. */
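/* As a concrete illustration (a sketch only; field layout, naming and
   the GIMPLE-level details are decided by the code in this file), a
   GNU C nested function such as

	int outer (int n)
	{
	  int x = n;
	  int inner (void) { return x + 1; }
	  return inner ();
	}

   is conceptually rewritten along the lines of

	struct FRAME_outer { int x; };

	static int inner (struct FRAME_outer *CHAIN)
	{
	  return CHAIN->x + 1;
	}

	int outer (int n)
	{
	  struct FRAME_outer FRAME;
	  FRAME.x = n;
	  return inner (&FRAME);
	}

   where FRAME is the nonlocal frame struct described above and CHAIN is
   the incoming static chain of the nested function.  */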
87 struct nesting_info
89 struct nesting_info *outer;
90 struct nesting_info *inner;
91 struct nesting_info *next;
93 hash_map<tree, tree> *field_map;
94 hash_map<tree, tree> *var_map;
95 hash_set<tree *> *mem_refs;
96 bitmap suppress_expansion;
98 tree context;
99 tree new_local_var_chain;
100 tree debug_var_chain;
101 tree frame_type;
102 tree frame_decl;
103 tree chain_field;
104 tree chain_decl;
105 tree nl_goto_field;
107 bool thunk_p;
108 bool any_parm_remapped;
109 bool any_tramp_created;
110 bool any_descr_created;
111 char static_chain_added;
115 /* Iterate over the nesting tree, starting with ROOT, depth first. */
117 static inline struct nesting_info *
118 iter_nestinfo_start (struct nesting_info *root)
120 while (root->inner)
121 root = root->inner;
122 return root;
125 static inline struct nesting_info *
126 iter_nestinfo_next (struct nesting_info *node)
128 if (node->next)
129 return iter_nestinfo_start (node->next);
130 return node->outer;
133 #define FOR_EACH_NEST_INFO(I, ROOT) \
134 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
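/* A minimal usage sketch (the callback name is hypothetical):

	struct nesting_info *n;
	FOR_EACH_NEST_INFO (n, root)
	  process_one_nesting_info (n);

   This visits the innermost functions first, so every nesting_info is
   seen before any of the functions that enclose it.  */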
136 /* Obstack used for the bitmaps in the struct above. */
137 static struct bitmap_obstack nesting_info_bitmap_obstack;
140 /* We're working in so many different function contexts simultaneously
141 that create_tmp_var is dangerous. Prevent mishap. */
142 #define create_tmp_var cant_use_create_tmp_var_here_dummy
144 /* Like create_tmp_var, except record the variable for registration at
145 the given nesting level. */
147 static tree
148 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
150 tree tmp_var;
152 /* If the type is of variable size or a type which must be created by the
153 frontend, something is wrong. Note that we explicitly allow
154      incomplete types here, since we create them ourselves.  */
155 gcc_assert (!TREE_ADDRESSABLE (type));
156 gcc_assert (!TYPE_SIZE_UNIT (type)
157 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
159 tmp_var = create_tmp_var_raw (type, prefix);
160 DECL_CONTEXT (tmp_var) = info->context;
161 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
162 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
163 if (TREE_CODE (type) == COMPLEX_TYPE
164 || TREE_CODE (type) == VECTOR_TYPE)
165 DECL_GIMPLE_REG_P (tmp_var) = 1;
167 info->new_local_var_chain = tmp_var;
169 return tmp_var;
172 /* Take the address of EXP to be used within function CONTEXT.
173 Mark it for addressability as necessary. */
175 tree
176 build_addr (tree exp)
178 mark_addressable (exp);
179 return build_fold_addr_expr (exp);
182 /* Insert FIELD into TYPE, sorted by alignment requirements. */
184 void
185 insert_field_into_struct (tree type, tree field)
187 tree *p;
189 DECL_CONTEXT (field) = type;
191 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
192 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
193 break;
195 DECL_CHAIN (field) = *p;
196 *p = field;
198 /* Set correct alignment for frame struct type. */
199 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
200 SET_TYPE_ALIGN (type, DECL_ALIGN (field));
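/* For example, inserting fields with alignments 8, 1 and 4 (in that
   order, illustrative values only) leaves the field chain sorted as
   8, 4, 1, which keeps the frame layout compact without any explicit
   packing.  */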
203 /* Build or return the RECORD_TYPE that describes the frame state that is
204 shared between INFO->CONTEXT and its nested functions. This record will
205 not be complete until finalize_nesting_tree; up until that point we'll
206 be adding fields as necessary.
208 We also build the DECL that represents this frame in the function. */
210 static tree
211 get_frame_type (struct nesting_info *info)
213 tree type = info->frame_type;
214 if (!type)
216 char *name;
218 type = make_node (RECORD_TYPE);
220 name = concat ("FRAME.",
221 IDENTIFIER_POINTER (DECL_NAME (info->context)),
222 NULL);
223 TYPE_NAME (type) = get_identifier (name);
224 free (name);
226 info->frame_type = type;
228 /* Do not put info->frame_decl on info->new_local_var_chain,
229 so that we can declare it in the lexical blocks, which
230 makes sure virtual regs that end up appearing in its RTL
231 expression get substituted in instantiate_virtual_regs. */
232 info->frame_decl = create_tmp_var_raw (type, "FRAME");
233 DECL_CONTEXT (info->frame_decl) = info->context;
234 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
235 DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;
237 /* ??? Always make it addressable for now, since it is meant to
238 be pointed to by the static chain pointer. This pessimizes
239 when it turns out that no static chains are needed because
240 the nested functions referencing non-local variables are not
241 reachable, but the true pessimization is to create the non-
242 local frame structure in the first place. */
243 TREE_ADDRESSABLE (info->frame_decl) = 1;
246 return type;
249 /* Return true if DECL should be referenced by pointer in the non-local frame
250 structure. */
252 static bool
253 use_pointer_in_frame (tree decl)
255 if (TREE_CODE (decl) == PARM_DECL)
257 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
258 sized DECLs, and inefficient to copy large aggregates. Don't bother
259 moving anything but scalar parameters. */
260 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
262 else
264 /* Variable-sized DECLs can only come from OMP clauses at this point
265 since the gimplifier has already turned the regular variables into
266 pointers. Do the same as the gimplifier. */
267 return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
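/* For instance (hypothetical code), given

	void outer (int n, struct big s)
	{
	  int inner (void) { return n + s.member; }
	  ...
	}

   the scalar parameter N is copied into the frame as a plain "int"
   field, while for the aggregate parameter S the frame only holds a
   "struct big *" pointing at the original parameter.  */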
271 /* Given DECL, a non-locally accessed variable, find or create a field
272 in the non-local frame structure for the given nesting context. */
274 static tree
275 lookup_field_for_decl (struct nesting_info *info, tree decl,
276 enum insert_option insert)
278 gcc_checking_assert (decl_function_context (decl) == info->context);
280 if (insert == NO_INSERT)
282 tree *slot = info->field_map->get (decl);
283 return slot ? *slot : NULL_TREE;
286 tree *slot = &info->field_map->get_or_insert (decl);
287 if (!*slot)
289 tree type = get_frame_type (info);
290 tree field = make_node (FIELD_DECL);
291 DECL_NAME (field) = DECL_NAME (decl);
293 if (use_pointer_in_frame (decl))
295 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
296 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
297 DECL_NONADDRESSABLE_P (field) = 1;
299 else
301 TREE_TYPE (field) = TREE_TYPE (decl);
302 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
303 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
304 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
305 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
306 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
307 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
309 /* Declare the transformation and adjust the original DECL. For a
310 variable or for a parameter when not optimizing, we make it point
311 to the field in the frame directly. For a parameter, we don't do
312 it when optimizing because the variable tracking pass will already
313      do the job.  */
314 if (VAR_P (decl) || !optimize)
316 tree x
317 = build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
318 field, NULL_TREE);
320 /* If the next declaration is a PARM_DECL pointing to the DECL,
321 we need to adjust its VALUE_EXPR directly, since chains of
322 VALUE_EXPRs run afoul of garbage collection. This occurs
323 in Ada for Out parameters that aren't copied in. */
324 tree next = DECL_CHAIN (decl);
325 if (next
326 && TREE_CODE (next) == PARM_DECL
327 && DECL_HAS_VALUE_EXPR_P (next)
328 && DECL_VALUE_EXPR (next) == decl)
329 SET_DECL_VALUE_EXPR (next, x);
331 SET_DECL_VALUE_EXPR (decl, x);
332 DECL_HAS_VALUE_EXPR_P (decl) = 1;
336 insert_field_into_struct (type, field);
337 *slot = field;
339 if (TREE_CODE (decl) == PARM_DECL)
340 info->any_parm_remapped = true;
343 return *slot;
346 /* Build or return the variable that holds the static chain within
347 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
349 static tree
350 get_chain_decl (struct nesting_info *info)
352 tree decl = info->chain_decl;
354 if (!decl)
356 tree type;
358 type = get_frame_type (info->outer);
359 type = build_pointer_type (type);
361 /* Note that this variable is *not* entered into any BIND_EXPR;
362 the construction of this variable is handled specially in
363 expand_function_start and initialize_inlined_parameters.
364      Note also that it's represented as a parameter.  This is closer
365      to the truth, since the initial value does come from
366 the caller. */
367 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
368 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
369 DECL_ARTIFICIAL (decl) = 1;
370 DECL_IGNORED_P (decl) = 1;
371 TREE_USED (decl) = 1;
372 DECL_CONTEXT (decl) = info->context;
373 DECL_ARG_TYPE (decl) = type;
375 /* Tell tree-inline.c that we never write to this variable, so
376 it can copy-prop the replacement value immediately. */
377 TREE_READONLY (decl) = 1;
379 info->chain_decl = decl;
381 if (dump_file
382 && (dump_flags & TDF_DETAILS)
383 && !DECL_STATIC_CHAIN (info->context))
384 fprintf (dump_file, "Setting static-chain for %s\n",
385 lang_hooks.decl_printable_name (info->context, 2));
387 DECL_STATIC_CHAIN (info->context) = 1;
389 return decl;
392 /* Build or return the field within the non-local frame state that holds
393 the static chain for INFO->CONTEXT. This is the way to walk back up
394 multiple nesting levels. */
396 static tree
397 get_chain_field (struct nesting_info *info)
399 tree field = info->chain_field;
401 if (!field)
403 tree type = build_pointer_type (get_frame_type (info->outer));
405 field = make_node (FIELD_DECL);
406 DECL_NAME (field) = get_identifier ("__chain");
407 TREE_TYPE (field) = type;
408 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
409 DECL_NONADDRESSABLE_P (field) = 1;
411 insert_field_into_struct (get_frame_type (info), field);
413 info->chain_field = field;
415 if (dump_file
416 && (dump_flags & TDF_DETAILS)
417 && !DECL_STATIC_CHAIN (info->context))
418 fprintf (dump_file, "Setting static-chain for %s\n",
419 lang_hooks.decl_printable_name (info->context, 2));
421 DECL_STATIC_CHAIN (info->context) = 1;
423 return field;
426 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
428 static tree
429 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
430 gcall *call)
432 tree t;
434 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
435 gimple_call_set_lhs (call, t);
436 if (! gsi_end_p (*gsi))
437 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
438 gsi_insert_before (gsi, call, GSI_SAME_STMT);
440 return t;
444 /* Copy EXP into a temporary. Allocate the temporary in the context of
445 INFO and insert the initialization statement before GSI. */
447 static tree
448 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
450 tree t;
451 gimple *stmt;
453 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
454 stmt = gimple_build_assign (t, exp);
455 if (! gsi_end_p (*gsi))
456 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
457 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
459 return t;
463 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
465 static tree
466 gsi_gimplify_val (struct nesting_info *info, tree exp,
467 gimple_stmt_iterator *gsi)
469 if (is_gimple_val (exp))
470 return exp;
471 else
472 return init_tmp_var (info, exp, gsi);
475 /* Similarly, but copy from the temporary and insert the statement
476 after the iterator. */
478 static tree
479 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
481 tree t;
482 gimple *stmt;
484 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
485 stmt = gimple_build_assign (exp, t);
486 if (! gsi_end_p (*gsi))
487 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
488 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
490 return t;
493 /* Build or return the type used to represent a nested function trampoline. */
495 static GTY(()) tree trampoline_type;
497 static tree
498 get_trampoline_type (struct nesting_info *info)
500 unsigned align, size;
501 tree t;
503 if (trampoline_type)
504 return trampoline_type;
506 align = TRAMPOLINE_ALIGNMENT;
507 size = TRAMPOLINE_SIZE;
509 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
510 then allocate extra space so that we can do dynamic alignment. */
511 if (align > STACK_BOUNDARY)
513 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
514 align = STACK_BOUNDARY;
517 t = build_index_type (size_int (size - 1));
518 t = build_array_type (char_type_node, t);
519 t = build_decl (DECL_SOURCE_LOCATION (info->context),
520 FIELD_DECL, get_identifier ("__data"), t);
521 SET_DECL_ALIGN (t, align);
522 DECL_USER_ALIGN (t) = 1;
524 trampoline_type = make_node (RECORD_TYPE);
525 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
526 TYPE_FIELDS (trampoline_type) = t;
527 layout_type (trampoline_type);
528 DECL_CONTEXT (t) = trampoline_type;
530 return trampoline_type;
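/* The type built above is essentially (sizes and alignment are
   target-dependent; this is only an illustration)

	struct __builtin_trampoline
	{
	  char __data[TRAMPOLINE_SIZE];
	};

   with __data over-allocated, and its declared alignment capped at
   STACK_BOUNDARY, whenever TRAMPOLINE_ALIGNMENT exceeds what the stack
   can guarantee, so that the trampoline can still be aligned
   dynamically at runtime.  */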
533 /* Build or return the type used to represent a nested function descriptor. */
535 static GTY(()) tree descriptor_type;
537 static tree
538 get_descriptor_type (struct nesting_info *info)
540 /* The base alignment is that of a function. */
541 const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
542 tree t;
544 if (descriptor_type)
545 return descriptor_type;
547 t = build_index_type (integer_one_node);
548 t = build_array_type (ptr_type_node, t);
549 t = build_decl (DECL_SOURCE_LOCATION (info->context),
550 FIELD_DECL, get_identifier ("__data"), t);
551 SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
552 DECL_USER_ALIGN (t) = 1;
554 descriptor_type = make_node (RECORD_TYPE);
555 TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
556 TYPE_FIELDS (descriptor_type) = t;
557 layout_type (descriptor_type);
558 DECL_CONTEXT (t) = descriptor_type;
560 return descriptor_type;
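/* The resulting type amounts to

	struct __builtin_descriptor
	{
	  void *__data[2];
	};

   i.e. two pointer-sized words, typically holding the function address
   and its static chain value, aligned at least as strictly as a
   function.  This is a descriptive sketch; the authoritative layout is
   whatever the code above builds.  */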
563 /* Given DECL, a nested function, find or create an element in the
564 var map for this function. */
566 static tree
567 lookup_element_for_decl (struct nesting_info *info, tree decl,
568 enum insert_option insert)
570 if (insert == NO_INSERT)
572 tree *slot = info->var_map->get (decl);
573 return slot ? *slot : NULL_TREE;
576 tree *slot = &info->var_map->get_or_insert (decl);
577 if (!*slot)
578 *slot = build_tree_list (NULL_TREE, NULL_TREE);
580 return (tree) *slot;
583 /* Given DECL, a nested function, create a field in the non-local
584 frame structure for this function. */
586 static tree
587 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
589 tree field = make_node (FIELD_DECL);
590 DECL_NAME (field) = DECL_NAME (decl);
591 TREE_TYPE (field) = type;
592 TREE_ADDRESSABLE (field) = 1;
593 insert_field_into_struct (get_frame_type (info), field);
594 return field;
597 /* Given DECL, a nested function, find or create a field in the non-local
598 frame structure for a trampoline for this function. */
600 static tree
601 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
602 enum insert_option insert)
604 tree elt, field;
606 elt = lookup_element_for_decl (info, decl, insert);
607 if (!elt)
608 return NULL_TREE;
610 field = TREE_PURPOSE (elt);
612 if (!field && insert == INSERT)
614 field = create_field_for_decl (info, decl, get_trampoline_type (info));
615 TREE_PURPOSE (elt) = field;
616 info->any_tramp_created = true;
619 return field;
622 /* Given DECL, a nested function, find or create a field in the non-local
623 frame structure for a descriptor for this function. */
625 static tree
626 lookup_descr_for_decl (struct nesting_info *info, tree decl,
627 enum insert_option insert)
629 tree elt, field;
631 elt = lookup_element_for_decl (info, decl, insert);
632 if (!elt)
633 return NULL_TREE;
635 field = TREE_VALUE (elt);
637 if (!field && insert == INSERT)
639 field = create_field_for_decl (info, decl, get_descriptor_type (info));
640 TREE_VALUE (elt) = field;
641 info->any_descr_created = true;
644 return field;
647 /* Build or return the field within the non-local frame state that holds
648 the non-local goto "jmp_buf". The buffer itself is maintained by the
649 rtl middle-end as dynamic stack space is allocated. */
651 static tree
652 get_nl_goto_field (struct nesting_info *info)
654 tree field = info->nl_goto_field;
655 if (!field)
657 unsigned size;
658 tree type;
660 /* For __builtin_nonlocal_goto, we need N words. The first is the
661 frame pointer, the rest is for the target's stack pointer save
662 area. The number of words is controlled by STACK_SAVEAREA_MODE;
663 not the best interface, but it'll do for now. */
664 if (Pmode == ptr_mode)
665 type = ptr_type_node;
666 else
667 type = lang_hooks.types.type_for_mode (Pmode, 1);
669 scalar_int_mode mode
670 = as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
671 size = GET_MODE_SIZE (mode);
672 size = size / GET_MODE_SIZE (Pmode);
673 size = size + 1;
675 type = build_array_type
676 (type, build_index_type (size_int (size)));
678 field = make_node (FIELD_DECL);
679 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
680 TREE_TYPE (field) = type;
681 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
682 TREE_ADDRESSABLE (field) = 1;
684 insert_field_into_struct (get_frame_type (info), field);
686 info->nl_goto_field = field;
689 return field;
692 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
694 static void
695 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
696 struct nesting_info *info, gimple_seq *pseq)
698 struct walk_stmt_info wi;
700 memset (&wi, 0, sizeof (wi));
701 wi.info = info;
702 wi.val_only = true;
703 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
707 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
709 static inline void
710 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
711 struct nesting_info *info)
713 gimple_seq body = gimple_body (info->context);
714 walk_body (callback_stmt, callback_op, info, &body);
715 gimple_set_body (info->context, body);
718 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
720 static void
721 walk_gimple_omp_for (gomp_for *for_stmt,
722 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
723 struct nesting_info *info)
725 struct walk_stmt_info wi;
726 gimple_seq seq;
727 tree t;
728 size_t i;
730 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
732 seq = NULL;
733 memset (&wi, 0, sizeof (wi));
734 wi.info = info;
735 wi.gsi = gsi_last (seq);
737 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
739 wi.val_only = false;
740 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
741 &wi, NULL);
742 wi.val_only = true;
743 wi.is_lhs = false;
744 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
745 &wi, NULL);
747 wi.val_only = true;
748 wi.is_lhs = false;
749 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
750 &wi, NULL);
752 t = gimple_omp_for_incr (for_stmt, i);
753 gcc_assert (BINARY_CLASS_P (t));
754 wi.val_only = false;
755 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
756 wi.val_only = true;
757 wi.is_lhs = false;
758 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
761 seq = gsi_seq (wi.gsi);
762 if (!gimple_seq_empty_p (seq))
764 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
765 annotate_all_with_location (seq, gimple_location (for_stmt));
766 gimple_seq_add_seq (&pre_body, seq);
767 gimple_omp_for_set_pre_body (for_stmt, pre_body);
771 /* Similarly for ROOT and all functions nested underneath, depth first. */
773 static void
774 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
775 struct nesting_info *root)
777 struct nesting_info *n;
778 FOR_EACH_NEST_INFO (n, root)
779 walk_function (callback_stmt, callback_op, n);
783 /* We have to check for a fairly pathological case.  The operands of a
784    nested function are to be interpreted in the context of the enclosing
785 function. So if any are variably-sized, they will get remapped when the
786 enclosing function is inlined. But that remapping would also have to be
787 done in the types of the PARM_DECLs of the nested function, meaning the
788 argument types of that function will disagree with the arguments in the
789 calls to that function. So we'd either have to make a copy of the nested
790 function corresponding to each time the enclosing function was inlined or
791 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
792 function. The former is not practical. The latter would still require
793 detecting this case to know when to add the conversions. So, for now at
794 least, we don't inline such an enclosing function.
796 We have to do that check recursively, so here return indicating whether
797 FNDECL has such a nested function. ORIG_FN is the function we were
798 trying to inline to use for checking whether any argument is variably
799 modified by anything in it.
801 It would be better to do this in tree-inline.c so that we could give
802 the appropriate warning for why a function can't be inlined, but that's
803 too late since the nesting structure has already been flattened and
804 adding a flag just to record this fact seems a waste of a flag. */
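/* For example (hypothetical code):

	void outer (int n)
	{
	  typedef int row_t[n];
	  void inner (row_t *p) { (*p)[0] = 0; }
	  ...
	}

   Here the type of INNER's parameter P is variably modified by OUTER's
   parameter N, so inlining OUTER elsewhere would also require remapping
   the parameter types of INNER; instead OUTER is simply marked
   DECL_UNINLINABLE in create_nesting_tree below.  */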
806 static bool
807 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
809 struct cgraph_node *cgn = cgraph_node::get (fndecl);
810 tree arg;
812 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
814 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
815 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
816 return true;
818 if (check_for_nested_with_variably_modified (cgn->decl,
819 orig_fndecl))
820 return true;
823 return false;
826 /* Construct our local datastructure describing the function nesting
827 tree rooted by CGN. */
829 static struct nesting_info *
830 create_nesting_tree (struct cgraph_node *cgn)
832 struct nesting_info *info = XCNEW (struct nesting_info);
833 info->field_map = new hash_map<tree, tree>;
834 info->var_map = new hash_map<tree, tree>;
835 info->mem_refs = new hash_set<tree *>;
836 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
837 info->context = cgn->decl;
838 info->thunk_p = cgn->thunk.thunk_p;
840 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
842 struct nesting_info *sub = create_nesting_tree (cgn);
843 sub->outer = info;
844 sub->next = info->inner;
845 info->inner = sub;
848   /* See check_for_nested_with_variably_modified for a discussion of
849      why this has to be here.  */
850 if (check_for_nested_with_variably_modified (info->context, info->context))
851 DECL_UNINLINABLE (info->context) = true;
853 return info;
856 /* Return an expression computing the static chain for TARGET_CONTEXT
857    from INFO->CONTEXT.  Insert any necessary computations before GSI.  */
859 static tree
860 get_static_chain (struct nesting_info *info, tree target_context,
861 gimple_stmt_iterator *gsi)
863 struct nesting_info *i;
864 tree x;
866 if (info->context == target_context)
868 x = build_addr (info->frame_decl);
869 info->static_chain_added |= 1;
871 else
873 x = get_chain_decl (info);
874 info->static_chain_added |= 2;
876 for (i = info->outer; i->context != target_context; i = i->outer)
878 tree field = get_chain_field (i);
880 x = build_simple_mem_ref (x);
881 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
882 x = init_tmp_var (info, x, gsi);
886 return x;
890 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
891 frame as seen from INFO->CONTEXT. Insert any necessary computations
892 before GSI. */
894 static tree
895 get_frame_field (struct nesting_info *info, tree target_context,
896 tree field, gimple_stmt_iterator *gsi)
898 struct nesting_info *i;
899 tree x;
901 if (info->context == target_context)
903 /* Make sure frame_decl gets created. */
904 (void) get_frame_type (info);
905 x = info->frame_decl;
906 info->static_chain_added |= 1;
908 else
910 x = get_chain_decl (info);
911 info->static_chain_added |= 2;
913 for (i = info->outer; i->context != target_context; i = i->outer)
915 tree field = get_chain_field (i);
917 x = build_simple_mem_ref (x);
918 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
919 x = init_tmp_var (info, x, gsi);
922 x = build_simple_mem_ref (x);
925 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
926 return x;
929 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
931 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
932 in the nested function with DECL_VALUE_EXPR set to reference the true
933 variable in the parent function. This is used both for debug info
934 and in OMP lowering. */
936 static tree
937 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
939 tree target_context;
940 struct nesting_info *i;
941 tree x, field, new_decl;
943 tree *slot = &info->var_map->get_or_insert (decl);
945 if (*slot)
946 return *slot;
948 target_context = decl_function_context (decl);
950 /* A copy of the code in get_frame_field, but without the temporaries. */
951 if (info->context == target_context)
953 /* Make sure frame_decl gets created. */
954 (void) get_frame_type (info);
955 x = info->frame_decl;
956 i = info;
957 info->static_chain_added |= 1;
959 else
961 x = get_chain_decl (info);
962 info->static_chain_added |= 2;
963 for (i = info->outer; i->context != target_context; i = i->outer)
965 field = get_chain_field (i);
966 x = build_simple_mem_ref (x);
967 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
969 x = build_simple_mem_ref (x);
972 field = lookup_field_for_decl (i, decl, INSERT);
973 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
974 if (use_pointer_in_frame (decl))
975 x = build_simple_mem_ref (x);
977 /* ??? We should be remapping types as well, surely. */
978 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
979 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
980 DECL_CONTEXT (new_decl) = info->context;
981 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
982 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
983 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
984 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
985 TREE_READONLY (new_decl) = TREE_READONLY (decl);
986 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
987 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
988 if ((TREE_CODE (decl) == PARM_DECL
989 || TREE_CODE (decl) == RESULT_DECL
990 || VAR_P (decl))
991 && DECL_BY_REFERENCE (decl))
992 DECL_BY_REFERENCE (new_decl) = 1;
994 SET_DECL_VALUE_EXPR (new_decl, x);
995 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
997 *slot = new_decl;
998 DECL_CHAIN (new_decl) = info->debug_var_chain;
999 info->debug_var_chain = new_decl;
1001 if (!optimize
1002 && info->context != target_context
1003 && variably_modified_type_p (TREE_TYPE (decl), NULL))
1004 note_nonlocal_vla_type (info, TREE_TYPE (decl));
1006 return new_decl;
1010 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1011 and PARM_DECLs that belong to outer functions.
1013 The rewrite will involve some number of structure accesses back up
1014 the static chain. E.g. for a variable FOO up one nesting level it'll
1015 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
1016 indirections apply to decls for which use_pointer_in_frame is true. */
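/* In GIMPLE form the two-level case above is materialized as a short
   sequence of loads through temporaries, roughly

	D.1 = CHAIN->__chain;
	D.2 = D.1->FOO;

   built via get_frame_field and init_tmp_var/save_tmp_var; the
   temporary names here are purely illustrative.  */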
1018 static tree
1019 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
1021 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1022 struct nesting_info *const info = (struct nesting_info *) wi->info;
1023 tree t = *tp;
1025 *walk_subtrees = 0;
1026 switch (TREE_CODE (t))
1028 case VAR_DECL:
1029 /* Non-automatic variables are never processed. */
1030 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1031 break;
1032 /* FALLTHRU */
1034 case PARM_DECL:
1036 tree x, target_context = decl_function_context (t);
1038 if (info->context == target_context)
1039 break;
1041 wi->changed = true;
1043 if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1044 x = get_nonlocal_debug_decl (info, t);
1045 else
1047 struct nesting_info *i = info;
1048 while (i && i->context != target_context)
1049 i = i->outer;
1050 /* If none of the outer contexts is the target context, this means
1051 that the VAR or PARM_DECL is referenced in a wrong context. */
1052 if (!i)
1053 internal_error ("%s from %s referenced in %s",
1054 IDENTIFIER_POINTER (DECL_NAME (t)),
1055 IDENTIFIER_POINTER (DECL_NAME (target_context)),
1056 IDENTIFIER_POINTER (DECL_NAME (info->context)));
1058 x = lookup_field_for_decl (i, t, INSERT);
1059 x = get_frame_field (info, target_context, x, &wi->gsi);
1060 if (use_pointer_in_frame (t))
1062 x = init_tmp_var (info, x, &wi->gsi);
1063 x = build_simple_mem_ref (x);
1067 if (wi->val_only)
1069 if (wi->is_lhs)
1070 x = save_tmp_var (info, x, &wi->gsi);
1071 else
1072 x = init_tmp_var (info, x, &wi->gsi);
1075 *tp = x;
1077 break;
1079 case LABEL_DECL:
1080 /* We're taking the address of a label from a parent function, but
1081 this is not itself a non-local goto. Mark the label such that it
1082 will not be deleted, much as we would with a label address in
1083 static storage. */
1084 if (decl_function_context (t) != info->context)
1085 FORCED_LABEL (t) = 1;
1086 break;
1088 case ADDR_EXPR:
1090 bool save_val_only = wi->val_only;
1092 wi->val_only = false;
1093 wi->is_lhs = false;
1094 wi->changed = false;
1095 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
1096 wi->val_only = true;
1098 if (wi->changed)
1100 tree save_context;
1102 /* If we changed anything, we might no longer be directly
1103 referencing a decl. */
1104 save_context = current_function_decl;
1105 current_function_decl = info->context;
1106 recompute_tree_invariant_for_addr_expr (t);
1107 current_function_decl = save_context;
1109 /* If the callback converted the address argument in a context
1110 where we only accept variables (and min_invariant, presumably),
1111 then compute the address into a temporary. */
1112 if (save_val_only)
1113 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1114 t, &wi->gsi);
1117 break;
1119 case REALPART_EXPR:
1120 case IMAGPART_EXPR:
1121 case COMPONENT_REF:
1122 case ARRAY_REF:
1123 case ARRAY_RANGE_REF:
1124 case BIT_FIELD_REF:
1125 /* Go down this entire nest and just look at the final prefix and
1126 anything that describes the references. Otherwise, we lose track
1127 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1128 wi->val_only = true;
1129 wi->is_lhs = false;
1130 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1132 if (TREE_CODE (t) == COMPONENT_REF)
1133 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1134 NULL);
1135 else if (TREE_CODE (t) == ARRAY_REF
1136 || TREE_CODE (t) == ARRAY_RANGE_REF)
1138 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1139 wi, NULL);
1140 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1141 wi, NULL);
1142 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1143 wi, NULL);
1146 wi->val_only = false;
1147 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1148 break;
1150 case VIEW_CONVERT_EXPR:
1151 /* Just request to look at the subtrees, leaving val_only and lhs
1152 untouched. This might actually be for !val_only + lhs, in which
1153 case we don't want to force a replacement by a temporary. */
1154 *walk_subtrees = 1;
1155 break;
1157 default:
1158 if (!IS_TYPE_OR_DECL_P (t))
1160 *walk_subtrees = 1;
1161 wi->val_only = true;
1162 wi->is_lhs = false;
1164 break;
1167 return NULL_TREE;
1170 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1171 struct walk_stmt_info *);
1173 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1174 and PARM_DECLs that belong to outer functions. */
1176 static bool
1177 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1179 struct nesting_info *const info = (struct nesting_info *) wi->info;
1180 bool need_chain = false, need_stmts = false;
1181 tree clause, decl;
1182 int dummy;
1183 bitmap new_suppress;
1185 new_suppress = BITMAP_GGC_ALLOC ();
1186 bitmap_copy (new_suppress, info->suppress_expansion);
1188 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1190 switch (OMP_CLAUSE_CODE (clause))
1192 case OMP_CLAUSE_REDUCTION:
1193 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1194 need_stmts = true;
1195 goto do_decl_clause;
1197 case OMP_CLAUSE_LASTPRIVATE:
1198 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1199 need_stmts = true;
1200 goto do_decl_clause;
1202 case OMP_CLAUSE_LINEAR:
1203 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1204 need_stmts = true;
1205 wi->val_only = true;
1206 wi->is_lhs = false;
1207 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1208 &dummy, wi);
1209 goto do_decl_clause;
1211 case OMP_CLAUSE_PRIVATE:
1212 case OMP_CLAUSE_FIRSTPRIVATE:
1213 case OMP_CLAUSE_COPYPRIVATE:
1214 case OMP_CLAUSE_SHARED:
1215 case OMP_CLAUSE_TO_DECLARE:
1216 case OMP_CLAUSE_LINK:
1217 case OMP_CLAUSE_USE_DEVICE_PTR:
1218 case OMP_CLAUSE_IS_DEVICE_PTR:
1219 do_decl_clause:
1220 decl = OMP_CLAUSE_DECL (clause);
1221 if (VAR_P (decl)
1222 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1223 break;
1224 if (decl_function_context (decl) != info->context)
1226 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1227 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1228 bitmap_set_bit (new_suppress, DECL_UID (decl));
1229 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1230 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1231 need_chain = true;
1233 break;
1235 case OMP_CLAUSE_SCHEDULE:
1236 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1237 break;
1238 /* FALLTHRU */
1239 case OMP_CLAUSE_FINAL:
1240 case OMP_CLAUSE_IF:
1241 case OMP_CLAUSE_NUM_THREADS:
1242 case OMP_CLAUSE_DEPEND:
1243 case OMP_CLAUSE_DEVICE:
1244 case OMP_CLAUSE_NUM_TEAMS:
1245 case OMP_CLAUSE_THREAD_LIMIT:
1246 case OMP_CLAUSE_SAFELEN:
1247 case OMP_CLAUSE_SIMDLEN:
1248 case OMP_CLAUSE_PRIORITY:
1249 case OMP_CLAUSE_GRAINSIZE:
1250 case OMP_CLAUSE_NUM_TASKS:
1251 case OMP_CLAUSE_HINT:
1252 case OMP_CLAUSE_NUM_GANGS:
1253 case OMP_CLAUSE_NUM_WORKERS:
1254 case OMP_CLAUSE_VECTOR_LENGTH:
1255 case OMP_CLAUSE_GANG:
1256 case OMP_CLAUSE_WORKER:
1257 case OMP_CLAUSE_VECTOR:
1258 case OMP_CLAUSE_ASYNC:
1259 case OMP_CLAUSE_WAIT:
1260 /* Several OpenACC clauses have optional arguments. Check if they
1261 are present. */
1262 if (OMP_CLAUSE_OPERAND (clause, 0))
1264 wi->val_only = true;
1265 wi->is_lhs = false;
1266 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1267 &dummy, wi);
1270 /* The gang clause accepts two arguments. */
1271 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1272 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1274 wi->val_only = true;
1275 wi->is_lhs = false;
1276 convert_nonlocal_reference_op
1277 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1279 break;
1281 case OMP_CLAUSE_DIST_SCHEDULE:
1282 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1284 wi->val_only = true;
1285 wi->is_lhs = false;
1286 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1287 &dummy, wi);
1289 break;
1291 case OMP_CLAUSE_MAP:
1292 case OMP_CLAUSE_TO:
1293 case OMP_CLAUSE_FROM:
1294 if (OMP_CLAUSE_SIZE (clause))
1296 wi->val_only = true;
1297 wi->is_lhs = false;
1298 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1299 &dummy, wi);
1301 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1302 goto do_decl_clause;
1303 wi->val_only = true;
1304 wi->is_lhs = false;
1305 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1306 wi, NULL);
1307 break;
1309 case OMP_CLAUSE_ALIGNED:
1310 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1312 wi->val_only = true;
1313 wi->is_lhs = false;
1314 convert_nonlocal_reference_op
1315 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1317 /* Like do_decl_clause, but don't add any suppression. */
1318 decl = OMP_CLAUSE_DECL (clause);
1319 if (VAR_P (decl)
1320 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1321 break;
1322 if (decl_function_context (decl) != info->context)
1324 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1325 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1326 need_chain = true;
1328 break;
1330 case OMP_CLAUSE_NOWAIT:
1331 case OMP_CLAUSE_ORDERED:
1332 case OMP_CLAUSE_DEFAULT:
1333 case OMP_CLAUSE_COPYIN:
1334 case OMP_CLAUSE_COLLAPSE:
1335 case OMP_CLAUSE_TILE:
1336 case OMP_CLAUSE_UNTIED:
1337 case OMP_CLAUSE_MERGEABLE:
1338 case OMP_CLAUSE_PROC_BIND:
1339 case OMP_CLAUSE_NOGROUP:
1340 case OMP_CLAUSE_THREADS:
1341 case OMP_CLAUSE_SIMD:
1342 case OMP_CLAUSE_DEFAULTMAP:
1343 case OMP_CLAUSE_SEQ:
1344 case OMP_CLAUSE_INDEPENDENT:
1345 case OMP_CLAUSE_AUTO:
1346 case OMP_CLAUSE_IF_PRESENT:
1347 case OMP_CLAUSE_FINALIZE:
1348 break;
1350 /* The following clause belongs to the OpenACC cache directive, which
1351 is discarded during gimplification. */
1352 case OMP_CLAUSE__CACHE_:
1353 /* The following clauses are only allowed in the OpenMP declare simd
1354 directive, so not seen here. */
1355 case OMP_CLAUSE_UNIFORM:
1356 case OMP_CLAUSE_INBRANCH:
1357 case OMP_CLAUSE_NOTINBRANCH:
1358 /* The following clauses are only allowed on OpenMP cancel and
1359 cancellation point directives, which at this point have already
1360 been lowered into a function call. */
1361 case OMP_CLAUSE_FOR:
1362 case OMP_CLAUSE_PARALLEL:
1363 case OMP_CLAUSE_SECTIONS:
1364 case OMP_CLAUSE_TASKGROUP:
1365 /* The following clauses are only added during OMP lowering; nested
1366 function decomposition happens before that. */
1367 case OMP_CLAUSE__LOOPTEMP_:
1368 case OMP_CLAUSE__SIMDUID_:
1369 case OMP_CLAUSE__GRIDDIM_:
1370 /* Anything else. */
1371 default:
1372 gcc_unreachable ();
1376 info->suppress_expansion = new_suppress;
1378 if (need_stmts)
1379 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1380 switch (OMP_CLAUSE_CODE (clause))
1382 case OMP_CLAUSE_REDUCTION:
1383 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1385 tree old_context
1386 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1387 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1388 = info->context;
1389 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1390 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1391 = info->context;
1392 walk_body (convert_nonlocal_reference_stmt,
1393 convert_nonlocal_reference_op, info,
1394 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1395 walk_body (convert_nonlocal_reference_stmt,
1396 convert_nonlocal_reference_op, info,
1397 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1398 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1399 = old_context;
1400 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1401 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1402 = old_context;
1404 break;
1406 case OMP_CLAUSE_LASTPRIVATE:
1407 walk_body (convert_nonlocal_reference_stmt,
1408 convert_nonlocal_reference_op, info,
1409 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1410 break;
1412 case OMP_CLAUSE_LINEAR:
1413 walk_body (convert_nonlocal_reference_stmt,
1414 convert_nonlocal_reference_op, info,
1415 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1416 break;
1418 default:
1419 break;
1422 return need_chain;
1425 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1427 static void
1428 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1430 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1431 type = TREE_TYPE (type);
1433 if (TYPE_NAME (type)
1434 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1435 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1436 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1438 while (POINTER_TYPE_P (type)
1439 || TREE_CODE (type) == VECTOR_TYPE
1440 || TREE_CODE (type) == FUNCTION_TYPE
1441 || TREE_CODE (type) == METHOD_TYPE)
1442 type = TREE_TYPE (type);
1444 if (TREE_CODE (type) == ARRAY_TYPE)
1446 tree domain, t;
1448 note_nonlocal_vla_type (info, TREE_TYPE (type));
1449 domain = TYPE_DOMAIN (type);
1450 if (domain)
1452 t = TYPE_MIN_VALUE (domain);
1453 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1454 && decl_function_context (t) != info->context)
1455 get_nonlocal_debug_decl (info, t);
1456 t = TYPE_MAX_VALUE (domain);
1457 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1458 && decl_function_context (t) != info->context)
1459 get_nonlocal_debug_decl (info, t);
1464 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1465 PARM_DECLs that belong to outer functions. This handles statements
1466 that are not handled via the standard recursion done in
1467 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1468 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1469 operands of STMT have been handled by this function. */
1471 static tree
1472 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1473 struct walk_stmt_info *wi)
1475 struct nesting_info *info = (struct nesting_info *) wi->info;
1476 tree save_local_var_chain;
1477 bitmap save_suppress;
1478 gimple *stmt = gsi_stmt (*gsi);
1480 switch (gimple_code (stmt))
1482 case GIMPLE_GOTO:
1483 /* Don't walk non-local gotos for now. */
1484 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1486 wi->val_only = true;
1487 wi->is_lhs = false;
1488 *handled_ops_p = false;
1489 return NULL_TREE;
1491 break;
1493 case GIMPLE_OMP_PARALLEL:
1494 case GIMPLE_OMP_TASK:
1495 save_suppress = info->suppress_expansion;
1496 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1497 wi))
1499 tree c, decl;
1500 decl = get_chain_decl (info);
1501 c = build_omp_clause (gimple_location (stmt),
1502 OMP_CLAUSE_FIRSTPRIVATE);
1503 OMP_CLAUSE_DECL (c) = decl;
1504 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1505 gimple_omp_taskreg_set_clauses (stmt, c);
1508 save_local_var_chain = info->new_local_var_chain;
1509 info->new_local_var_chain = NULL;
1511 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1512 info, gimple_omp_body_ptr (stmt));
1514 if (info->new_local_var_chain)
1515 declare_vars (info->new_local_var_chain,
1516 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1517 false);
1518 info->new_local_var_chain = save_local_var_chain;
1519 info->suppress_expansion = save_suppress;
1520 break;
1522 case GIMPLE_OMP_FOR:
1523 save_suppress = info->suppress_expansion;
1524 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1525 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1526 convert_nonlocal_reference_stmt,
1527 convert_nonlocal_reference_op, info);
1528 walk_body (convert_nonlocal_reference_stmt,
1529 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1530 info->suppress_expansion = save_suppress;
1531 break;
1533 case GIMPLE_OMP_SECTIONS:
1534 save_suppress = info->suppress_expansion;
1535 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1536 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1537 info, gimple_omp_body_ptr (stmt));
1538 info->suppress_expansion = save_suppress;
1539 break;
1541 case GIMPLE_OMP_SINGLE:
1542 save_suppress = info->suppress_expansion;
1543 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1544 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1545 info, gimple_omp_body_ptr (stmt));
1546 info->suppress_expansion = save_suppress;
1547 break;
1549 case GIMPLE_OMP_TARGET:
1550 if (!is_gimple_omp_offloaded (stmt))
1552 save_suppress = info->suppress_expansion;
1553 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1554 wi);
1555 info->suppress_expansion = save_suppress;
1556 walk_body (convert_nonlocal_reference_stmt,
1557 convert_nonlocal_reference_op, info,
1558 gimple_omp_body_ptr (stmt));
1559 break;
1561 save_suppress = info->suppress_expansion;
1562 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1563 wi))
1565 tree c, decl;
1566 decl = get_chain_decl (info);
1567 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1568 OMP_CLAUSE_DECL (c) = decl;
1569 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1570 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1571 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1572 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1575 save_local_var_chain = info->new_local_var_chain;
1576 info->new_local_var_chain = NULL;
1578 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1579 info, gimple_omp_body_ptr (stmt));
1581 if (info->new_local_var_chain)
1582 declare_vars (info->new_local_var_chain,
1583 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1584 false);
1585 info->new_local_var_chain = save_local_var_chain;
1586 info->suppress_expansion = save_suppress;
1587 break;
1589 case GIMPLE_OMP_TEAMS:
1590 save_suppress = info->suppress_expansion;
1591 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1592 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1593 info, gimple_omp_body_ptr (stmt));
1594 info->suppress_expansion = save_suppress;
1595 break;
1597 case GIMPLE_OMP_SECTION:
1598 case GIMPLE_OMP_MASTER:
1599 case GIMPLE_OMP_TASKGROUP:
1600 case GIMPLE_OMP_ORDERED:
1601 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1602 info, gimple_omp_body_ptr (stmt));
1603 break;
1605 case GIMPLE_BIND:
1607 gbind *bind_stmt = as_a <gbind *> (stmt);
1609 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1610 if (TREE_CODE (var) == NAMELIST_DECL)
1612 /* Adjust decls mentioned in NAMELIST_DECL. */
1613 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1614 tree decl;
1615 unsigned int i;
1617 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1619 if (VAR_P (decl)
1620 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1621 continue;
1622 if (decl_function_context (decl) != info->context)
1623 CONSTRUCTOR_ELT (decls, i)->value
1624 = get_nonlocal_debug_decl (info, decl);
1628 *handled_ops_p = false;
1629 return NULL_TREE;
1631 case GIMPLE_COND:
1632 wi->val_only = true;
1633 wi->is_lhs = false;
1634 *handled_ops_p = false;
1635 return NULL_TREE;
1637 default:
1638 /* For every other statement that we are not interested in
1639 handling here, let the walker traverse the operands. */
1640 *handled_ops_p = false;
1641 return NULL_TREE;
1644 /* We have handled all of STMT operands, no need to traverse the operands. */
1645 *handled_ops_p = true;
1646 return NULL_TREE;
1650 /* A subroutine of convert_local_reference. Create a local variable
1651 in the parent function with DECL_VALUE_EXPR set to reference the
1652 field in FRAME. This is used both for debug info and in OMP
1653 lowering. */
1655 static tree
1656 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1658 tree x, new_decl;
1660 tree *slot = &info->var_map->get_or_insert (decl);
1661 if (*slot)
1662 return *slot;
1664 /* Make sure frame_decl gets created. */
1665 (void) get_frame_type (info);
1666 x = info->frame_decl;
1667 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1669 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1670 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1671 DECL_CONTEXT (new_decl) = info->context;
1672 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1673 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1674 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1675 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1676 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1677 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1678 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1679 if ((TREE_CODE (decl) == PARM_DECL
1680 || TREE_CODE (decl) == RESULT_DECL
1681 || VAR_P (decl))
1682 && DECL_BY_REFERENCE (decl))
1683 DECL_BY_REFERENCE (new_decl) = 1;
1685 SET_DECL_VALUE_EXPR (new_decl, x);
1686 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1687 *slot = new_decl;
1689 DECL_CHAIN (new_decl) = info->debug_var_chain;
1690 info->debug_var_chain = new_decl;
1692 /* Do not emit debug info twice. */
1693 DECL_IGNORED_P (decl) = 1;
1695 return new_decl;
1699 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1700 and PARM_DECLs that were referenced by inner nested functions.
1701 The rewrite will be a structure reference to the local frame variable. */
1703 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1705 static tree
1706 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1708 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1709 struct nesting_info *const info = (struct nesting_info *) wi->info;
1710 tree t = *tp, field, x;
1711 bool save_val_only;
1713 *walk_subtrees = 0;
1714 switch (TREE_CODE (t))
1716 case VAR_DECL:
1717 /* Non-automatic variables are never processed. */
1718 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1719 break;
1720 /* FALLTHRU */
1722 case PARM_DECL:
1723 if (t != info->frame_decl && decl_function_context (t) == info->context)
1725 /* If we copied a pointer to the frame, then the original decl
1726 is used unchanged in the parent function. */
1727 if (use_pointer_in_frame (t))
1728 break;
1730 /* No need to transform anything if no child references the
1731 variable. */
1732 field = lookup_field_for_decl (info, t, NO_INSERT);
1733 if (!field)
1734 break;
1735 wi->changed = true;
1737 if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1738 x = get_local_debug_decl (info, t, field);
1739 else
1740 x = get_frame_field (info, info->context, field, &wi->gsi);
1742 if (wi->val_only)
1744 if (wi->is_lhs)
1745 x = save_tmp_var (info, x, &wi->gsi);
1746 else
1747 x = init_tmp_var (info, x, &wi->gsi);
1750 *tp = x;
1752 break;
1754 case ADDR_EXPR:
1755 save_val_only = wi->val_only;
1756 wi->val_only = false;
1757 wi->is_lhs = false;
1758 wi->changed = false;
1759 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1760 wi->val_only = save_val_only;
1762 /* If we converted anything ... */
1763 if (wi->changed)
1765 tree save_context;
1767 /* Then the frame decl is now addressable. */
1768 TREE_ADDRESSABLE (info->frame_decl) = 1;
1770 save_context = current_function_decl;
1771 current_function_decl = info->context;
1772 recompute_tree_invariant_for_addr_expr (t);
1773 current_function_decl = save_context;
1775 /* If we are in a context where we only accept values, then
1776 compute the address into a temporary. */
1777 if (save_val_only)
1778 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1779 t, &wi->gsi);
1781 break;
1783 case REALPART_EXPR:
1784 case IMAGPART_EXPR:
1785 case COMPONENT_REF:
1786 case ARRAY_REF:
1787 case ARRAY_RANGE_REF:
1788 case BIT_FIELD_REF:
1789 /* Go down this entire nest and just look at the final prefix and
1790 anything that describes the references. Otherwise, we lose track
1791 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1792 save_val_only = wi->val_only;
1793 wi->val_only = true;
1794 wi->is_lhs = false;
1795 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1797 if (TREE_CODE (t) == COMPONENT_REF)
1798 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1799 NULL);
1800 else if (TREE_CODE (t) == ARRAY_REF
1801 || TREE_CODE (t) == ARRAY_RANGE_REF)
1803 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1804 NULL);
1805 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1806 NULL);
1807 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1808 NULL);
1811 wi->val_only = false;
1812 walk_tree (tp, convert_local_reference_op, wi, NULL);
1813 wi->val_only = save_val_only;
1814 break;
1816 case MEM_REF:
1817 save_val_only = wi->val_only;
1818 wi->val_only = true;
1819 wi->is_lhs = false;
1820 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1821 wi, NULL);
1822 /* We need to re-fold the MEM_REF as component references as
1823	 part of an ADDR_EXPR address are not allowed.  But we cannot
1824 fold here, as the chain record type is not yet finalized. */
1825 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1826 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1827 info->mem_refs->add (tp);
1828 wi->val_only = save_val_only;
1829 break;
1831 case VIEW_CONVERT_EXPR:
1832 /* Just request to look at the subtrees, leaving val_only and lhs
1833 untouched. This might actually be for !val_only + lhs, in which
1834 case we don't want to force a replacement by a temporary. */
1835 *walk_subtrees = 1;
1836 break;
1838 default:
1839 if (!IS_TYPE_OR_DECL_P (t))
1841 *walk_subtrees = 1;
1842 wi->val_only = true;
1843 wi->is_lhs = false;
1845 break;
1848 return NULL_TREE;
1851 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1852 struct walk_stmt_info *);
1854 /* Helper for convert_local_reference. Convert all the references in
1855 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1857 static bool
1858 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1860 struct nesting_info *const info = (struct nesting_info *) wi->info;
1861 bool need_frame = false, need_stmts = false;
1862 tree clause, decl;
1863 int dummy;
1864 bitmap new_suppress;
1866 new_suppress = BITMAP_GGC_ALLOC ();
1867 bitmap_copy (new_suppress, info->suppress_expansion);
1869 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1871 switch (OMP_CLAUSE_CODE (clause))
1873 case OMP_CLAUSE_REDUCTION:
1874 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1875 need_stmts = true;
1876 goto do_decl_clause;
1878 case OMP_CLAUSE_LASTPRIVATE:
1879 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1880 need_stmts = true;
1881 goto do_decl_clause;
1883 case OMP_CLAUSE_LINEAR:
1884 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1885 need_stmts = true;
1886 wi->val_only = true;
1887 wi->is_lhs = false;
1888 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1889 wi);
1890 goto do_decl_clause;
1892 case OMP_CLAUSE_PRIVATE:
1893 case OMP_CLAUSE_FIRSTPRIVATE:
1894 case OMP_CLAUSE_COPYPRIVATE:
1895 case OMP_CLAUSE_SHARED:
1896 case OMP_CLAUSE_TO_DECLARE:
1897 case OMP_CLAUSE_LINK:
1898 case OMP_CLAUSE_USE_DEVICE_PTR:
1899 case OMP_CLAUSE_IS_DEVICE_PTR:
1900 do_decl_clause:
1901 decl = OMP_CLAUSE_DECL (clause);
1902 if (VAR_P (decl)
1903 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1904 break;
1905 if (decl_function_context (decl) == info->context
1906 && !use_pointer_in_frame (decl))
1908 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1909 if (field)
1911 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1912 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1913 bitmap_set_bit (new_suppress, DECL_UID (decl));
1914 OMP_CLAUSE_DECL (clause)
1915 = get_local_debug_decl (info, decl, field);
1916 need_frame = true;
1919 break;
1921 case OMP_CLAUSE_SCHEDULE:
1922 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1923 break;
1924 /* FALLTHRU */
1925 case OMP_CLAUSE_FINAL:
1926 case OMP_CLAUSE_IF:
1927 case OMP_CLAUSE_NUM_THREADS:
1928 case OMP_CLAUSE_DEPEND:
1929 case OMP_CLAUSE_DEVICE:
1930 case OMP_CLAUSE_NUM_TEAMS:
1931 case OMP_CLAUSE_THREAD_LIMIT:
1932 case OMP_CLAUSE_SAFELEN:
1933 case OMP_CLAUSE_SIMDLEN:
1934 case OMP_CLAUSE_PRIORITY:
1935 case OMP_CLAUSE_GRAINSIZE:
1936 case OMP_CLAUSE_NUM_TASKS:
1937 case OMP_CLAUSE_HINT:
1938 case OMP_CLAUSE_NUM_GANGS:
1939 case OMP_CLAUSE_NUM_WORKERS:
1940 case OMP_CLAUSE_VECTOR_LENGTH:
1941 case OMP_CLAUSE_GANG:
1942 case OMP_CLAUSE_WORKER:
1943 case OMP_CLAUSE_VECTOR:
1944 case OMP_CLAUSE_ASYNC:
1945 case OMP_CLAUSE_WAIT:
1946 /* Several OpenACC clauses have optional arguments. Check if they
1947 are present. */
1948 if (OMP_CLAUSE_OPERAND (clause, 0))
1950 wi->val_only = true;
1951 wi->is_lhs = false;
1952 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1953 &dummy, wi);
1956 /* The gang clause accepts two arguments. */
1957 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1958 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1960 wi->val_only = true;
1961 wi->is_lhs = false;
1962 convert_nonlocal_reference_op
1963 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1965 break;
1967 case OMP_CLAUSE_DIST_SCHEDULE:
1968 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1970 wi->val_only = true;
1971 wi->is_lhs = false;
1972 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1973 &dummy, wi);
1975 break;
1977 case OMP_CLAUSE_MAP:
1978 case OMP_CLAUSE_TO:
1979 case OMP_CLAUSE_FROM:
1980 if (OMP_CLAUSE_SIZE (clause))
1982 wi->val_only = true;
1983 wi->is_lhs = false;
1984 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1985 &dummy, wi);
1987 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1988 goto do_decl_clause;
1989 wi->val_only = true;
1990 wi->is_lhs = false;
1991 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1992 wi, NULL);
1993 break;
1995 case OMP_CLAUSE_ALIGNED:
1996 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1998 wi->val_only = true;
1999 wi->is_lhs = false;
2000 convert_local_reference_op
2001 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2003 /* Like do_decl_clause, but don't add any suppression. */
2004 decl = OMP_CLAUSE_DECL (clause);
2005 if (VAR_P (decl)
2006 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2007 break;
2008 if (decl_function_context (decl) == info->context
2009 && !use_pointer_in_frame (decl))
2011 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2012 if (field)
2014 OMP_CLAUSE_DECL (clause)
2015 = get_local_debug_decl (info, decl, field);
2016 need_frame = true;
2019 break;
2021 case OMP_CLAUSE_NOWAIT:
2022 case OMP_CLAUSE_ORDERED:
2023 case OMP_CLAUSE_DEFAULT:
2024 case OMP_CLAUSE_COPYIN:
2025 case OMP_CLAUSE_COLLAPSE:
2026 case OMP_CLAUSE_TILE:
2027 case OMP_CLAUSE_UNTIED:
2028 case OMP_CLAUSE_MERGEABLE:
2029 case OMP_CLAUSE_PROC_BIND:
2030 case OMP_CLAUSE_NOGROUP:
2031 case OMP_CLAUSE_THREADS:
2032 case OMP_CLAUSE_SIMD:
2033 case OMP_CLAUSE_DEFAULTMAP:
2034 case OMP_CLAUSE_SEQ:
2035 case OMP_CLAUSE_INDEPENDENT:
2036 case OMP_CLAUSE_AUTO:
2037 case OMP_CLAUSE_IF_PRESENT:
2038 case OMP_CLAUSE_FINALIZE:
2039 break;
2041 /* The following clause belongs to the OpenACC cache directive, which
2042 is discarded during gimplification. */
2043 case OMP_CLAUSE__CACHE_:
2044 /* The following clauses are only allowed in the OpenMP declare simd
2045 directive, so not seen here. */
2046 case OMP_CLAUSE_UNIFORM:
2047 case OMP_CLAUSE_INBRANCH:
2048 case OMP_CLAUSE_NOTINBRANCH:
2049 /* The following clauses are only allowed on OpenMP cancel and
2050 cancellation point directives, which at this point have already
2051 been lowered into a function call. */
2052 case OMP_CLAUSE_FOR:
2053 case OMP_CLAUSE_PARALLEL:
2054 case OMP_CLAUSE_SECTIONS:
2055 case OMP_CLAUSE_TASKGROUP:
2056 /* The following clauses are only added during OMP lowering; nested
2057 function decomposition happens before that. */
2058 case OMP_CLAUSE__LOOPTEMP_:
2059 case OMP_CLAUSE__SIMDUID_:
2060 case OMP_CLAUSE__GRIDDIM_:
2061 /* Anything else. */
2062 default:
2063 gcc_unreachable ();
2067 info->suppress_expansion = new_suppress;
2069 if (need_stmts)
2070 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2071 switch (OMP_CLAUSE_CODE (clause))
2073 case OMP_CLAUSE_REDUCTION:
2074 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2076 tree old_context
2077 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2078 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2079 = info->context;
2080 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2081 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2082 = info->context;
2083 walk_body (convert_local_reference_stmt,
2084 convert_local_reference_op, info,
2085 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2086 walk_body (convert_local_reference_stmt,
2087 convert_local_reference_op, info,
2088 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2089 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2090 = old_context;
2091 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2092 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2093 = old_context;
2095 break;
2097 case OMP_CLAUSE_LASTPRIVATE:
2098 walk_body (convert_local_reference_stmt,
2099 convert_local_reference_op, info,
2100 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2101 break;
2103 case OMP_CLAUSE_LINEAR:
2104 walk_body (convert_local_reference_stmt,
2105 convert_local_reference_op, info,
2106 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2107 break;
2109 default:
2110 break;
2113 return need_frame;
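/* An illustrative sketch (names invented): if V has been given a field in
   FOO's frame because some nested function uses it, then for a directive in
   FOO's own body such as

	#pragma omp parallel shared (v)

   the clause handling above replaces the clause decl with the local debug
   decl whose DECL_VALUE_EXPR is the frame field, records V's DECL_UID in the
   suppression bitmap so that operand walks inside the region keep using that
   debug decl rather than a direct FRAME.1.v reference, and returns true so
   that the statement walker below can add a clause for the frame object
   itself.  */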
2117 /* Called via walk_function+walk_gimple_stmt, rewrite all references to
2118 VAR_DECLs and PARM_DECLs that were referenced by inner nested functions.
2119 The rewrite will be a structure reference to the local frame variable. */
2121 static tree
2122 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2123 struct walk_stmt_info *wi)
2125 struct nesting_info *info = (struct nesting_info *) wi->info;
2126 tree save_local_var_chain;
2127 bitmap save_suppress;
2128 char save_static_chain_added;
2129 bool frame_decl_added;
2130 gimple *stmt = gsi_stmt (*gsi);
2132 switch (gimple_code (stmt))
2134 case GIMPLE_OMP_PARALLEL:
2135 case GIMPLE_OMP_TASK:
2136 save_suppress = info->suppress_expansion;
2137 frame_decl_added = false;
2138 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
2139 wi))
2141 tree c = build_omp_clause (gimple_location (stmt),
2142 OMP_CLAUSE_SHARED);
2143 (void) get_frame_type (info);
2144 OMP_CLAUSE_DECL (c) = info->frame_decl;
2145 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2146 gimple_omp_taskreg_set_clauses (stmt, c);
2147 info->static_chain_added |= 4;
2148 frame_decl_added = true;
2151 save_local_var_chain = info->new_local_var_chain;
2152 save_static_chain_added = info->static_chain_added;
2153 info->new_local_var_chain = NULL;
2154 info->static_chain_added = 0;
2156 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2157 gimple_omp_body_ptr (stmt));
2159 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2161 tree c = build_omp_clause (gimple_location (stmt),
2162 OMP_CLAUSE_SHARED);
2163 (void) get_frame_type (info);
2164 OMP_CLAUSE_DECL (c) = info->frame_decl;
2165 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2166 info->static_chain_added |= 4;
2167 gimple_omp_taskreg_set_clauses (stmt, c);
2169 if (info->new_local_var_chain)
2170 declare_vars (info->new_local_var_chain,
2171 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2172 info->new_local_var_chain = save_local_var_chain;
2173 info->suppress_expansion = save_suppress;
2174 info->static_chain_added |= save_static_chain_added;
2175 break;
2177 case GIMPLE_OMP_FOR:
2178 save_suppress = info->suppress_expansion;
2179 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
2180 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
2181 convert_local_reference_stmt,
2182 convert_local_reference_op, info);
2183 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2184 info, gimple_omp_body_ptr (stmt));
2185 info->suppress_expansion = save_suppress;
2186 break;
2188 case GIMPLE_OMP_SECTIONS:
2189 save_suppress = info->suppress_expansion;
2190 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
2191 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2192 info, gimple_omp_body_ptr (stmt));
2193 info->suppress_expansion = save_suppress;
2194 break;
2196 case GIMPLE_OMP_SINGLE:
2197 save_suppress = info->suppress_expansion;
2198 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
2199 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2200 info, gimple_omp_body_ptr (stmt));
2201 info->suppress_expansion = save_suppress;
2202 break;
2204 case GIMPLE_OMP_TARGET:
2205 if (!is_gimple_omp_offloaded (stmt))
2207 save_suppress = info->suppress_expansion;
2208 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
2209 info->suppress_expansion = save_suppress;
2210 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2211 info, gimple_omp_body_ptr (stmt));
2212 break;
2214 save_suppress = info->suppress_expansion;
2215 frame_decl_added = false;
2216 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
2218 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2219 (void) get_frame_type (info);
2220 OMP_CLAUSE_DECL (c) = info->frame_decl;
2221 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2222 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2223 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2224 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2225 info->static_chain_added |= 4;
2226 frame_decl_added = true;
2229 save_local_var_chain = info->new_local_var_chain;
2230 save_static_chain_added = info->static_chain_added;
2231 info->new_local_var_chain = NULL;
2232 info->static_chain_added = 0;
2234 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2235 gimple_omp_body_ptr (stmt));
2237 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2239 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2240 (void) get_frame_type (info);
2241 OMP_CLAUSE_DECL (c) = info->frame_decl;
2242 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2243 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2244 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2245 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2246 info->static_chain_added |= 4;
2249 if (info->new_local_var_chain)
2250 declare_vars (info->new_local_var_chain,
2251 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2252 info->new_local_var_chain = save_local_var_chain;
2253 info->suppress_expansion = save_suppress;
2254 info->static_chain_added |= save_static_chain_added;
2255 break;
2257 case GIMPLE_OMP_TEAMS:
2258 save_suppress = info->suppress_expansion;
2259 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2260 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2261 info, gimple_omp_body_ptr (stmt));
2262 info->suppress_expansion = save_suppress;
2263 break;
2265 case GIMPLE_OMP_SECTION:
2266 case GIMPLE_OMP_MASTER:
2267 case GIMPLE_OMP_TASKGROUP:
2268 case GIMPLE_OMP_ORDERED:
2269 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2270 info, gimple_omp_body_ptr (stmt));
2271 break;
2273 case GIMPLE_COND:
2274 wi->val_only = true;
2275 wi->is_lhs = false;
2276 *handled_ops_p = false;
2277 return NULL_TREE;
2279 case GIMPLE_ASSIGN:
2280 if (gimple_clobber_p (stmt))
2282 tree lhs = gimple_assign_lhs (stmt);
2283 if (!use_pointer_in_frame (lhs)
2284 && lookup_field_for_decl (info, lhs, NO_INSERT))
2286 gsi_replace (gsi, gimple_build_nop (), true);
2287 break;
2290 *handled_ops_p = false;
2291 return NULL_TREE;
2293 case GIMPLE_BIND:
2294 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2295 var;
2296 var = DECL_CHAIN (var))
2297 if (TREE_CODE (var) == NAMELIST_DECL)
2299 /* Adjust decls mentioned in NAMELIST_DECL. */
2300 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2301 tree decl;
2302 unsigned int i;
2304 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2306 if (VAR_P (decl)
2307 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2308 continue;
2309 if (decl_function_context (decl) == info->context
2310 && !use_pointer_in_frame (decl))
2312 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2313 if (field)
2315 CONSTRUCTOR_ELT (decls, i)->value
2316 = get_local_debug_decl (info, decl, field);
2322 *handled_ops_p = false;
2323 return NULL_TREE;
2325 default:
2326 /* For every other statement that we are not interested in
2327 handling here, let the walker traverse the operands. */
2328 *handled_ops_p = false;
2329 return NULL_TREE;
2332 /* Indicate that we have handled all the operands ourselves. */
2333 *handled_ops_p = true;
2334 return NULL_TREE;
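/* An illustrative sketch (names invented, dump syntax approximate):
   continuing the example above, when the clause walk reports that the frame
   object is needed inside a region, the region's clause list is extended to
   cover it as well, roughly

	#pragma omp parallel shared (v)   ->   shared (v) shared (FRAME.1)

   while an offloaded target region instead receives a map (tofrom:) clause
   for FRAME.1 sized by its DECL_SIZE_UNIT.  Clobbers of frame-resident
   locals are replaced by nops, since their storage now lives in the frame
   object.  */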
2338 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2339 that reference labels from outer functions. The rewrite will be a
2340 call to __builtin_nonlocal_goto. */
2342 static tree
2343 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2344 struct walk_stmt_info *wi)
2346 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2347 tree label, new_label, target_context, x, field;
2348 gcall *call;
2349 gimple *stmt = gsi_stmt (*gsi);
2351 if (gimple_code (stmt) != GIMPLE_GOTO)
2353 *handled_ops_p = false;
2354 return NULL_TREE;
2357 label = gimple_goto_dest (stmt);
2358 if (TREE_CODE (label) != LABEL_DECL)
2360 *handled_ops_p = false;
2361 return NULL_TREE;
2364 target_context = decl_function_context (label);
2365 if (target_context == info->context)
2367 *handled_ops_p = false;
2368 return NULL_TREE;
2371 for (i = info->outer; target_context != i->context; i = i->outer)
2372 continue;
2374 /* The original user label may also be used for a normal goto, therefore
2375 we must create a new label that will actually receive the abnormal
2376 control transfer. This new label will be marked LABEL_NONLOCAL; this
2377 mark will trigger proper behavior in the cfg, as well as cause the
2378 (hairy target-specific) non-local goto receiver code to be generated
2379 when we expand rtl. Enter this association into var_map so that we
2380 can insert the new label into the IL during a second pass. */
2381 tree *slot = &i->var_map->get_or_insert (label);
2382 if (*slot == NULL)
2384 new_label = create_artificial_label (UNKNOWN_LOCATION);
2385 DECL_NONLOCAL (new_label) = 1;
2386 *slot = new_label;
2388 else
2389 new_label = *slot;
2391 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2392 field = get_nl_goto_field (i);
2393 x = get_frame_field (info, target_context, field, gsi);
2394 x = build_addr (x);
2395 x = gsi_gimplify_val (info, x, gsi);
2396 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2397 2, build_addr (new_label), x);
2398 gsi_replace (gsi, call, false);
2400 /* We have handled all of STMT's operands, no need to keep going. */
2401 *handled_ops_p = true;
2402 return NULL_TREE;
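/* An illustrative sketch (syntax approximate): given the GNU C input

	void
	foo (void)
	{
	  __label__ out;
	  void bar (void) { goto out; }
	  bar ();
	 out:;
	}

   the GOTO inside BAR is rewritten into roughly

	__builtin_nonlocal_goto (&<new_label>, &CHAIN.2-><nl_goto_field>);

   where <new_label> is a fresh artificial label marked DECL_NONLOCAL and
   recorded in FOO's var_map, and the second operand is the address of the
   non-local goto save-area field in FOO's frame, reached through BAR's
   static chain.  */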
2406 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_LABELs whose labels
2407 are referenced via nonlocal goto from a nested function. The rewrite
2408 will involve installing a newly generated DECL_NONLOCAL label, and
2409 (potentially) a branch around the rtl gunk that is assumed to be
2410 attached to such a label. */
2412 static tree
2413 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2414 struct walk_stmt_info *wi)
2416 struct nesting_info *const info = (struct nesting_info *) wi->info;
2417 tree label, new_label;
2418 gimple_stmt_iterator tmp_gsi;
2419 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2421 if (!stmt)
2423 *handled_ops_p = false;
2424 return NULL_TREE;
2427 label = gimple_label_label (stmt);
2429 tree *slot = info->var_map->get (label);
2430 if (!slot)
2432 *handled_ops_p = false;
2433 return NULL_TREE;
2436 /* If there's any possibility that the previous statement falls through,
2437 then we must branch around the new non-local label. */
2438 tmp_gsi = wi->gsi;
2439 gsi_prev (&tmp_gsi);
2440 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2442 gimple *stmt = gimple_build_goto (label);
2443 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2446 new_label = (tree) *slot;
2447 stmt = gimple_build_label (new_label);
2448 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2450 *handled_ops_p = true;
2451 return NULL_TREE;
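/* An illustrative sketch: continuing the example above, the original label in
   FOO keeps receiving ordinary gotos, while the new DECL_NONLOCAL label is
   inserted just before it to receive the abnormal transfer, preceded by a
   branch around it whenever the previous statement may fall through:

	goto out;		<- only if the preceding stmt may fall thru
	<new_label>:
	out:;
   */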
2455 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2456 of nested functions that require the use of trampolines. The rewrite
2457 will involve a reference to a trampoline generated for the occasion. */
2459 static tree
2460 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2462 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2463 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2464 tree t = *tp, decl, target_context, x, builtin;
2465 bool descr;
2466 gcall *call;
2468 *walk_subtrees = 0;
2469 switch (TREE_CODE (t))
2471 case ADDR_EXPR:
2472 /* Build
2473 T.1 = &CHAIN->tramp;
2474 T.2 = __builtin_adjust_trampoline (T.1);
2475 T.3 = (func_type)T.2;
2478 decl = TREE_OPERAND (t, 0);
2479 if (TREE_CODE (decl) != FUNCTION_DECL)
2480 break;
2482 /* Only need to process nested functions. */
2483 target_context = decl_function_context (decl);
2484 if (!target_context)
2485 break;
2487 /* If the nested function doesn't use a static chain, then
2488 it doesn't need a trampoline. */
2489 if (!DECL_STATIC_CHAIN (decl))
2490 break;
2492 /* If we don't want a trampoline, then don't build one. */
2493 if (TREE_NO_TRAMPOLINE (t))
2494 break;
2496 /* Lookup the immediate parent of the callee, as that's where
2497 we need to insert the trampoline. */
2498 for (i = info; i->context != target_context; i = i->outer)
2499 continue;
2501 /* Decide whether to generate a descriptor or a trampoline. */
2502 descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;
2504 if (descr)
2505 x = lookup_descr_for_decl (i, decl, INSERT);
2506 else
2507 x = lookup_tramp_for_decl (i, decl, INSERT);
2509 /* Compute the address of the field holding the trampoline. */
2510 x = get_frame_field (info, target_context, x, &wi->gsi);
2511 x = build_addr (x);
2512 x = gsi_gimplify_val (info, x, &wi->gsi);
2514 /* Do machine-specific ugliness. Normally this will involve
2515 computing extra alignment, but it can really be anything. */
2516 if (descr)
2517 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
2518 else
2519 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2520 call = gimple_build_call (builtin, 1, x);
2521 x = init_tmp_var_with_call (info, &wi->gsi, call);
2523 /* Cast back to the proper function type. */
2524 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2525 x = init_tmp_var (info, x, &wi->gsi);
2527 *tp = x;
2528 break;
2530 default:
2531 if (!IS_TYPE_OR_DECL_P (t))
2532 *walk_subtrees = 1;
2533 break;
2536 return NULL_TREE;
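/* An illustrative sketch (names invented, dump syntax approximate): given

	void
	foo (int v)
	{
	  int bar (int x) { return x + v; }
	  int (*fp) (int) = bar;
	}

   the ADDR_EXPR of BAR is rewritten along the lines of the comment above:

	T.1 = &FRAME.1.TRAMP.3;	    <- or through the static chain from deeper levels
	T.2 = __builtin_adjust_trampoline (T.1);
	fp = (int (*) (int)) T.2;

   When the front end requested a descriptor (FUNC_ADDR_BY_DESCRIPTOR) and
   -ftrampolines is not in effect, the descriptor field and the
   BUILT_IN_ADJUST_DESCRIPTOR builtin are used instead.  */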
2540 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2541 to addresses of nested functions that require the use of
2542 trampolines. The rewrite will involve a reference to a trampoline
2543 generated for the occasion. */
2545 static tree
2546 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2547 struct walk_stmt_info *wi)
2549 struct nesting_info *info = (struct nesting_info *) wi->info;
2550 gimple *stmt = gsi_stmt (*gsi);
2552 switch (gimple_code (stmt))
2554 case GIMPLE_CALL:
2556 /* Only walk call arguments, lest we generate trampolines for
2557 direct calls. */
2558 unsigned long i, nargs = gimple_call_num_args (stmt);
2559 for (i = 0; i < nargs; i++)
2560 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2561 wi, NULL);
2562 break;
2565 case GIMPLE_OMP_TARGET:
2566 if (!is_gimple_omp_offloaded (stmt))
2568 *handled_ops_p = false;
2569 return NULL_TREE;
2571 /* FALLTHRU */
2572 case GIMPLE_OMP_PARALLEL:
2573 case GIMPLE_OMP_TASK:
2575 tree save_local_var_chain = info->new_local_var_chain;
2576 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2577 info->new_local_var_chain = NULL;
2578 char save_static_chain_added = info->static_chain_added;
2579 info->static_chain_added = 0;
2580 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2581 info, gimple_omp_body_ptr (stmt));
2582 if (info->new_local_var_chain)
2583 declare_vars (info->new_local_var_chain,
2584 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2585 false);
2586 for (int i = 0; i < 2; i++)
2588 tree c, decl;
2589 if ((info->static_chain_added & (1 << i)) == 0)
2590 continue;
2591 decl = i ? get_chain_decl (info) : info->frame_decl;
2592 /* Don't add CHAIN.* or FRAME.* twice. */
2593 for (c = gimple_omp_taskreg_clauses (stmt);
2595 c = OMP_CLAUSE_CHAIN (c))
2596 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2597 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2598 && OMP_CLAUSE_DECL (c) == decl)
2599 break;
2600 if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2602 c = build_omp_clause (gimple_location (stmt),
2603 i ? OMP_CLAUSE_FIRSTPRIVATE
2604 : OMP_CLAUSE_SHARED);
2605 OMP_CLAUSE_DECL (c) = decl;
2606 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2607 gimple_omp_taskreg_set_clauses (stmt, c);
2609 else if (c == NULL)
2611 c = build_omp_clause (gimple_location (stmt),
2612 OMP_CLAUSE_MAP);
2613 OMP_CLAUSE_DECL (c) = decl;
2614 OMP_CLAUSE_SET_MAP_KIND (c,
2615 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2616 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2617 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2618 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2622 info->new_local_var_chain = save_local_var_chain;
2623 info->static_chain_added |= save_static_chain_added;
2625 break;
2627 default:
2628 *handled_ops_p = false;
2629 return NULL_TREE;
2632 *handled_ops_p = true;
2633 return NULL_TREE;
2638 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2639 that reference nested functions to make sure that the static chain
2640 is set up properly for the call. */
2642 static tree
2643 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2644 struct walk_stmt_info *wi)
2646 struct nesting_info *const info = (struct nesting_info *) wi->info;
2647 tree decl, target_context;
2648 char save_static_chain_added;
2649 int i;
2650 gimple *stmt = gsi_stmt (*gsi);
2652 switch (gimple_code (stmt))
2654 case GIMPLE_CALL:
2655 if (gimple_call_chain (stmt))
2656 break;
2657 decl = gimple_call_fndecl (stmt);
2658 if (!decl)
2659 break;
2660 target_context = decl_function_context (decl);
2661 if (target_context && DECL_STATIC_CHAIN (decl))
2663 struct nesting_info *i = info;
2664 while (i && i->context != target_context)
2665 i = i->outer;
2666 /* If none of the outer contexts is the target context, this means
2667 that the function is called in the wrong context. */
2668 if (!i)
2669 internal_error ("%s from %s called in %s",
2670 IDENTIFIER_POINTER (DECL_NAME (decl)),
2671 IDENTIFIER_POINTER (DECL_NAME (target_context)),
2672 IDENTIFIER_POINTER (DECL_NAME (info->context)));
2674 gimple_call_set_chain (as_a <gcall *> (stmt),
2675 get_static_chain (info, target_context,
2676 &wi->gsi));
2677 info->static_chain_added |= (1 << (info->context != target_context));
2679 break;
2681 case GIMPLE_OMP_PARALLEL:
2682 case GIMPLE_OMP_TASK:
2683 save_static_chain_added = info->static_chain_added;
2684 info->static_chain_added = 0;
2685 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2686 for (i = 0; i < 2; i++)
2688 tree c, decl;
2689 if ((info->static_chain_added & (1 << i)) == 0)
2690 continue;
2691 decl = i ? get_chain_decl (info) : info->frame_decl;
2692 /* Don't add CHAIN.* or FRAME.* twice. */
2693 for (c = gimple_omp_taskreg_clauses (stmt);
2695 c = OMP_CLAUSE_CHAIN (c))
2696 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2697 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2698 && OMP_CLAUSE_DECL (c) == decl)
2699 break;
2700 if (c == NULL)
2702 c = build_omp_clause (gimple_location (stmt),
2703 i ? OMP_CLAUSE_FIRSTPRIVATE
2704 : OMP_CLAUSE_SHARED);
2705 OMP_CLAUSE_DECL (c) = decl;
2706 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2707 gimple_omp_taskreg_set_clauses (stmt, c);
2710 info->static_chain_added |= save_static_chain_added;
2711 break;
2713 case GIMPLE_OMP_TARGET:
2714 if (!is_gimple_omp_offloaded (stmt))
2716 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2717 break;
2719 save_static_chain_added = info->static_chain_added;
2720 info->static_chain_added = 0;
2721 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2722 for (i = 0; i < 2; i++)
2724 tree c, decl;
2725 if ((info->static_chain_added & (1 << i)) == 0)
2726 continue;
2727 decl = i ? get_chain_decl (info) : info->frame_decl;
2728 /* Don't add CHAIN.* or FRAME.* twice. */
2729 for (c = gimple_omp_target_clauses (stmt);
2731 c = OMP_CLAUSE_CHAIN (c))
2732 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2733 && OMP_CLAUSE_DECL (c) == decl)
2734 break;
2735 if (c == NULL)
2737 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2738 OMP_CLAUSE_DECL (c) = decl;
2739 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2740 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2741 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2742 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2746 info->static_chain_added |= save_static_chain_added;
2747 break;
2749 case GIMPLE_OMP_FOR:
2750 walk_body (convert_gimple_call, NULL, info,
2751 gimple_omp_for_pre_body_ptr (stmt));
2752 /* FALLTHRU */
2753 case GIMPLE_OMP_SECTIONS:
2754 case GIMPLE_OMP_SECTION:
2755 case GIMPLE_OMP_SINGLE:
2756 case GIMPLE_OMP_TEAMS:
2757 case GIMPLE_OMP_MASTER:
2758 case GIMPLE_OMP_TASKGROUP:
2759 case GIMPLE_OMP_ORDERED:
2760 case GIMPLE_OMP_CRITICAL:
2761 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2762 break;
2764 default:
2765 /* Keep looking for other operands. */
2766 *handled_ops_p = false;
2767 return NULL_TREE;
2770 *handled_ops_p = true;
2771 return NULL_TREE;
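/* An illustrative sketch (dump syntax approximate): a direct call from FOO to
   its nested function BAR, when BAR really uses its static chain, shows up in
   the dumps roughly as

	bar (); [static-chain: &FRAME.1]

   while a call made from a deeper nesting level passes a chain derived from
   its own incoming static chain instead of the address of a local frame.
   The OMP cases above additionally add shared/firstprivate clauses (or map
   clauses, for offloaded targets) so that the frame and chain objects are
   available inside regions containing such calls.  */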
2774 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2775 call expressions. At the same time, determine if a nested function
2776 actually uses its static chain; if not, remember that. */
2778 static void
2779 convert_all_function_calls (struct nesting_info *root)
2781 unsigned int chain_count = 0, old_chain_count, iter_count;
2782 struct nesting_info *n;
2784 /* First, optimistically clear static_chain for all decls that haven't
2785 used the static chain already for variable access. But always create
2786 it if not optimizing. This makes it possible to reconstruct the static
2787 nesting tree at run time and thus to resolve up-level references from
2788 within the debugger. */
2789 FOR_EACH_NEST_INFO (n, root)
2791 if (n->thunk_p)
2792 continue;
2793 tree decl = n->context;
2794 if (!optimize)
2796 if (n->inner)
2797 (void) get_frame_type (n);
2798 if (n->outer)
2799 (void) get_chain_decl (n);
2801 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2803 DECL_STATIC_CHAIN (decl) = 0;
2804 if (dump_file && (dump_flags & TDF_DETAILS))
2805 fprintf (dump_file, "Guessing no static-chain for %s\n",
2806 lang_hooks.decl_printable_name (decl, 2));
2808 else
2809 DECL_STATIC_CHAIN (decl) = 1;
2810 chain_count += DECL_STATIC_CHAIN (decl);
2813 FOR_EACH_NEST_INFO (n, root)
2814 if (n->thunk_p)
2816 tree decl = n->context;
2817 tree alias = cgraph_node::get (decl)->thunk.alias;
2818 DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
2821 /* Walk the functions and perform transformations. Note that these
2822 transformations can induce new uses of the static chain, which in turn
2823 require re-examining all users of the decl. */
2824 /* ??? It would make sense to try to use the call graph to speed this up,
2825 but the call graph hasn't really been built yet. Even if it had been, we
2826 would still need to iterate in this loop since address-of references
2827 wouldn't show up in the callgraph anyway. */
2828 iter_count = 0;
2831 old_chain_count = chain_count;
2832 chain_count = 0;
2833 iter_count++;
2835 if (dump_file && (dump_flags & TDF_DETAILS))
2836 fputc ('\n', dump_file);
2838 FOR_EACH_NEST_INFO (n, root)
2840 if (n->thunk_p)
2841 continue;
2842 tree decl = n->context;
2843 walk_function (convert_tramp_reference_stmt,
2844 convert_tramp_reference_op, n);
2845 walk_function (convert_gimple_call, NULL, n);
2846 chain_count += DECL_STATIC_CHAIN (decl);
2849 FOR_EACH_NEST_INFO (n, root)
2850 if (n->thunk_p)
2852 tree decl = n->context;
2853 tree alias = cgraph_node::get (decl)->thunk.alias;
2854 DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
2857 while (chain_count != old_chain_count);
2859 if (dump_file && (dump_flags & TDF_DETAILS))
2860 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2861 iter_count);
2864 struct nesting_copy_body_data
2866 copy_body_data cb;
2867 struct nesting_info *root;
2870 /* A helper subroutine for debug_var_chain type remapping. */
2872 static tree
2873 nesting_copy_decl (tree decl, copy_body_data *id)
2875 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2876 tree *slot = nid->root->var_map->get (decl);
2878 if (slot)
2879 return (tree) *slot;
2881 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2883 tree new_decl = copy_decl_no_change (decl, id);
2884 DECL_ORIGINAL_TYPE (new_decl)
2885 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2886 return new_decl;
2889 if (VAR_P (decl)
2890 || TREE_CODE (decl) == PARM_DECL
2891 || TREE_CODE (decl) == RESULT_DECL)
2892 return decl;
2894 return copy_decl_no_change (decl, id);
2897 /* A helper function for remap_vla_decls. See if *TP contains
2898 some remapped variables. */
2900 static tree
2901 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2903 struct nesting_info *root = (struct nesting_info *) data;
2904 tree t = *tp;
2906 if (DECL_P (t))
2908 *walk_subtrees = 0;
2909 tree *slot = root->var_map->get (t);
2911 if (slot)
2912 return *slot;
2914 return NULL;
2917 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2918 involved. */
2920 static void
2921 remap_vla_decls (tree block, struct nesting_info *root)
2923 tree var, subblock, val, type;
2924 struct nesting_copy_body_data id;
2926 for (subblock = BLOCK_SUBBLOCKS (block);
2927 subblock;
2928 subblock = BLOCK_CHAIN (subblock))
2929 remap_vla_decls (subblock, root);
2931 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2932 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
2934 val = DECL_VALUE_EXPR (var);
2935 type = TREE_TYPE (var);
2937 if (!(TREE_CODE (val) == INDIRECT_REF
2938 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2939 && variably_modified_type_p (type, NULL)))
2940 continue;
2942 if (root->var_map->get (TREE_OPERAND (val, 0))
2943 || walk_tree (&type, contains_remapped_vars, root, NULL))
2944 break;
2947 if (var == NULL_TREE)
2948 return;
2950 memset (&id, 0, sizeof (id));
2951 id.cb.copy_decl = nesting_copy_decl;
2952 id.cb.decl_map = new hash_map<tree, tree>;
2953 id.root = root;
2955 for (; var; var = DECL_CHAIN (var))
2956 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
2958 struct nesting_info *i;
2959 tree newt, context;
2961 val = DECL_VALUE_EXPR (var);
2962 type = TREE_TYPE (var);
2964 if (!(TREE_CODE (val) == INDIRECT_REF
2965 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2966 && variably_modified_type_p (type, NULL)))
2967 continue;
2969 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2970 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2971 continue;
2973 context = decl_function_context (var);
2974 for (i = root; i; i = i->outer)
2975 if (i->context == context)
2976 break;
2978 if (i == NULL)
2979 continue;
2981 /* Fully expand value expressions. This avoids having debug variables
2982 that are referenced only from them and could therefore be swept during GC. */
2983 if (slot)
2985 tree t = (tree) *slot;
2986 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2987 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2990 id.cb.src_fn = i->context;
2991 id.cb.dst_fn = i->context;
2992 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2994 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2995 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2997 newt = TREE_TYPE (newt);
2998 type = TREE_TYPE (type);
3000 if (TYPE_NAME (newt)
3001 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3002 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3003 && newt != type
3004 && TYPE_NAME (newt) == TYPE_NAME (type))
3005 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3007 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
3008 if (val != DECL_VALUE_EXPR (var))
3009 SET_DECL_VALUE_EXPR (var, val);
3012 delete id.cb.decl_map;
3015 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3016 involved. */
3018 static void
3019 fixup_vla_decls (tree block)
3021 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3022 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3024 tree val = DECL_VALUE_EXPR (var);
3026 if (!(TREE_CODE (val) == INDIRECT_REF
3027 && VAR_P (TREE_OPERAND (val, 0))
3028 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3029 continue;
3031 /* Fully expand value expressions. This avoids having debug variables
3032 that are referenced only from them and could therefore be swept during GC. */
3033 val = build1 (INDIRECT_REF, TREE_TYPE (val),
3034 DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3035 SET_DECL_VALUE_EXPR (var, val);
3038 for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3039 fixup_vla_decls (sub);
3042 /* Fold the MEM_REF *E. */
3043 bool
3044 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3046 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3047 *ref_p = fold (*ref_p);
3048 return true;
3051 /* Given DECL, a nested function, build an initialization call for FIELD,
3052 the trampoline or descriptor for DECL, using FUNC as the function. */
3054 static gcall *
3055 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3056 tree func)
3058 tree arg1, arg2, arg3, x;
3060 gcc_assert (DECL_STATIC_CHAIN (decl));
3061 arg3 = build_addr (info->frame_decl);
3063 arg2 = build_addr (decl);
3065 x = build3 (COMPONENT_REF, TREE_TYPE (field),
3066 info->frame_decl, field, NULL_TREE);
3067 arg1 = build_addr (x);
3069 return gimple_build_call (func, 3, arg1, arg2, arg3);
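/* An illustrative sketch (field names invented): for a nested function BAR
   whose address required a trampoline in FOO, this builds roughly

	__builtin_init_trampoline (&FRAME.1.TRAMP.3, &bar, &FRAME.1);

   i.e. the address of the trampoline (or descriptor) field, the nested
   function itself, and the static chain value to be baked into the
   trampoline.  */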
3072 /* Do "everything else" to clean up or complete state collected by the various
3073 walking passes -- create a field to hold the frame base address, lay out the
3074 types and decls, generate code to initialize the frame decl, store critical
3075 expressions in the struct function for rtl to find. */
3077 static void
3078 finalize_nesting_tree_1 (struct nesting_info *root)
3080 gimple_seq stmt_list = NULL;
3081 gimple *stmt;
3082 tree context = root->context;
3083 struct function *sf;
3085 if (root->thunk_p)
3086 return;
3088 /* If we created a non-local frame type or decl, we need to lay them
3089 out at this time. */
3090 if (root->frame_type)
3092 /* Debugging information needs to compute the frame base address of the
3093 parent frame out of the static chain from the nested frame.
3095 The static chain is the address of the FRAME record, so one could
3096 imagine it would be possible to compute the frame base address just
3097 adding a constant offset to this address. Unfortunately, this is not
3098 possible: if the FRAME object has alignment constraints that are
3099 stronger than the stack, then the offset between the frame base and
3100 the FRAME object will be dynamic.
3102 What we do instead is to append a field to the FRAME object that holds
3103 the frame base address: then debug info just has to fetch this
3104 field. */
3106 /* Debugging information will refer to the CFA as the frame base
3107 address: we will do the same here. */
3108 const tree frame_addr_fndecl
3109 = builtin_decl_explicit (BUILT_IN_DWARF_CFA);
3111 /* Create a field in the FRAME record to hold the frame base address for
3112 this stack frame. Since it will be used only by the debugger, put it
3113 at the end of the record in order not to shift all other offsets. */
3114 tree fb_decl = make_node (FIELD_DECL);
3116 DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
3117 TREE_TYPE (fb_decl) = ptr_type_node;
3118 TREE_ADDRESSABLE (fb_decl) = 1;
3119 DECL_CONTEXT (fb_decl) = root->frame_type;
3120 TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
3121 fb_decl);
3123 /* In some cases the frame type will trigger the -Wpadded warning.
3124 This is not helpful; suppress it. */
3125 int save_warn_padded = warn_padded;
3126 warn_padded = 0;
3127 layout_type (root->frame_type);
3128 warn_padded = save_warn_padded;
3129 layout_decl (root->frame_decl, 0);
3131 /* Initialize the frame base address field. If the builtin we need is
3132 not available, set it to NULL so that debugging information does not
3133 reference junk. */
3134 tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
3135 root->frame_decl, fb_decl, NULL_TREE);
3136 tree fb_tmp;
3138 if (frame_addr_fndecl != NULL_TREE)
3140 gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
3141 integer_zero_node);
3142 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3144 fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
3146 else
3147 fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
3148 gimple_seq_add_stmt (&stmt_list,
3149 gimple_build_assign (fb_ref, fb_tmp));
3151 declare_vars (root->frame_decl,
3152 gimple_seq_first_stmt (gimple_body (context)), true);
3155 /* If any parameters were referenced non-locally, then we need to insert
3156 a copy or a pointer. */
3157 if (root->any_parm_remapped)
3159 tree p;
3160 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
3162 tree field, x, y;
3164 field = lookup_field_for_decl (root, p, NO_INSERT);
3165 if (!field)
3166 continue;
3168 if (use_pointer_in_frame (p))
3169 x = build_addr (p);
3170 else
3171 x = p;
3173 /* If the assignment is from a non-register the stmt is
3174 not valid gimple. Make it so by using a temporary instead. */
3175 if (!is_gimple_reg (x)
3176 && is_gimple_reg_type (TREE_TYPE (x)))
3178 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3179 x = init_tmp_var (root, x, &gsi);
3182 y = build3 (COMPONENT_REF, TREE_TYPE (field),
3183 root->frame_decl, field, NULL_TREE);
3184 stmt = gimple_build_assign (y, x);
3185 gimple_seq_add_stmt (&stmt_list, stmt);
3189 /* If a chain_field was created, then it needs to be initialized
3190 from chain_decl. */
3191 if (root->chain_field)
3193 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
3194 root->frame_decl, root->chain_field, NULL_TREE);
3195 stmt = gimple_build_assign (x, get_chain_decl (root));
3196 gimple_seq_add_stmt (&stmt_list, stmt);
3199 /* If trampolines were created, then we need to initialize them. */
3200 if (root->any_tramp_created)
3202 struct nesting_info *i;
3203 for (i = root->inner; i ; i = i->next)
3205 tree field, x;
3207 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
3208 if (!field)
3209 continue;
3211 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
3212 stmt = build_init_call_stmt (root, i->context, field, x);
3213 gimple_seq_add_stmt (&stmt_list, stmt);
3217 /* If descriptors were created, then we need to initialize them. */
3218 if (root->any_descr_created)
3220 struct nesting_info *i;
3221 for (i = root->inner; i ; i = i->next)
3223 tree field, x;
3225 field = lookup_descr_for_decl (root, i->context, NO_INSERT);
3226 if (!field)
3227 continue;
3229 x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
3230 stmt = build_init_call_stmt (root, i->context, field, x);
3231 gimple_seq_add_stmt (&stmt_list, stmt);
3235 /* If we created initialization statements, insert them. */
3236 if (stmt_list)
3238 gbind *bind;
3239 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
3240 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
3241 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
3242 gimple_bind_set_body (bind, stmt_list);
3245 /* If a chain_decl was created, then it needs to be registered with
3246 struct function so that it gets initialized from the static chain
3247 register at the beginning of the function. */
3248 sf = DECL_STRUCT_FUNCTION (root->context);
3249 sf->static_chain_decl = root->chain_decl;
3251 /* Similarly for the non-local goto save area. */
3252 if (root->nl_goto_field)
3254 sf->nonlocal_goto_save_area
3255 = get_frame_field (root, context, root->nl_goto_field, NULL);
3256 sf->has_nonlocal_label = 1;
3259 /* Make sure all new local variables get inserted into the
3260 proper BIND_EXPR. */
3261 if (root->new_local_var_chain)
3262 declare_vars (root->new_local_var_chain,
3263 gimple_seq_first_stmt (gimple_body (root->context)),
3264 false);
3266 if (root->debug_var_chain)
3268 tree debug_var;
3269 gbind *scope;
3271 remap_vla_decls (DECL_INITIAL (root->context), root);
3273 for (debug_var = root->debug_var_chain; debug_var;
3274 debug_var = DECL_CHAIN (debug_var))
3275 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3276 break;
3278 /* If there are any debug decls with variable length types,
3279 remap those types using other debug_var_chain variables. */
3280 if (debug_var)
3282 struct nesting_copy_body_data id;
3284 memset (&id, 0, sizeof (id));
3285 id.cb.copy_decl = nesting_copy_decl;
3286 id.cb.decl_map = new hash_map<tree, tree>;
3287 id.root = root;
3289 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
3290 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3292 tree type = TREE_TYPE (debug_var);
3293 tree newt, t = type;
3294 struct nesting_info *i;
3296 for (i = root; i; i = i->outer)
3297 if (variably_modified_type_p (type, i->context))
3298 break;
3300 if (i == NULL)
3301 continue;
3303 id.cb.src_fn = i->context;
3304 id.cb.dst_fn = i->context;
3305 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3307 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
3308 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3310 newt = TREE_TYPE (newt);
3311 t = TREE_TYPE (t);
3313 if (TYPE_NAME (newt)
3314 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3315 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3316 && newt != t
3317 && TYPE_NAME (newt) == TYPE_NAME (t))
3318 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3321 delete id.cb.decl_map;
3324 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
3325 if (gimple_bind_block (scope))
3326 declare_vars (root->debug_var_chain, scope, true);
3327 else
3328 BLOCK_VARS (DECL_INITIAL (root->context))
3329 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
3330 root->debug_var_chain);
3332 else
3333 fixup_vla_decls (DECL_INITIAL (root->context));
3335 /* Fold the rewritten MEM_REF trees. */
3336 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
3338 /* Dump the translated tree function. */
3339 if (dump_file)
3341 fputs ("\n\n", dump_file);
3342 dump_function_to_file (root->context, dump_file, dump_flags);
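/* An illustrative sketch (names invented, dump syntax approximate): after
   finalization, the prologue of a parent function FOO whose parameter P and
   nested function BAR needed the frame starts roughly with

	FRAME.1.FRAME_BASE.PARENT = __builtin_dwarf_cfa (0);
	FRAME.1.p = p;
	__builtin_init_trampoline (&FRAME.1.TRAMP.3, &bar, &FRAME.1);

   all inserted at the head of the outermost GIMPLE_BIND, where FRAME.1 itself
   is declared.  If FOO is itself nested and its children need its chain, the
   incoming static chain is stored into the frame's chain field here as
   well.  */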
3346 static void
3347 finalize_nesting_tree (struct nesting_info *root)
3349 struct nesting_info *n;
3350 FOR_EACH_NEST_INFO (n, root)
3351 finalize_nesting_tree_1 (n);
3354 /* Unnest the nodes and pass them to cgraph. */
3356 static void
3357 unnest_nesting_tree_1 (struct nesting_info *root)
3359 struct cgraph_node *node = cgraph_node::get (root->context);
3361 /* For nested functions update the cgraph to reflect unnesting.
3362 We also delay finalizing of these functions up to this point. */
3363 if (node->origin)
3365 node->unnest ();
3366 if (!root->thunk_p)
3367 cgraph_node::finalize_function (root->context, true);
3371 static void
3372 unnest_nesting_tree (struct nesting_info *root)
3374 struct nesting_info *n;
3375 FOR_EACH_NEST_INFO (n, root)
3376 unnest_nesting_tree_1 (n);
3379 /* Free the data structures allocated during this pass. */
3381 static void
3382 free_nesting_tree (struct nesting_info *root)
3384 struct nesting_info *node, *next;
3386 node = iter_nestinfo_start (root);
3389 next = iter_nestinfo_next (node);
3390 delete node->var_map;
3391 delete node->field_map;
3392 delete node->mem_refs;
3393 free (node);
3394 node = next;
3396 while (node);
3399 /* Gimplify a function and all its nested functions. */
3400 static void
3401 gimplify_all_functions (struct cgraph_node *root)
3403 struct cgraph_node *iter;
3404 if (!gimple_body (root->decl))
3405 gimplify_function_tree (root->decl);
3406 for (iter = root->nested; iter; iter = iter->next_nested)
3407 if (!iter->thunk.thunk_p)
3408 gimplify_all_functions (iter);
3411 /* Main entry point for this pass. Process FNDECL and all of its nested
3412 subroutines and turn them into something less tightly bound. */
3414 void
3415 lower_nested_functions (tree fndecl)
3417 struct cgraph_node *cgn;
3418 struct nesting_info *root;
3420 /* If there are no nested functions, there's nothing to do. */
3421 cgn = cgraph_node::get (fndecl);
3422 if (!cgn->nested)
3423 return;
3425 gimplify_all_functions (cgn);
3427 set_dump_file (dump_begin (TDI_nested, &dump_flags));
3428 if (dump_file)
3429 fprintf (dump_file, "\n;; Function %s\n\n",
3430 lang_hooks.decl_printable_name (fndecl, 2));
3432 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3433 root = create_nesting_tree (cgn);
3435 walk_all_functions (convert_nonlocal_reference_stmt,
3436 convert_nonlocal_reference_op,
3437 root);
3438 walk_all_functions (convert_local_reference_stmt,
3439 convert_local_reference_op,
3440 root);
3441 walk_all_functions (convert_nl_goto_reference, NULL, root);
3442 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3444 convert_all_function_calls (root);
3445 finalize_nesting_tree (root);
3446 unnest_nesting_tree (root);
3448 free_nesting_tree (root);
3449 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3451 if (dump_file)
3453 dump_end (TDI_nested, dump_file);
3454 set_dump_file (NULL);
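/* An illustrative end-to-end sketch (names invented, dump syntax
   approximate): the GNU C input

	int
	foo (int p)
	{
	  int bar (void) { return p; }
	  return bar ();
	}

   leaves this pass as two independent GIMPLE functions along the lines of

	int
	foo (int p)
	{
	  struct FRAME.foo FRAME.1;
	  FRAME.1.p = p;
	  return bar (); [static-chain: &FRAME.1]
	}

	int
	bar (void)
	{
	  struct FRAME.foo *CHAIN.2;	<- static_chain_decl, set up at expansion
	  return CHAIN.2->p;
	}

   after which BAR no longer depends on FOO's live stack frame and the two
   can be compiled independently.  */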
3458 #include "gt-tree-nested.h"