gcc/tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
47 /* The object of this pass is to lower the representation of a set of nested
48 functions in order to expose all of the gory details of the various
49 nonlocal references. We want to do this sooner rather than later, in
50 order to give us more freedom in emitting all of the functions in question.
52 Back in olden times, when gcc was young, we developed an insanely
53 complicated scheme whereby variables which were referenced nonlocally
54 were forced to live in the stack of the declaring function, and then
55 the nested functions magically discovered where these variables were
56 placed. In order for this scheme to function properly, it required
57 that the outer function be partially expanded, then we switch to
58 compiling the inner function, and once done with those we switch back
59 to compiling the outer function. Such delicate ordering requirements
 60 make it difficult to do whole translation unit optimizations
61 involving such functions.
63 The implementation here is much more direct. Everything that can be
64 referenced by an inner function is a member of an explicitly created
65 structure herein called the "nonlocal frame struct". The incoming
66 static chain for a nested function is a pointer to this struct in
67 the parent. In this way, we settle on known offsets from a known
68 base, and so are decoupled from the logic that places objects in the
69 function's stack frame. More importantly, we don't have to wait for
70 that to happen -- since the compilation of the inner function is no
71 longer tied to a real stack frame, the nonlocal frame struct can be
72 allocated anywhere. Which means that the outer function is now
73 inlinable.
75 Theory of operation here is very simple. Iterate over all the
76 statements in all the functions (depth first) several times,
77 allocating structures and fields on demand. In general we want to
 78 examine inner functions first, so that we can avoid making unnecessary
 79 changes to outer functions.
81 The order of the passes matters a bit, in that later passes will be
82 skipped if it is discovered that the functions don't actually interact
83 at all. That is, they're nested in the lexical sense but could have
84 been written as independent functions without change. */
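/* As an illustrative sketch (not taken from this file; the names
   FRAME_outer and "frame" are made up), the lowering described above
   conceptually turns

       int outer (int n)
       {
         int inner (void) { return n + 1; }
         return inner ();
       }

   into something along the lines of

       struct FRAME_outer { int n; };

       static int inner (struct FRAME_outer *chain) { return chain->n + 1; }

       int outer (int n)
       {
         struct FRAME_outer frame;
         frame.n = n;
         return inner (&frame);
       }

   so the nonlocal use of N becomes a known offset from the static chain
   pointer, independent of OUTER's real stack frame.  */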
87 struct nesting_info
89 struct nesting_info *outer;
90 struct nesting_info *inner;
91 struct nesting_info *next;
93 hash_map<tree, tree> *field_map;
94 hash_map<tree, tree> *var_map;
95 hash_set<tree *> *mem_refs;
96 bitmap suppress_expansion;
98 tree context;
99 tree new_local_var_chain;
100 tree debug_var_chain;
101 tree frame_type;
102 tree frame_decl;
103 tree chain_field;
104 tree chain_decl;
105 tree nl_goto_field;
107 bool any_parm_remapped;
108 bool any_tramp_created;
109 bool any_descr_created;
110 char static_chain_added;
114 /* Iterate over the nesting tree, starting with ROOT, depth first. */
116 static inline struct nesting_info *
117 iter_nestinfo_start (struct nesting_info *root)
119 while (root->inner)
120 root = root->inner;
121 return root;
124 static inline struct nesting_info *
125 iter_nestinfo_next (struct nesting_info *node)
127 if (node->next)
128 return iter_nestinfo_start (node->next);
129 return node->outer;
132 #define FOR_EACH_NEST_INFO(I, ROOT) \
133 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
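/* Illustrative use of the iterator (not from this file; process_one is a
   hypothetical helper): visit every function in the tree rooted at ROOT,
   reaching each function only after everything nested inside it.

       struct nesting_info *n;
       FOR_EACH_NEST_INFO (n, root)
         process_one (n);

   This innermost-first order is what lets the later passes avoid touching
   outer functions unnecessarily, as noted in the overview above.  */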
135 /* Obstack used for the bitmaps in the struct above. */
136 static struct bitmap_obstack nesting_info_bitmap_obstack;
 139 /* We're working in so many different function contexts simultaneously
140 that create_tmp_var is dangerous. Prevent mishap. */
141 #define create_tmp_var cant_use_create_tmp_var_here_dummy
143 /* Like create_tmp_var, except record the variable for registration at
144 the given nesting level. */
146 static tree
147 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
149 tree tmp_var;
151 /* If the type is of variable size or a type which must be created by the
152 frontend, something is wrong. Note that we explicitly allow
 153 incomplete types here, since we create them ourselves. */
154 gcc_assert (!TREE_ADDRESSABLE (type));
155 gcc_assert (!TYPE_SIZE_UNIT (type)
156 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
158 tmp_var = create_tmp_var_raw (type, prefix);
159 DECL_CONTEXT (tmp_var) = info->context;
160 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
161 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
162 if (TREE_CODE (type) == COMPLEX_TYPE
163 || TREE_CODE (type) == VECTOR_TYPE)
164 DECL_GIMPLE_REG_P (tmp_var) = 1;
166 info->new_local_var_chain = tmp_var;
168 return tmp_var;
 171 /* Take the address of EXP and mark it addressable as necessary. */
174 tree
175 build_addr (tree exp)
177 mark_addressable (exp);
178 return build_fold_addr_expr (exp);
181 /* Insert FIELD into TYPE, sorted by alignment requirements. */
183 void
184 insert_field_into_struct (tree type, tree field)
186 tree *p;
188 DECL_CONTEXT (field) = type;
190 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
191 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
192 break;
194 DECL_CHAIN (field) = *p;
195 *p = field;
197 /* Set correct alignment for frame struct type. */
198 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
199 SET_TYPE_ALIGN (type, DECL_ALIGN (field));
202 /* Build or return the RECORD_TYPE that describes the frame state that is
203 shared between INFO->CONTEXT and its nested functions. This record will
204 not be complete until finalize_nesting_tree; up until that point we'll
205 be adding fields as necessary.
207 We also build the DECL that represents this frame in the function. */
209 static tree
210 get_frame_type (struct nesting_info *info)
212 tree type = info->frame_type;
213 if (!type)
215 char *name;
217 type = make_node (RECORD_TYPE);
219 name = concat ("FRAME.",
220 IDENTIFIER_POINTER (DECL_NAME (info->context)),
221 NULL);
222 TYPE_NAME (type) = get_identifier (name);
223 free (name);
225 info->frame_type = type;
227 /* Do not put info->frame_decl on info->new_local_var_chain,
228 so that we can declare it in the lexical blocks, which
229 makes sure virtual regs that end up appearing in its RTL
230 expression get substituted in instantiate_virtual_regs. */
231 info->frame_decl = create_tmp_var_raw (type, "FRAME");
232 DECL_CONTEXT (info->frame_decl) = info->context;
233 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
234 DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;
236 /* ??? Always make it addressable for now, since it is meant to
237 be pointed to by the static chain pointer. This pessimizes
238 when it turns out that no static chains are needed because
239 the nested functions referencing non-local variables are not
240 reachable, but the true pessimization is to create the non-
241 local frame structure in the first place. */
242 TREE_ADDRESSABLE (info->frame_decl) = 1;
245 return type;
248 /* Return true if DECL should be referenced by pointer in the non-local frame
249 structure. */
251 static bool
252 use_pointer_in_frame (tree decl)
254 if (TREE_CODE (decl) == PARM_DECL)
256 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
257 sized DECLs, and inefficient to copy large aggregates. Don't bother
258 moving anything but scalar parameters. */
259 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
261 else
263 /* Variable-sized DECLs can only come from OMP clauses at this point
264 since the gimplifier has already turned the regular variables into
265 pointers. Do the same as the gimplifier. */
266 return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
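/* For example (illustrative): a "struct large" PARM_DECL stays where the
   caller put it and only its address goes into the frame, whereas a
   scalar "int" parameter gets a field of its own and is copied into the
   frame.  A variable-sized local, as produced for OMP clauses, is
   likewise referenced through a pointer field.  */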
270 /* Given DECL, a non-locally accessed variable, find or create a field
271 in the non-local frame structure for the given nesting context. */
273 static tree
274 lookup_field_for_decl (struct nesting_info *info, tree decl,
275 enum insert_option insert)
277 gcc_checking_assert (decl_function_context (decl) == info->context);
279 if (insert == NO_INSERT)
281 tree *slot = info->field_map->get (decl);
282 return slot ? *slot : NULL_TREE;
285 tree *slot = &info->field_map->get_or_insert (decl);
286 if (!*slot)
288 tree type = get_frame_type (info);
289 tree field = make_node (FIELD_DECL);
290 DECL_NAME (field) = DECL_NAME (decl);
292 if (use_pointer_in_frame (decl))
294 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
295 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
296 DECL_NONADDRESSABLE_P (field) = 1;
298 else
300 TREE_TYPE (field) = TREE_TYPE (decl);
301 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
302 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
303 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
304 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
305 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
306 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
308 /* Declare the transformation and adjust the original DECL. For a
309 variable or for a parameter when not optimizing, we make it point
310 to the field in the frame directly. For a parameter, we don't do
311 it when optimizing because the variable tracking pass will already
 312 do the job. */
313 if (VAR_P (decl) || !optimize)
315 tree x
316 = build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
317 field, NULL_TREE);
319 /* If the next declaration is a PARM_DECL pointing to the DECL,
320 we need to adjust its VALUE_EXPR directly, since chains of
321 VALUE_EXPRs run afoul of garbage collection. This occurs
322 in Ada for Out parameters that aren't copied in. */
323 tree next = DECL_CHAIN (decl);
324 if (next
325 && TREE_CODE (next) == PARM_DECL
326 && DECL_HAS_VALUE_EXPR_P (next)
327 && DECL_VALUE_EXPR (next) == decl)
328 SET_DECL_VALUE_EXPR (next, x);
330 SET_DECL_VALUE_EXPR (decl, x);
331 DECL_HAS_VALUE_EXPR_P (decl) = 1;
335 insert_field_into_struct (type, field);
336 *slot = field;
338 if (TREE_CODE (decl) == PARM_DECL)
339 info->any_parm_remapped = true;
342 return *slot;
345 /* Build or return the variable that holds the static chain within
346 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
348 static tree
349 get_chain_decl (struct nesting_info *info)
351 tree decl = info->chain_decl;
353 if (!decl)
355 tree type;
357 type = get_frame_type (info->outer);
358 type = build_pointer_type (type);
360 /* Note that this variable is *not* entered into any BIND_EXPR;
361 the construction of this variable is handled specially in
362 expand_function_start and initialize_inlined_parameters.
 363 Note also that it's represented as a parameter. This is closer
 364 to the truth, since the initial value does come from
365 the caller. */
366 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
367 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
368 DECL_ARTIFICIAL (decl) = 1;
369 DECL_IGNORED_P (decl) = 1;
370 TREE_USED (decl) = 1;
371 DECL_CONTEXT (decl) = info->context;
372 DECL_ARG_TYPE (decl) = type;
374 /* Tell tree-inline.c that we never write to this variable, so
375 it can copy-prop the replacement value immediately. */
376 TREE_READONLY (decl) = 1;
378 info->chain_decl = decl;
380 if (dump_file
381 && (dump_flags & TDF_DETAILS)
382 && !DECL_STATIC_CHAIN (info->context))
383 fprintf (dump_file, "Setting static-chain for %s\n",
384 lang_hooks.decl_printable_name (info->context, 2));
386 DECL_STATIC_CHAIN (info->context) = 1;
388 return decl;
391 /* Build or return the field within the non-local frame state that holds
392 the static chain for INFO->CONTEXT. This is the way to walk back up
393 multiple nesting levels. */
395 static tree
396 get_chain_field (struct nesting_info *info)
398 tree field = info->chain_field;
400 if (!field)
402 tree type = build_pointer_type (get_frame_type (info->outer));
404 field = make_node (FIELD_DECL);
405 DECL_NAME (field) = get_identifier ("__chain");
406 TREE_TYPE (field) = type;
407 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
408 DECL_NONADDRESSABLE_P (field) = 1;
410 insert_field_into_struct (get_frame_type (info), field);
412 info->chain_field = field;
414 if (dump_file
415 && (dump_flags & TDF_DETAILS)
416 && !DECL_STATIC_CHAIN (info->context))
417 fprintf (dump_file, "Setting static-chain for %s\n",
418 lang_hooks.decl_printable_name (info->context, 2));
420 DECL_STATIC_CHAIN (info->context) = 1;
422 return field;
425 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
427 static tree
428 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
429 gcall *call)
431 tree t;
433 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
434 gimple_call_set_lhs (call, t);
435 if (! gsi_end_p (*gsi))
436 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
437 gsi_insert_before (gsi, call, GSI_SAME_STMT);
439 return t;
443 /* Copy EXP into a temporary. Allocate the temporary in the context of
444 INFO and insert the initialization statement before GSI. */
446 static tree
447 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
449 tree t;
450 gimple *stmt;
452 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
453 stmt = gimple_build_assign (t, exp);
454 if (! gsi_end_p (*gsi))
455 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
456 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
458 return t;
462 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
464 static tree
465 gsi_gimplify_val (struct nesting_info *info, tree exp,
466 gimple_stmt_iterator *gsi)
468 if (is_gimple_val (exp))
469 return exp;
470 else
471 return init_tmp_var (info, exp, gsi);
474 /* Similarly, but copy from the temporary and insert the statement
475 after the iterator. */
477 static tree
478 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
480 tree t;
481 gimple *stmt;
483 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
484 stmt = gimple_build_assign (exp, t);
485 if (! gsi_end_p (*gsi))
486 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
487 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
489 return t;
492 /* Build or return the type used to represent a nested function trampoline. */
494 static GTY(()) tree trampoline_type;
496 static tree
497 get_trampoline_type (struct nesting_info *info)
499 unsigned align, size;
500 tree t;
502 if (trampoline_type)
503 return trampoline_type;
505 align = TRAMPOLINE_ALIGNMENT;
506 size = TRAMPOLINE_SIZE;
508 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
509 then allocate extra space so that we can do dynamic alignment. */
510 if (align > STACK_BOUNDARY)
512 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
513 align = STACK_BOUNDARY;
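      /* Worked example (illustrative, assuming BITS_PER_UNIT == 8): with
         TRAMPOLINE_ALIGNMENT == 128 and STACK_BOUNDARY == 64, the extra
         space is (16 - 1) & -8 == 8 bytes, enough to realign an 8-byte
         aligned buffer up to a 16-byte boundary at run time.  */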
516 t = build_index_type (size_int (size - 1));
517 t = build_array_type (char_type_node, t);
518 t = build_decl (DECL_SOURCE_LOCATION (info->context),
519 FIELD_DECL, get_identifier ("__data"), t);
520 SET_DECL_ALIGN (t, align);
521 DECL_USER_ALIGN (t) = 1;
523 trampoline_type = make_node (RECORD_TYPE);
524 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
525 TYPE_FIELDS (trampoline_type) = t;
526 layout_type (trampoline_type);
527 DECL_CONTEXT (t) = trampoline_type;
529 return trampoline_type;
532 /* Build or return the type used to represent a nested function descriptor. */
534 static GTY(()) tree descriptor_type;
536 static tree
537 get_descriptor_type (struct nesting_info *info)
539 /* The base alignment is that of a function. */
540 const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
541 tree t;
543 if (descriptor_type)
544 return descriptor_type;
546 t = build_index_type (integer_one_node);
547 t = build_array_type (ptr_type_node, t);
548 t = build_decl (DECL_SOURCE_LOCATION (info->context),
549 FIELD_DECL, get_identifier ("__data"), t);
550 SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
551 DECL_USER_ALIGN (t) = 1;
553 descriptor_type = make_node (RECORD_TYPE);
554 TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
555 TYPE_FIELDS (descriptor_type) = t;
556 layout_type (descriptor_type);
557 DECL_CONTEXT (t) = descriptor_type;
559 return descriptor_type;
562 /* Given DECL, a nested function, find or create an element in the
563 var map for this function. */
565 static tree
566 lookup_element_for_decl (struct nesting_info *info, tree decl,
567 enum insert_option insert)
569 if (insert == NO_INSERT)
571 tree *slot = info->var_map->get (decl);
572 return slot ? *slot : NULL_TREE;
575 tree *slot = &info->var_map->get_or_insert (decl);
576 if (!*slot)
577 *slot = build_tree_list (NULL_TREE, NULL_TREE);
579 return (tree) *slot;
582 /* Given DECL, a nested function, create a field in the non-local
583 frame structure for this function. */
585 static tree
586 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
588 tree field = make_node (FIELD_DECL);
589 DECL_NAME (field) = DECL_NAME (decl);
590 TREE_TYPE (field) = type;
591 TREE_ADDRESSABLE (field) = 1;
592 insert_field_into_struct (get_frame_type (info), field);
593 return field;
596 /* Given DECL, a nested function, find or create a field in the non-local
597 frame structure for a trampoline for this function. */
599 static tree
600 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
601 enum insert_option insert)
603 tree elt, field;
605 elt = lookup_element_for_decl (info, decl, insert);
606 if (!elt)
607 return NULL_TREE;
609 field = TREE_PURPOSE (elt);
611 if (!field && insert == INSERT)
613 field = create_field_for_decl (info, decl, get_trampoline_type (info));
614 TREE_PURPOSE (elt) = field;
615 info->any_tramp_created = true;
618 return field;
621 /* Given DECL, a nested function, find or create a field in the non-local
622 frame structure for a descriptor for this function. */
624 static tree
625 lookup_descr_for_decl (struct nesting_info *info, tree decl,
626 enum insert_option insert)
628 tree elt, field;
630 elt = lookup_element_for_decl (info, decl, insert);
631 if (!elt)
632 return NULL_TREE;
634 field = TREE_VALUE (elt);
636 if (!field && insert == INSERT)
638 field = create_field_for_decl (info, decl, get_descriptor_type (info));
639 TREE_VALUE (elt) = field;
640 info->any_descr_created = true;
643 return field;
646 /* Build or return the field within the non-local frame state that holds
647 the non-local goto "jmp_buf". The buffer itself is maintained by the
648 rtl middle-end as dynamic stack space is allocated. */
650 static tree
651 get_nl_goto_field (struct nesting_info *info)
653 tree field = info->nl_goto_field;
654 if (!field)
656 unsigned size;
657 tree type;
659 /* For __builtin_nonlocal_goto, we need N words. The first is the
660 frame pointer, the rest is for the target's stack pointer save
661 area. The number of words is controlled by STACK_SAVEAREA_MODE;
662 not the best interface, but it'll do for now. */
663 if (Pmode == ptr_mode)
664 type = ptr_type_node;
665 else
666 type = lang_hooks.types.type_for_mode (Pmode, 1);
668 scalar_int_mode mode
669 = as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
670 size = GET_MODE_SIZE (mode);
671 size = size / GET_MODE_SIZE (Pmode);
672 size = size + 1;
674 type = build_array_type
675 (type, build_index_type (size_int (size)));
677 field = make_node (FIELD_DECL);
678 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
679 TREE_TYPE (field) = type;
680 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
681 TREE_ADDRESSABLE (field) = 1;
683 insert_field_into_struct (get_frame_type (info), field);
685 info->nl_goto_field = field;
688 return field;
691 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
693 static void
694 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
695 struct nesting_info *info, gimple_seq *pseq)
697 struct walk_stmt_info wi;
699 memset (&wi, 0, sizeof (wi));
700 wi.info = info;
701 wi.val_only = true;
702 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
706 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
708 static inline void
709 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
710 struct nesting_info *info)
712 gimple_seq body = gimple_body (info->context);
713 walk_body (callback_stmt, callback_op, info, &body);
714 gimple_set_body (info->context, body);
717 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
719 static void
720 walk_gimple_omp_for (gomp_for *for_stmt,
721 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
722 struct nesting_info *info)
724 struct walk_stmt_info wi;
725 gimple_seq seq;
726 tree t;
727 size_t i;
729 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
731 seq = NULL;
732 memset (&wi, 0, sizeof (wi));
733 wi.info = info;
734 wi.gsi = gsi_last (seq);
736 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
738 wi.val_only = false;
739 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
740 &wi, NULL);
741 wi.val_only = true;
742 wi.is_lhs = false;
743 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
744 &wi, NULL);
746 wi.val_only = true;
747 wi.is_lhs = false;
748 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
749 &wi, NULL);
751 t = gimple_omp_for_incr (for_stmt, i);
752 gcc_assert (BINARY_CLASS_P (t));
753 wi.val_only = false;
754 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
755 wi.val_only = true;
756 wi.is_lhs = false;
757 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
760 seq = gsi_seq (wi.gsi);
761 if (!gimple_seq_empty_p (seq))
763 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
764 annotate_all_with_location (seq, gimple_location (for_stmt));
765 gimple_seq_add_seq (&pre_body, seq);
766 gimple_omp_for_set_pre_body (for_stmt, pre_body);
770 /* Similarly for ROOT and all functions nested underneath, depth first. */
772 static void
773 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
774 struct nesting_info *root)
776 struct nesting_info *n;
777 FOR_EACH_NEST_INFO (n, root)
778 walk_function (callback_stmt, callback_op, n);
 782 /* We have to check for a fairly pathological case. The operands of a
783 nested function are to be interpreted in the context of the enclosing
784 function. So if any are variably-sized, they will get remapped when the
785 enclosing function is inlined. But that remapping would also have to be
786 done in the types of the PARM_DECLs of the nested function, meaning the
787 argument types of that function will disagree with the arguments in the
788 calls to that function. So we'd either have to make a copy of the nested
789 function corresponding to each time the enclosing function was inlined or
790 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
791 function. The former is not practical. The latter would still require
792 detecting this case to know when to add the conversions. So, for now at
793 least, we don't inline such an enclosing function.
 795 We have to do that check recursively, so here we return whether
 796 FNDECL has such a nested function. ORIG_FNDECL is the function we were
797 trying to inline to use for checking whether any argument is variably
798 modified by anything in it.
800 It would be better to do this in tree-inline.c so that we could give
801 the appropriate warning for why a function can't be inlined, but that's
802 too late since the nesting structure has already been flattened and
803 adding a flag just to record this fact seems a waste of a flag. */
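/* Illustrative example of the pathology described above (not taken from
   this file): in

       void outer (int n)
       {
         typedef int row[n];
         void inner (row *p) { (*p)[0] = 0; }
         row r;
         inner (&r);
       }

   the type of INNER's parameter is variably modified by N.  Inlining
   OUTER would remap N in the inlined body but not in INNER's PARM_DECL
   type, so OUTER is simply marked DECL_UNINLINABLE instead.  */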
805 static bool
806 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
808 struct cgraph_node *cgn = cgraph_node::get (fndecl);
809 tree arg;
811 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
813 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
814 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
815 return true;
817 if (check_for_nested_with_variably_modified (cgn->decl,
818 orig_fndecl))
819 return true;
822 return false;
825 /* Construct our local datastructure describing the function nesting
826 tree rooted by CGN. */
828 static struct nesting_info *
829 create_nesting_tree (struct cgraph_node *cgn)
831 struct nesting_info *info = XCNEW (struct nesting_info);
832 info->field_map = new hash_map<tree, tree>;
833 info->var_map = new hash_map<tree, tree>;
834 info->mem_refs = new hash_set<tree *>;
835 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
836 info->context = cgn->decl;
838 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
840 struct nesting_info *sub = create_nesting_tree (cgn);
841 sub->outer = info;
842 sub->next = info->inner;
843 info->inner = sub;
 846 /* See the comment at check_for_nested_with_variably_modified for a
847 discussion of why this has to be here. */
848 if (check_for_nested_with_variably_modified (info->context, info->context))
849 DECL_UNINLINABLE (info->context) = true;
851 return info;
854 /* Return an expression computing the static chain for TARGET_CONTEXT
855 from INFO->CONTEXT. Insert any necessary computations before TSI. */
857 static tree
858 get_static_chain (struct nesting_info *info, tree target_context,
859 gimple_stmt_iterator *gsi)
861 struct nesting_info *i;
862 tree x;
864 if (info->context == target_context)
866 x = build_addr (info->frame_decl);
867 info->static_chain_added |= 1;
869 else
871 x = get_chain_decl (info);
872 info->static_chain_added |= 2;
874 for (i = info->outer; i->context != target_context; i = i->outer)
876 tree field = get_chain_field (i);
878 x = build_simple_mem_ref (x);
879 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
880 x = init_tmp_var (info, x, gsi);
884 return x;
888 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
889 frame as seen from INFO->CONTEXT. Insert any necessary computations
890 before GSI. */
892 static tree
893 get_frame_field (struct nesting_info *info, tree target_context,
894 tree field, gimple_stmt_iterator *gsi)
896 struct nesting_info *i;
897 tree x;
899 if (info->context == target_context)
901 /* Make sure frame_decl gets created. */
902 (void) get_frame_type (info);
903 x = info->frame_decl;
904 info->static_chain_added |= 1;
906 else
908 x = get_chain_decl (info);
909 info->static_chain_added |= 2;
911 for (i = info->outer; i->context != target_context; i = i->outer)
913 tree field = get_chain_field (i);
915 x = build_simple_mem_ref (x);
916 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
917 x = init_tmp_var (info, x, gsi);
920 x = build_simple_mem_ref (x);
923 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
924 return x;
927 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
929 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
930 in the nested function with DECL_VALUE_EXPR set to reference the true
931 variable in the parent function. This is used both for debug info
932 and in OMP lowering. */
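/* Illustrative effect for a variable FOO declared one level up (not
   literal output of this pass): the new VAR_DECL gets

       DECL_VALUE_EXPR (new_decl) == CHAIN->foo

   so debug info and OMP lowering can name NEW_DECL while the storage
   itself stays in the parent's frame; deeper nesting adds further
   __chain dereferences, as in get_frame_field above.  */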
934 static tree
935 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
937 tree target_context;
938 struct nesting_info *i;
939 tree x, field, new_decl;
941 tree *slot = &info->var_map->get_or_insert (decl);
943 if (*slot)
944 return *slot;
946 target_context = decl_function_context (decl);
948 /* A copy of the code in get_frame_field, but without the temporaries. */
949 if (info->context == target_context)
951 /* Make sure frame_decl gets created. */
952 (void) get_frame_type (info);
953 x = info->frame_decl;
954 i = info;
955 info->static_chain_added |= 1;
957 else
959 x = get_chain_decl (info);
960 info->static_chain_added |= 2;
961 for (i = info->outer; i->context != target_context; i = i->outer)
963 field = get_chain_field (i);
964 x = build_simple_mem_ref (x);
965 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
967 x = build_simple_mem_ref (x);
970 field = lookup_field_for_decl (i, decl, INSERT);
971 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
972 if (use_pointer_in_frame (decl))
973 x = build_simple_mem_ref (x);
975 /* ??? We should be remapping types as well, surely. */
976 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
977 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
978 DECL_CONTEXT (new_decl) = info->context;
979 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
980 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
981 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
982 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
983 TREE_READONLY (new_decl) = TREE_READONLY (decl);
984 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
985 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
986 if ((TREE_CODE (decl) == PARM_DECL
987 || TREE_CODE (decl) == RESULT_DECL
988 || VAR_P (decl))
989 && DECL_BY_REFERENCE (decl))
990 DECL_BY_REFERENCE (new_decl) = 1;
992 SET_DECL_VALUE_EXPR (new_decl, x);
993 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
995 *slot = new_decl;
996 DECL_CHAIN (new_decl) = info->debug_var_chain;
997 info->debug_var_chain = new_decl;
999 if (!optimize
1000 && info->context != target_context
1001 && variably_modified_type_p (TREE_TYPE (decl), NULL))
1002 note_nonlocal_vla_type (info, TREE_TYPE (decl));
1004 return new_decl;
1008 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1009 and PARM_DECLs that belong to outer functions.
1011 The rewrite will involve some number of structure accesses back up
1012 the static chain. E.g. for a variable FOO up one nesting level it'll
1013 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
1014 indirections apply to decls for which use_pointer_in_frame is true. */
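/* As an illustrative sketch in C terms (not literal output of this pass;
   FRAME_mid, foo_type and the temporaries are made up), a read of FOO
   declared two levels up is rewritten roughly as

       struct FRAME_mid *tmp1 = CHAIN->__chain;
       foo_type tmp2 = tmp1->foo;

   with each load materialized into a temporary via init_tmp_var so that
   every operand remains a valid GIMPLE value.  */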
1016 static tree
1017 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
1019 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1020 struct nesting_info *const info = (struct nesting_info *) wi->info;
1021 tree t = *tp;
1023 *walk_subtrees = 0;
1024 switch (TREE_CODE (t))
1026 case VAR_DECL:
1027 /* Non-automatic variables are never processed. */
1028 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1029 break;
1030 /* FALLTHRU */
1032 case PARM_DECL:
1034 tree x, target_context = decl_function_context (t);
1036 if (info->context == target_context)
1037 break;
1039 wi->changed = true;
1041 if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1042 x = get_nonlocal_debug_decl (info, t);
1043 else
1045 struct nesting_info *i = info;
1046 while (i && i->context != target_context)
1047 i = i->outer;
1048 /* If none of the outer contexts is the target context, this means
1049 that the VAR or PARM_DECL is referenced in a wrong context. */
1050 if (!i)
1051 internal_error ("%s from %s referenced in %s",
1052 IDENTIFIER_POINTER (DECL_NAME (t)),
1053 IDENTIFIER_POINTER (DECL_NAME (target_context)),
1054 IDENTIFIER_POINTER (DECL_NAME (info->context)));
1056 x = lookup_field_for_decl (i, t, INSERT);
1057 x = get_frame_field (info, target_context, x, &wi->gsi);
1058 if (use_pointer_in_frame (t))
1060 x = init_tmp_var (info, x, &wi->gsi);
1061 x = build_simple_mem_ref (x);
1065 if (wi->val_only)
1067 if (wi->is_lhs)
1068 x = save_tmp_var (info, x, &wi->gsi);
1069 else
1070 x = init_tmp_var (info, x, &wi->gsi);
1073 *tp = x;
1075 break;
1077 case LABEL_DECL:
1078 /* We're taking the address of a label from a parent function, but
1079 this is not itself a non-local goto. Mark the label such that it
1080 will not be deleted, much as we would with a label address in
1081 static storage. */
1082 if (decl_function_context (t) != info->context)
1083 FORCED_LABEL (t) = 1;
1084 break;
1086 case ADDR_EXPR:
1088 bool save_val_only = wi->val_only;
1090 wi->val_only = false;
1091 wi->is_lhs = false;
1092 wi->changed = false;
1093 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
1094 wi->val_only = true;
1096 if (wi->changed)
1098 tree save_context;
1100 /* If we changed anything, we might no longer be directly
1101 referencing a decl. */
1102 save_context = current_function_decl;
1103 current_function_decl = info->context;
1104 recompute_tree_invariant_for_addr_expr (t);
1105 current_function_decl = save_context;
1107 /* If the callback converted the address argument in a context
1108 where we only accept variables (and min_invariant, presumably),
1109 then compute the address into a temporary. */
1110 if (save_val_only)
1111 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1112 t, &wi->gsi);
1115 break;
1117 case REALPART_EXPR:
1118 case IMAGPART_EXPR:
1119 case COMPONENT_REF:
1120 case ARRAY_REF:
1121 case ARRAY_RANGE_REF:
1122 case BIT_FIELD_REF:
1123 /* Go down this entire nest and just look at the final prefix and
1124 anything that describes the references. Otherwise, we lose track
1125 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1126 wi->val_only = true;
1127 wi->is_lhs = false;
1128 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1130 if (TREE_CODE (t) == COMPONENT_REF)
1131 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1132 NULL);
1133 else if (TREE_CODE (t) == ARRAY_REF
1134 || TREE_CODE (t) == ARRAY_RANGE_REF)
1136 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1137 wi, NULL);
1138 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1139 wi, NULL);
1140 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1141 wi, NULL);
1144 wi->val_only = false;
1145 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1146 break;
1148 case VIEW_CONVERT_EXPR:
1149 /* Just request to look at the subtrees, leaving val_only and lhs
1150 untouched. This might actually be for !val_only + lhs, in which
1151 case we don't want to force a replacement by a temporary. */
1152 *walk_subtrees = 1;
1153 break;
1155 default:
1156 if (!IS_TYPE_OR_DECL_P (t))
1158 *walk_subtrees = 1;
1159 wi->val_only = true;
1160 wi->is_lhs = false;
1162 break;
1165 return NULL_TREE;
1168 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1169 struct walk_stmt_info *);
1171 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1172 and PARM_DECLs that belong to outer functions. */
1174 static bool
1175 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1177 struct nesting_info *const info = (struct nesting_info *) wi->info;
1178 bool need_chain = false, need_stmts = false;
1179 tree clause, decl;
1180 int dummy;
1181 bitmap new_suppress;
1183 new_suppress = BITMAP_GGC_ALLOC ();
1184 bitmap_copy (new_suppress, info->suppress_expansion);
1186 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1188 switch (OMP_CLAUSE_CODE (clause))
1190 case OMP_CLAUSE_REDUCTION:
1191 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1192 need_stmts = true;
1193 goto do_decl_clause;
1195 case OMP_CLAUSE_LASTPRIVATE:
1196 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1197 need_stmts = true;
1198 goto do_decl_clause;
1200 case OMP_CLAUSE_LINEAR:
1201 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1202 need_stmts = true;
1203 wi->val_only = true;
1204 wi->is_lhs = false;
1205 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1206 &dummy, wi);
1207 goto do_decl_clause;
1209 case OMP_CLAUSE_PRIVATE:
1210 case OMP_CLAUSE_FIRSTPRIVATE:
1211 case OMP_CLAUSE_COPYPRIVATE:
1212 case OMP_CLAUSE_SHARED:
1213 case OMP_CLAUSE_TO_DECLARE:
1214 case OMP_CLAUSE_LINK:
1215 case OMP_CLAUSE_USE_DEVICE_PTR:
1216 case OMP_CLAUSE_IS_DEVICE_PTR:
1217 do_decl_clause:
1218 decl = OMP_CLAUSE_DECL (clause);
1219 if (VAR_P (decl)
1220 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1221 break;
1222 if (decl_function_context (decl) != info->context)
1224 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1225 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1226 bitmap_set_bit (new_suppress, DECL_UID (decl));
1227 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1228 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1229 need_chain = true;
1231 break;
1233 case OMP_CLAUSE_SCHEDULE:
1234 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1235 break;
1236 /* FALLTHRU */
1237 case OMP_CLAUSE_FINAL:
1238 case OMP_CLAUSE_IF:
1239 case OMP_CLAUSE_NUM_THREADS:
1240 case OMP_CLAUSE_DEPEND:
1241 case OMP_CLAUSE_DEVICE:
1242 case OMP_CLAUSE_NUM_TEAMS:
1243 case OMP_CLAUSE_THREAD_LIMIT:
1244 case OMP_CLAUSE_SAFELEN:
1245 case OMP_CLAUSE_SIMDLEN:
1246 case OMP_CLAUSE_PRIORITY:
1247 case OMP_CLAUSE_GRAINSIZE:
1248 case OMP_CLAUSE_NUM_TASKS:
1249 case OMP_CLAUSE_HINT:
1250 case OMP_CLAUSE_NUM_GANGS:
1251 case OMP_CLAUSE_NUM_WORKERS:
1252 case OMP_CLAUSE_VECTOR_LENGTH:
1253 case OMP_CLAUSE_GANG:
1254 case OMP_CLAUSE_WORKER:
1255 case OMP_CLAUSE_VECTOR:
1256 case OMP_CLAUSE_ASYNC:
1257 case OMP_CLAUSE_WAIT:
1258 /* Several OpenACC clauses have optional arguments. Check if they
1259 are present. */
1260 if (OMP_CLAUSE_OPERAND (clause, 0))
1262 wi->val_only = true;
1263 wi->is_lhs = false;
1264 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1265 &dummy, wi);
1268 /* The gang clause accepts two arguments. */
1269 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1270 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1272 wi->val_only = true;
1273 wi->is_lhs = false;
1274 convert_nonlocal_reference_op
1275 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1277 break;
1279 case OMP_CLAUSE_DIST_SCHEDULE:
1280 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1282 wi->val_only = true;
1283 wi->is_lhs = false;
1284 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1285 &dummy, wi);
1287 break;
1289 case OMP_CLAUSE_MAP:
1290 case OMP_CLAUSE_TO:
1291 case OMP_CLAUSE_FROM:
1292 if (OMP_CLAUSE_SIZE (clause))
1294 wi->val_only = true;
1295 wi->is_lhs = false;
1296 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1297 &dummy, wi);
1299 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1300 goto do_decl_clause;
1301 wi->val_only = true;
1302 wi->is_lhs = false;
1303 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1304 wi, NULL);
1305 break;
1307 case OMP_CLAUSE_ALIGNED:
1308 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1310 wi->val_only = true;
1311 wi->is_lhs = false;
1312 convert_nonlocal_reference_op
1313 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1315 /* Like do_decl_clause, but don't add any suppression. */
1316 decl = OMP_CLAUSE_DECL (clause);
1317 if (VAR_P (decl)
1318 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1319 break;
1320 if (decl_function_context (decl) != info->context)
1322 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1323 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1324 need_chain = true;
1326 break;
1328 case OMP_CLAUSE_NOWAIT:
1329 case OMP_CLAUSE_ORDERED:
1330 case OMP_CLAUSE_DEFAULT:
1331 case OMP_CLAUSE_COPYIN:
1332 case OMP_CLAUSE_COLLAPSE:
1333 case OMP_CLAUSE_TILE:
1334 case OMP_CLAUSE_UNTIED:
1335 case OMP_CLAUSE_MERGEABLE:
1336 case OMP_CLAUSE_PROC_BIND:
1337 case OMP_CLAUSE_NOGROUP:
1338 case OMP_CLAUSE_THREADS:
1339 case OMP_CLAUSE_SIMD:
1340 case OMP_CLAUSE_DEFAULTMAP:
1341 case OMP_CLAUSE_SEQ:
1342 case OMP_CLAUSE_INDEPENDENT:
1343 case OMP_CLAUSE_AUTO:
1344 case OMP_CLAUSE_IF_PRESENT:
1345 case OMP_CLAUSE_FINALIZE:
1346 break;
1348 /* The following clause belongs to the OpenACC cache directive, which
1349 is discarded during gimplification. */
1350 case OMP_CLAUSE__CACHE_:
1351 /* The following clauses are only allowed in the OpenMP declare simd
1352 directive, so not seen here. */
1353 case OMP_CLAUSE_UNIFORM:
1354 case OMP_CLAUSE_INBRANCH:
1355 case OMP_CLAUSE_NOTINBRANCH:
1356 /* The following clauses are only allowed on OpenMP cancel and
1357 cancellation point directives, which at this point have already
1358 been lowered into a function call. */
1359 case OMP_CLAUSE_FOR:
1360 case OMP_CLAUSE_PARALLEL:
1361 case OMP_CLAUSE_SECTIONS:
1362 case OMP_CLAUSE_TASKGROUP:
1363 /* The following clauses are only added during OMP lowering; nested
1364 function decomposition happens before that. */
1365 case OMP_CLAUSE__LOOPTEMP_:
1366 case OMP_CLAUSE__SIMDUID_:
1367 case OMP_CLAUSE__GRIDDIM_:
1368 /* Anything else. */
1369 default:
1370 gcc_unreachable ();
1374 info->suppress_expansion = new_suppress;
1376 if (need_stmts)
1377 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1378 switch (OMP_CLAUSE_CODE (clause))
1380 case OMP_CLAUSE_REDUCTION:
1381 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1383 tree old_context
1384 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1385 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1386 = info->context;
1387 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1388 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1389 = info->context;
1390 walk_body (convert_nonlocal_reference_stmt,
1391 convert_nonlocal_reference_op, info,
1392 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1393 walk_body (convert_nonlocal_reference_stmt,
1394 convert_nonlocal_reference_op, info,
1395 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1396 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1397 = old_context;
1398 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1399 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1400 = old_context;
1402 break;
1404 case OMP_CLAUSE_LASTPRIVATE:
1405 walk_body (convert_nonlocal_reference_stmt,
1406 convert_nonlocal_reference_op, info,
1407 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1408 break;
1410 case OMP_CLAUSE_LINEAR:
1411 walk_body (convert_nonlocal_reference_stmt,
1412 convert_nonlocal_reference_op, info,
1413 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1414 break;
1416 default:
1417 break;
1420 return need_chain;
1423 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1425 static void
1426 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1428 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1429 type = TREE_TYPE (type);
1431 if (TYPE_NAME (type)
1432 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1433 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1434 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1436 while (POINTER_TYPE_P (type)
1437 || TREE_CODE (type) == VECTOR_TYPE
1438 || TREE_CODE (type) == FUNCTION_TYPE
1439 || TREE_CODE (type) == METHOD_TYPE)
1440 type = TREE_TYPE (type);
1442 if (TREE_CODE (type) == ARRAY_TYPE)
1444 tree domain, t;
1446 note_nonlocal_vla_type (info, TREE_TYPE (type));
1447 domain = TYPE_DOMAIN (type);
1448 if (domain)
1450 t = TYPE_MIN_VALUE (domain);
1451 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1452 && decl_function_context (t) != info->context)
1453 get_nonlocal_debug_decl (info, t);
1454 t = TYPE_MAX_VALUE (domain);
1455 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1456 && decl_function_context (t) != info->context)
1457 get_nonlocal_debug_decl (info, t);
1462 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1463 PARM_DECLs that belong to outer functions. This handles statements
1464 that are not handled via the standard recursion done in
1465 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1466 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1467 operands of STMT have been handled by this function. */
1469 static tree
1470 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1471 struct walk_stmt_info *wi)
1473 struct nesting_info *info = (struct nesting_info *) wi->info;
1474 tree save_local_var_chain;
1475 bitmap save_suppress;
1476 gimple *stmt = gsi_stmt (*gsi);
1478 switch (gimple_code (stmt))
1480 case GIMPLE_GOTO:
1481 /* Don't walk non-local gotos for now. */
1482 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1484 wi->val_only = true;
1485 wi->is_lhs = false;
1486 *handled_ops_p = false;
1487 return NULL_TREE;
1489 break;
1491 case GIMPLE_OMP_PARALLEL:
1492 case GIMPLE_OMP_TASK:
1493 save_suppress = info->suppress_expansion;
1494 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1495 wi))
1497 tree c, decl;
1498 decl = get_chain_decl (info);
1499 c = build_omp_clause (gimple_location (stmt),
1500 OMP_CLAUSE_FIRSTPRIVATE);
1501 OMP_CLAUSE_DECL (c) = decl;
1502 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1503 gimple_omp_taskreg_set_clauses (stmt, c);
1506 save_local_var_chain = info->new_local_var_chain;
1507 info->new_local_var_chain = NULL;
1509 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1510 info, gimple_omp_body_ptr (stmt));
1512 if (info->new_local_var_chain)
1513 declare_vars (info->new_local_var_chain,
1514 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1515 false);
1516 info->new_local_var_chain = save_local_var_chain;
1517 info->suppress_expansion = save_suppress;
1518 break;
1520 case GIMPLE_OMP_FOR:
1521 save_suppress = info->suppress_expansion;
1522 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1523 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1524 convert_nonlocal_reference_stmt,
1525 convert_nonlocal_reference_op, info);
1526 walk_body (convert_nonlocal_reference_stmt,
1527 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1528 info->suppress_expansion = save_suppress;
1529 break;
1531 case GIMPLE_OMP_SECTIONS:
1532 save_suppress = info->suppress_expansion;
1533 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1534 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1535 info, gimple_omp_body_ptr (stmt));
1536 info->suppress_expansion = save_suppress;
1537 break;
1539 case GIMPLE_OMP_SINGLE:
1540 save_suppress = info->suppress_expansion;
1541 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1542 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1543 info, gimple_omp_body_ptr (stmt));
1544 info->suppress_expansion = save_suppress;
1545 break;
1547 case GIMPLE_OMP_TARGET:
1548 if (!is_gimple_omp_offloaded (stmt))
1550 save_suppress = info->suppress_expansion;
1551 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1552 wi);
1553 info->suppress_expansion = save_suppress;
1554 walk_body (convert_nonlocal_reference_stmt,
1555 convert_nonlocal_reference_op, info,
1556 gimple_omp_body_ptr (stmt));
1557 break;
1559 save_suppress = info->suppress_expansion;
1560 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1561 wi))
1563 tree c, decl;
1564 decl = get_chain_decl (info);
1565 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1566 OMP_CLAUSE_DECL (c) = decl;
1567 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1568 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1569 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1570 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1573 save_local_var_chain = info->new_local_var_chain;
1574 info->new_local_var_chain = NULL;
1576 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1577 info, gimple_omp_body_ptr (stmt));
1579 if (info->new_local_var_chain)
1580 declare_vars (info->new_local_var_chain,
1581 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1582 false);
1583 info->new_local_var_chain = save_local_var_chain;
1584 info->suppress_expansion = save_suppress;
1585 break;
1587 case GIMPLE_OMP_TEAMS:
1588 save_suppress = info->suppress_expansion;
1589 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1590 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1591 info, gimple_omp_body_ptr (stmt));
1592 info->suppress_expansion = save_suppress;
1593 break;
1595 case GIMPLE_OMP_SECTION:
1596 case GIMPLE_OMP_MASTER:
1597 case GIMPLE_OMP_TASKGROUP:
1598 case GIMPLE_OMP_ORDERED:
1599 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1600 info, gimple_omp_body_ptr (stmt));
1601 break;
1603 case GIMPLE_BIND:
1605 gbind *bind_stmt = as_a <gbind *> (stmt);
1607 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1608 if (TREE_CODE (var) == NAMELIST_DECL)
1610 /* Adjust decls mentioned in NAMELIST_DECL. */
1611 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1612 tree decl;
1613 unsigned int i;
1615 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1617 if (VAR_P (decl)
1618 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1619 continue;
1620 if (decl_function_context (decl) != info->context)
1621 CONSTRUCTOR_ELT (decls, i)->value
1622 = get_nonlocal_debug_decl (info, decl);
1626 *handled_ops_p = false;
1627 return NULL_TREE;
1629 case GIMPLE_COND:
1630 wi->val_only = true;
1631 wi->is_lhs = false;
1632 *handled_ops_p = false;
1633 return NULL_TREE;
1635 default:
1636 /* For every other statement that we are not interested in
1637 handling here, let the walker traverse the operands. */
1638 *handled_ops_p = false;
1639 return NULL_TREE;
1642 /* We have handled all of STMT operands, no need to traverse the operands. */
1643 *handled_ops_p = true;
1644 return NULL_TREE;
1648 /* A subroutine of convert_local_reference. Create a local variable
1649 in the parent function with DECL_VALUE_EXPR set to reference the
1650 field in FRAME. This is used both for debug info and in OMP
1651 lowering. */
1653 static tree
1654 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1656 tree x, new_decl;
1658 tree *slot = &info->var_map->get_or_insert (decl);
1659 if (*slot)
1660 return *slot;
1662 /* Make sure frame_decl gets created. */
1663 (void) get_frame_type (info);
1664 x = info->frame_decl;
1665 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1667 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1668 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1669 DECL_CONTEXT (new_decl) = info->context;
1670 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1671 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1672 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1673 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1674 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1675 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1676 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1677 if ((TREE_CODE (decl) == PARM_DECL
1678 || TREE_CODE (decl) == RESULT_DECL
1679 || VAR_P (decl))
1680 && DECL_BY_REFERENCE (decl))
1681 DECL_BY_REFERENCE (new_decl) = 1;
1683 SET_DECL_VALUE_EXPR (new_decl, x);
1684 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1685 *slot = new_decl;
1687 DECL_CHAIN (new_decl) = info->debug_var_chain;
1688 info->debug_var_chain = new_decl;
1690 /* Do not emit debug info twice. */
1691 DECL_IGNORED_P (decl) = 1;
1693 return new_decl;
1697 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1698 and PARM_DECLs that were referenced by inner nested functions.
1699 The rewrite will be a structure reference to the local frame variable. */
1701 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1703 static tree
1704 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1706 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1707 struct nesting_info *const info = (struct nesting_info *) wi->info;
1708 tree t = *tp, field, x;
1709 bool save_val_only;
1711 *walk_subtrees = 0;
1712 switch (TREE_CODE (t))
1714 case VAR_DECL:
1715 /* Non-automatic variables are never processed. */
1716 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1717 break;
1718 /* FALLTHRU */
1720 case PARM_DECL:
1721 if (t != info->frame_decl && decl_function_context (t) == info->context)
1723 /* If we copied a pointer to the frame, then the original decl
1724 is used unchanged in the parent function. */
1725 if (use_pointer_in_frame (t))
1726 break;
1728 /* No need to transform anything if no child references the
1729 variable. */
1730 field = lookup_field_for_decl (info, t, NO_INSERT);
1731 if (!field)
1732 break;
1733 wi->changed = true;
1735 if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1736 x = get_local_debug_decl (info, t, field);
1737 else
1738 x = get_frame_field (info, info->context, field, &wi->gsi);
1740 if (wi->val_only)
1742 if (wi->is_lhs)
1743 x = save_tmp_var (info, x, &wi->gsi);
1744 else
1745 x = init_tmp_var (info, x, &wi->gsi);
1748 *tp = x;
1750 break;
1752 case ADDR_EXPR:
1753 save_val_only = wi->val_only;
1754 wi->val_only = false;
1755 wi->is_lhs = false;
1756 wi->changed = false;
1757 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1758 wi->val_only = save_val_only;
1760 /* If we converted anything ... */
1761 if (wi->changed)
1763 tree save_context;
1765 /* Then the frame decl is now addressable. */
1766 TREE_ADDRESSABLE (info->frame_decl) = 1;
1768 save_context = current_function_decl;
1769 current_function_decl = info->context;
1770 recompute_tree_invariant_for_addr_expr (t);
1771 current_function_decl = save_context;
1773 /* If we are in a context where we only accept values, then
1774 compute the address into a temporary. */
1775 if (save_val_only)
1776 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1777 t, &wi->gsi);
1779 break;
1781 case REALPART_EXPR:
1782 case IMAGPART_EXPR:
1783 case COMPONENT_REF:
1784 case ARRAY_REF:
1785 case ARRAY_RANGE_REF:
1786 case BIT_FIELD_REF:
1787 /* Go down this entire nest and just look at the final prefix and
1788 anything that describes the references. Otherwise, we lose track
1789 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1790 save_val_only = wi->val_only;
1791 wi->val_only = true;
1792 wi->is_lhs = false;
1793 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1795 if (TREE_CODE (t) == COMPONENT_REF)
1796 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1797 NULL);
1798 else if (TREE_CODE (t) == ARRAY_REF
1799 || TREE_CODE (t) == ARRAY_RANGE_REF)
1801 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1802 NULL);
1803 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1804 NULL);
1805 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1806 NULL);
1809 wi->val_only = false;
1810 walk_tree (tp, convert_local_reference_op, wi, NULL);
1811 wi->val_only = save_val_only;
1812 break;
1814 case MEM_REF:
1815 save_val_only = wi->val_only;
1816 wi->val_only = true;
1817 wi->is_lhs = false;
1818 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1819 wi, NULL);
1820 /* We need to re-fold the MEM_REF as component references as
 1821 part of an ADDR_EXPR address are not allowed. But we cannot
1822 fold here, as the chain record type is not yet finalized. */
1823 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1824 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1825 info->mem_refs->add (tp);
1826 wi->val_only = save_val_only;
1827 break;
1829 case VIEW_CONVERT_EXPR:
1830 /* Just request to look at the subtrees, leaving val_only and lhs
1831 untouched. This might actually be for !val_only + lhs, in which
1832 case we don't want to force a replacement by a temporary. */
1833 *walk_subtrees = 1;
1834 break;
1836 default:
1837 if (!IS_TYPE_OR_DECL_P (t))
1839 *walk_subtrees = 1;
1840 wi->val_only = true;
1841 wi->is_lhs = false;
1843 break;
1846 return NULL_TREE;
1849 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1850 struct walk_stmt_info *);
1852 /* Helper for convert_local_reference. Convert all the references in
1853 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
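/* Illustrative sketch, not part of the original sources: given

     void outer (void)
     {
       int x = 0;
       void inner (void) { x++; }
       #pragma omp parallel shared(x)
       x++;
       inner ();
     }

   'x' has a field in outer's frame struct, so the shared(x) clause is
   rewritten to name a local debug decl for that field, expansion of 'x'
   inside the parallel body is suppressed, and the enclosing statement
   gains a shared(FRAME) clause (see convert_local_reference_stmt below).
   'FRAME' is a schematic name for info->frame_decl.  */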
1855 static bool
1856 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1858 struct nesting_info *const info = (struct nesting_info *) wi->info;
1859 bool need_frame = false, need_stmts = false;
1860 tree clause, decl;
1861 int dummy;
1862 bitmap new_suppress;
1864 new_suppress = BITMAP_GGC_ALLOC ();
1865 bitmap_copy (new_suppress, info->suppress_expansion);
1867 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1869 switch (OMP_CLAUSE_CODE (clause))
1871 case OMP_CLAUSE_REDUCTION:
1872 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1873 need_stmts = true;
1874 goto do_decl_clause;
1876 case OMP_CLAUSE_LASTPRIVATE:
1877 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1878 need_stmts = true;
1879 goto do_decl_clause;
1881 case OMP_CLAUSE_LINEAR:
1882 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1883 need_stmts = true;
1884 wi->val_only = true;
1885 wi->is_lhs = false;
1886 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1887 wi);
1888 goto do_decl_clause;
1890 case OMP_CLAUSE_PRIVATE:
1891 case OMP_CLAUSE_FIRSTPRIVATE:
1892 case OMP_CLAUSE_COPYPRIVATE:
1893 case OMP_CLAUSE_SHARED:
1894 case OMP_CLAUSE_TO_DECLARE:
1895 case OMP_CLAUSE_LINK:
1896 case OMP_CLAUSE_USE_DEVICE_PTR:
1897 case OMP_CLAUSE_IS_DEVICE_PTR:
1898 do_decl_clause:
1899 decl = OMP_CLAUSE_DECL (clause);
1900 if (VAR_P (decl)
1901 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1902 break;
1903 if (decl_function_context (decl) == info->context
1904 && !use_pointer_in_frame (decl))
1906 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1907 if (field)
1909 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1910 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1911 bitmap_set_bit (new_suppress, DECL_UID (decl));
1912 OMP_CLAUSE_DECL (clause)
1913 = get_local_debug_decl (info, decl, field);
1914 need_frame = true;
1917 break;
1919 case OMP_CLAUSE_SCHEDULE:
1920 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1921 break;
1922 /* FALLTHRU */
1923 case OMP_CLAUSE_FINAL:
1924 case OMP_CLAUSE_IF:
1925 case OMP_CLAUSE_NUM_THREADS:
1926 case OMP_CLAUSE_DEPEND:
1927 case OMP_CLAUSE_DEVICE:
1928 case OMP_CLAUSE_NUM_TEAMS:
1929 case OMP_CLAUSE_THREAD_LIMIT:
1930 case OMP_CLAUSE_SAFELEN:
1931 case OMP_CLAUSE_SIMDLEN:
1932 case OMP_CLAUSE_PRIORITY:
1933 case OMP_CLAUSE_GRAINSIZE:
1934 case OMP_CLAUSE_NUM_TASKS:
1935 case OMP_CLAUSE_HINT:
1936 case OMP_CLAUSE_NUM_GANGS:
1937 case OMP_CLAUSE_NUM_WORKERS:
1938 case OMP_CLAUSE_VECTOR_LENGTH:
1939 case OMP_CLAUSE_GANG:
1940 case OMP_CLAUSE_WORKER:
1941 case OMP_CLAUSE_VECTOR:
1942 case OMP_CLAUSE_ASYNC:
1943 case OMP_CLAUSE_WAIT:
1944 /* Several OpenACC clauses have optional arguments. Check if they
1945 are present. */
1946 if (OMP_CLAUSE_OPERAND (clause, 0))
1948 wi->val_only = true;
1949 wi->is_lhs = false;
1950 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1951 &dummy, wi);
1954 /* The gang clause accepts two arguments. */
1955 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1956 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1958 wi->val_only = true;
1959 wi->is_lhs = false;
1960 convert_nonlocal_reference_op
1961 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1963 break;
1965 case OMP_CLAUSE_DIST_SCHEDULE:
1966 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1968 wi->val_only = true;
1969 wi->is_lhs = false;
1970 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1971 &dummy, wi);
1973 break;
1975 case OMP_CLAUSE_MAP:
1976 case OMP_CLAUSE_TO:
1977 case OMP_CLAUSE_FROM:
1978 if (OMP_CLAUSE_SIZE (clause))
1980 wi->val_only = true;
1981 wi->is_lhs = false;
1982 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1983 &dummy, wi);
1985 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1986 goto do_decl_clause;
1987 wi->val_only = true;
1988 wi->is_lhs = false;
1989 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1990 wi, NULL);
1991 break;
1993 case OMP_CLAUSE_ALIGNED:
1994 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1996 wi->val_only = true;
1997 wi->is_lhs = false;
1998 convert_local_reference_op
1999 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2001 /* Like do_decl_clause, but don't add any suppression. */
2002 decl = OMP_CLAUSE_DECL (clause);
2003 if (VAR_P (decl)
2004 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2005 break;
2006 if (decl_function_context (decl) == info->context
2007 && !use_pointer_in_frame (decl))
2009 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2010 if (field)
2012 OMP_CLAUSE_DECL (clause)
2013 = get_local_debug_decl (info, decl, field);
2014 need_frame = true;
2017 break;
2019 case OMP_CLAUSE_NOWAIT:
2020 case OMP_CLAUSE_ORDERED:
2021 case OMP_CLAUSE_DEFAULT:
2022 case OMP_CLAUSE_COPYIN:
2023 case OMP_CLAUSE_COLLAPSE:
2024 case OMP_CLAUSE_TILE:
2025 case OMP_CLAUSE_UNTIED:
2026 case OMP_CLAUSE_MERGEABLE:
2027 case OMP_CLAUSE_PROC_BIND:
2028 case OMP_CLAUSE_NOGROUP:
2029 case OMP_CLAUSE_THREADS:
2030 case OMP_CLAUSE_SIMD:
2031 case OMP_CLAUSE_DEFAULTMAP:
2032 case OMP_CLAUSE_SEQ:
2033 case OMP_CLAUSE_INDEPENDENT:
2034 case OMP_CLAUSE_AUTO:
2035 case OMP_CLAUSE_IF_PRESENT:
2036 case OMP_CLAUSE_FINALIZE:
2037 break;
2039 /* The following clause belongs to the OpenACC cache directive, which
2040 is discarded during gimplification. */
2041 case OMP_CLAUSE__CACHE_:
2042 /* The following clauses are only allowed in the OpenMP declare simd
2043 directive, so not seen here. */
2044 case OMP_CLAUSE_UNIFORM:
2045 case OMP_CLAUSE_INBRANCH:
2046 case OMP_CLAUSE_NOTINBRANCH:
2047 /* The following clauses are only allowed on OpenMP cancel and
2048 cancellation point directives, which at this point have already
2049 been lowered into a function call. */
2050 case OMP_CLAUSE_FOR:
2051 case OMP_CLAUSE_PARALLEL:
2052 case OMP_CLAUSE_SECTIONS:
2053 case OMP_CLAUSE_TASKGROUP:
2054 /* The following clauses are only added during OMP lowering; nested
2055 function decomposition happens before that. */
2056 case OMP_CLAUSE__LOOPTEMP_:
2057 case OMP_CLAUSE__SIMDUID_:
2058 case OMP_CLAUSE__GRIDDIM_:
2059 /* Anything else. */
2060 default:
2061 gcc_unreachable ();
2065 info->suppress_expansion = new_suppress;
2067 if (need_stmts)
2068 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2069 switch (OMP_CLAUSE_CODE (clause))
2071 case OMP_CLAUSE_REDUCTION:
2072 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2074 tree old_context
2075 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2076 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2077 = info->context;
2078 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2079 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2080 = info->context;
2081 walk_body (convert_local_reference_stmt,
2082 convert_local_reference_op, info,
2083 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2084 walk_body (convert_local_reference_stmt,
2085 convert_local_reference_op, info,
2086 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2087 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2088 = old_context;
2089 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2090 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2091 = old_context;
2093 break;
2095 case OMP_CLAUSE_LASTPRIVATE:
2096 walk_body (convert_local_reference_stmt,
2097 convert_local_reference_op, info,
2098 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2099 break;
2101 case OMP_CLAUSE_LINEAR:
2102 walk_body (convert_local_reference_stmt,
2103 convert_local_reference_op, info,
2104 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2105 break;
2107 default:
2108 break;
2111 return need_frame;
2115 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2116 and PARM_DECLs that were referenced by inner nested functions.
2117 The rewrite will be a structure reference to the local frame variable. */
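/* Illustrative sketch, not part of the original sources: if 'x' in

     void outer (void)
     {
       int x = 0;
       void inner (void) { x = 1; }
       inner ();
     }

   has been given a field in the nonlocal frame struct, then the uses of
   'x' in outer's own body are rewritten to go through that field,
   conceptually 'FRAME.x' (or to a local debug decl when expansion is
   suppressed).  'FRAME' is schematic for info->frame_decl.  */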
2119 static tree
2120 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2121 struct walk_stmt_info *wi)
2123 struct nesting_info *info = (struct nesting_info *) wi->info;
2124 tree save_local_var_chain;
2125 bitmap save_suppress;
2126 char save_static_chain_added;
2127 bool frame_decl_added;
2128 gimple *stmt = gsi_stmt (*gsi);
2130 switch (gimple_code (stmt))
2132 case GIMPLE_OMP_PARALLEL:
2133 case GIMPLE_OMP_TASK:
2134 save_suppress = info->suppress_expansion;
2135 frame_decl_added = false;
2136 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
2137 wi))
2139 tree c = build_omp_clause (gimple_location (stmt),
2140 OMP_CLAUSE_SHARED);
2141 (void) get_frame_type (info);
2142 OMP_CLAUSE_DECL (c) = info->frame_decl;
2143 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2144 gimple_omp_taskreg_set_clauses (stmt, c);
2145 info->static_chain_added |= 4;
2146 frame_decl_added = true;
2149 save_local_var_chain = info->new_local_var_chain;
2150 save_static_chain_added = info->static_chain_added;
2151 info->new_local_var_chain = NULL;
2152 info->static_chain_added = 0;
2154 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2155 gimple_omp_body_ptr (stmt));
2157 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2159 tree c = build_omp_clause (gimple_location (stmt),
2160 OMP_CLAUSE_SHARED);
2161 (void) get_frame_type (info);
2162 OMP_CLAUSE_DECL (c) = info->frame_decl;
2163 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2164 info->static_chain_added |= 4;
2165 gimple_omp_taskreg_set_clauses (stmt, c);
2167 if (info->new_local_var_chain)
2168 declare_vars (info->new_local_var_chain,
2169 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2170 info->new_local_var_chain = save_local_var_chain;
2171 info->suppress_expansion = save_suppress;
2172 info->static_chain_added |= save_static_chain_added;
2173 break;
2175 case GIMPLE_OMP_FOR:
2176 save_suppress = info->suppress_expansion;
2177 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
2178 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
2179 convert_local_reference_stmt,
2180 convert_local_reference_op, info);
2181 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2182 info, gimple_omp_body_ptr (stmt));
2183 info->suppress_expansion = save_suppress;
2184 break;
2186 case GIMPLE_OMP_SECTIONS:
2187 save_suppress = info->suppress_expansion;
2188 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
2189 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2190 info, gimple_omp_body_ptr (stmt));
2191 info->suppress_expansion = save_suppress;
2192 break;
2194 case GIMPLE_OMP_SINGLE:
2195 save_suppress = info->suppress_expansion;
2196 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
2197 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2198 info, gimple_omp_body_ptr (stmt));
2199 info->suppress_expansion = save_suppress;
2200 break;
2202 case GIMPLE_OMP_TARGET:
2203 if (!is_gimple_omp_offloaded (stmt))
2205 save_suppress = info->suppress_expansion;
2206 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
2207 info->suppress_expansion = save_suppress;
2208 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2209 info, gimple_omp_body_ptr (stmt));
2210 break;
2212 save_suppress = info->suppress_expansion;
2213 frame_decl_added = false;
2214 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
2216 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2217 (void) get_frame_type (info);
2218 OMP_CLAUSE_DECL (c) = info->frame_decl;
2219 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2220 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2221 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2222 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2223 info->static_chain_added |= 4;
2224 frame_decl_added = true;
2227 save_local_var_chain = info->new_local_var_chain;
2228 save_static_chain_added = info->static_chain_added;
2229 info->new_local_var_chain = NULL;
2230 info->static_chain_added = 0;
2232 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2233 gimple_omp_body_ptr (stmt));
2235 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2237 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2238 (void) get_frame_type (info);
2239 OMP_CLAUSE_DECL (c) = info->frame_decl;
2240 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2241 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2242 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2243 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2244 info->static_chain_added |= 4;
2247 if (info->new_local_var_chain)
2248 declare_vars (info->new_local_var_chain,
2249 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2250 info->new_local_var_chain = save_local_var_chain;
2251 info->suppress_expansion = save_suppress;
2252 info->static_chain_added |= save_static_chain_added;
2253 break;
2255 case GIMPLE_OMP_TEAMS:
2256 save_suppress = info->suppress_expansion;
2257 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2258 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2259 info, gimple_omp_body_ptr (stmt));
2260 info->suppress_expansion = save_suppress;
2261 break;
2263 case GIMPLE_OMP_SECTION:
2264 case GIMPLE_OMP_MASTER:
2265 case GIMPLE_OMP_TASKGROUP:
2266 case GIMPLE_OMP_ORDERED:
2267 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2268 info, gimple_omp_body_ptr (stmt));
2269 break;
2271 case GIMPLE_COND:
2272 wi->val_only = true;
2273 wi->is_lhs = false;
2274 *handled_ops_p = false;
2275 return NULL_TREE;
2277 case GIMPLE_ASSIGN:
2278 if (gimple_clobber_p (stmt))
2280 tree lhs = gimple_assign_lhs (stmt);
2281 if (!use_pointer_in_frame (lhs)
2282 && lookup_field_for_decl (info, lhs, NO_INSERT))
2284 gsi_replace (gsi, gimple_build_nop (), true);
2285 break;
2288 *handled_ops_p = false;
2289 return NULL_TREE;
2291 case GIMPLE_BIND:
2292 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2293 var;
2294 var = DECL_CHAIN (var))
2295 if (TREE_CODE (var) == NAMELIST_DECL)
2297 /* Adjust decls mentioned in NAMELIST_DECL. */
2298 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2299 tree decl;
2300 unsigned int i;
2302 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2304 if (VAR_P (decl)
2305 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2306 continue;
2307 if (decl_function_context (decl) == info->context
2308 && !use_pointer_in_frame (decl))
2310 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2311 if (field)
2313 CONSTRUCTOR_ELT (decls, i)->value
2314 = get_local_debug_decl (info, decl, field);
2320 *handled_ops_p = false;
2321 return NULL_TREE;
2323 default:
2324 /* For every other statement that we are not interested in
2325 handling here, let the walker traverse the operands. */
2326 *handled_ops_p = false;
2327 return NULL_TREE;
2330 /* Indicate that we have handled all the operands ourselves. */
2331 *handled_ops_p = true;
2332 return NULL_TREE;
2336 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2337 that reference labels from outer functions. The rewrite will be a
2338 call to __builtin_nonlocal_goto. */
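/* Illustrative sketch, not part of the original sources:

     void outer (void)
     {
       void inner (void) { goto out; }
       inner ();
     out:;
     }

   The 'goto out' inside 'inner' is a nonlocal goto and is replaced,
   conceptually, by

     __builtin_nonlocal_goto (&NEW_LABEL, &CHAIN->nl_goto_field);

   where NEW_LABEL is the fresh DECL_NONLOCAL label created below and
   CHAIN is the static chain pointing at outer's frame.  NEW_LABEL and
   CHAIN are schematic names, not actual decls.  */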
2340 static tree
2341 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2342 struct walk_stmt_info *wi)
2344 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2345 tree label, new_label, target_context, x, field;
2346 gcall *call;
2347 gimple *stmt = gsi_stmt (*gsi);
2349 if (gimple_code (stmt) != GIMPLE_GOTO)
2351 *handled_ops_p = false;
2352 return NULL_TREE;
2355 label = gimple_goto_dest (stmt);
2356 if (TREE_CODE (label) != LABEL_DECL)
2358 *handled_ops_p = false;
2359 return NULL_TREE;
2362 target_context = decl_function_context (label);
2363 if (target_context == info->context)
2365 *handled_ops_p = false;
2366 return NULL_TREE;
2369 for (i = info->outer; target_context != i->context; i = i->outer)
2370 continue;
2372 /* The original user label may also be used for a normal goto; therefore
2373 we must create a new label that will actually receive the abnormal
2374 control transfer. This new label will be marked LABEL_NONLOCAL; this
2375 mark will trigger proper behavior in the cfg, as well as cause the
2376 (hairy target-specific) non-local goto receiver code to be generated
2377 when we expand rtl. Enter this association into var_map so that we
2378 can insert the new label into the IL during a second pass. */
2379 tree *slot = &i->var_map->get_or_insert (label);
2380 if (*slot == NULL)
2382 new_label = create_artificial_label (UNKNOWN_LOCATION);
2383 DECL_NONLOCAL (new_label) = 1;
2384 *slot = new_label;
2386 else
2387 new_label = *slot;
2389 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2390 field = get_nl_goto_field (i);
2391 x = get_frame_field (info, target_context, field, gsi);
2392 x = build_addr (x);
2393 x = gsi_gimplify_val (info, x, gsi);
2394 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2395 2, build_addr (new_label), x);
2396 gsi_replace (gsi, call, false);
2398 /* We have handled all of STMT's operands, no need to keep going. */
2399 *handled_ops_p = true;
2400 return NULL_TREE;
2404 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2405 are referenced via nonlocal goto from a nested function. The rewrite
2406 will involve installing a newly generated DECL_NONLOCAL label, and
2407 (potentially) a branch around the rtl gunk that is assumed to be
2408 attached to such a label. */
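/* Illustrative sketch, not part of the original sources: for a label
   'out' in 'outer' that is targeted by a nonlocal goto from a nested
   function, the receiving side is rewritten so that

     out:;

   becomes, conceptually,

     goto out;        (only if the previous statement may fall through)
     NEW_LABEL:       (the DECL_NONLOCAL label)
     out:;

   Normal control flow branches around the nonlocal receiver while
   abnormal transfers land on NEW_LABEL.  NEW_LABEL is schematic.  */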
2410 static tree
2411 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2412 struct walk_stmt_info *wi)
2414 struct nesting_info *const info = (struct nesting_info *) wi->info;
2415 tree label, new_label;
2416 gimple_stmt_iterator tmp_gsi;
2417 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2419 if (!stmt)
2421 *handled_ops_p = false;
2422 return NULL_TREE;
2425 label = gimple_label_label (stmt);
2427 tree *slot = info->var_map->get (label);
2428 if (!slot)
2430 *handled_ops_p = false;
2431 return NULL_TREE;
2434 /* If there's any possibility that the previous statement falls through,
2435 then we must branch around the new non-local label. */
2436 tmp_gsi = wi->gsi;
2437 gsi_prev (&tmp_gsi);
2438 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2440 gimple *stmt = gimple_build_goto (label);
2441 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2444 new_label = (tree) *slot;
2445 stmt = gimple_build_label (new_label);
2446 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2448 *handled_ops_p = true;
2449 return NULL_TREE;
2453 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2454 of nested functions that require the use of trampolines. The rewrite
2455 will involve a reference to a trampoline generated for the occasion. */
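/* Illustrative sketch, not part of the original sources:

     void outer (void)
     {
       int x = 0;
       int inner (void) { return x; }
       int (*fp) (void) = inner;
       fp ();
     }

   Because 'inner' uses the static chain, taking '&inner' is rewritten to
   the adjusted address of a per-instance trampoline stored in outer's
   frame, obtained via __builtin_adjust_trampoline (or, with descriptors,
   via __builtin_adjust_descriptor); see the T.1/T.2/T.3 sequence in the
   ADDR_EXPR case below.  */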
2457 static tree
2458 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2460 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2461 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2462 tree t = *tp, decl, target_context, x, builtin;
2463 bool descr;
2464 gcall *call;
2466 *walk_subtrees = 0;
2467 switch (TREE_CODE (t))
2469 case ADDR_EXPR:
2470 /* Build
2471 T.1 = &CHAIN->tramp;
2472 T.2 = __builtin_adjust_trampoline (T.1);
2473 T.3 = (func_type)T.2;
2476 decl = TREE_OPERAND (t, 0);
2477 if (TREE_CODE (decl) != FUNCTION_DECL)
2478 break;
2480 /* Only need to process nested functions. */
2481 target_context = decl_function_context (decl);
2482 if (!target_context)
2483 break;
2485 /* If the nested function doesn't use a static chain, then
2486 it doesn't need a trampoline. */
2487 if (!DECL_STATIC_CHAIN (decl))
2488 break;
2490 /* If we don't want a trampoline, then don't build one. */
2491 if (TREE_NO_TRAMPOLINE (t))
2492 break;
2494 /* Look up the immediate parent of the callee, as that's where
2495 we need to insert the trampoline. */
2496 for (i = info; i->context != target_context; i = i->outer)
2497 continue;
2499 /* Decide whether to generate a descriptor or a trampoline. */
2500 descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;
2502 if (descr)
2503 x = lookup_descr_for_decl (i, decl, INSERT);
2504 else
2505 x = lookup_tramp_for_decl (i, decl, INSERT);
2507 /* Compute the address of the field holding the trampoline. */
2508 x = get_frame_field (info, target_context, x, &wi->gsi);
2509 x = build_addr (x);
2510 x = gsi_gimplify_val (info, x, &wi->gsi);
2512 /* Do machine-specific ugliness. Normally this will involve
2513 computing extra alignment, but it can really be anything. */
2514 if (descr)
2515 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
2516 else
2517 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2518 call = gimple_build_call (builtin, 1, x);
2519 x = init_tmp_var_with_call (info, &wi->gsi, call);
2521 /* Cast back to the proper function type. */
2522 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2523 x = init_tmp_var (info, x, &wi->gsi);
2525 *tp = x;
2526 break;
2528 default:
2529 if (!IS_TYPE_OR_DECL_P (t))
2530 *walk_subtrees = 1;
2531 break;
2534 return NULL_TREE;
2538 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2539 to addresses of nested functions that require the use of
2540 trampolines. The rewrite will involve a reference to a trampoline
2541 generated for the occasion. */
2543 static tree
2544 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2545 struct walk_stmt_info *wi)
2547 struct nesting_info *info = (struct nesting_info *) wi->info;
2548 gimple *stmt = gsi_stmt (*gsi);
2550 switch (gimple_code (stmt))
2552 case GIMPLE_CALL:
2554 /* Only walk call arguments, lest we generate trampolines for
2555 direct calls. */
2556 unsigned long i, nargs = gimple_call_num_args (stmt);
2557 for (i = 0; i < nargs; i++)
2558 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2559 wi, NULL);
2560 break;
2563 case GIMPLE_OMP_TARGET:
2564 if (!is_gimple_omp_offloaded (stmt))
2566 *handled_ops_p = false;
2567 return NULL_TREE;
2569 /* FALLTHRU */
2570 case GIMPLE_OMP_PARALLEL:
2571 case GIMPLE_OMP_TASK:
2573 tree save_local_var_chain = info->new_local_var_chain;
2574 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2575 info->new_local_var_chain = NULL;
2576 char save_static_chain_added = info->static_chain_added;
2577 info->static_chain_added = 0;
2578 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2579 info, gimple_omp_body_ptr (stmt));
2580 if (info->new_local_var_chain)
2581 declare_vars (info->new_local_var_chain,
2582 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2583 false);
2584 for (int i = 0; i < 2; i++)
2586 tree c, decl;
2587 if ((info->static_chain_added & (1 << i)) == 0)
2588 continue;
2589 decl = i ? get_chain_decl (info) : info->frame_decl;
2590 /* Don't add CHAIN.* or FRAME.* twice. */
2591 for (c = gimple_omp_taskreg_clauses (stmt);
2593 c = OMP_CLAUSE_CHAIN (c))
2594 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2595 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2596 && OMP_CLAUSE_DECL (c) == decl)
2597 break;
2598 if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2600 c = build_omp_clause (gimple_location (stmt),
2601 i ? OMP_CLAUSE_FIRSTPRIVATE
2602 : OMP_CLAUSE_SHARED);
2603 OMP_CLAUSE_DECL (c) = decl;
2604 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2605 gimple_omp_taskreg_set_clauses (stmt, c);
2607 else if (c == NULL)
2609 c = build_omp_clause (gimple_location (stmt),
2610 OMP_CLAUSE_MAP);
2611 OMP_CLAUSE_DECL (c) = decl;
2612 OMP_CLAUSE_SET_MAP_KIND (c,
2613 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2614 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2615 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2616 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2620 info->new_local_var_chain = save_local_var_chain;
2621 info->static_chain_added |= save_static_chain_added;
2623 break;
2625 default:
2626 *handled_ops_p = false;
2627 return NULL_TREE;
2630 *handled_ops_p = true;
2631 return NULL_TREE;
2636 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2637 that reference nested functions to make sure that the static chain
2638 is set up properly for the call. */
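/* Illustrative sketch, not part of the original sources: a direct call
   such as 'inner ()' made from 'outer' (or from another nested function)
   keeps its callee but gets its static-chain operand set, conceptually to
   '&FRAME' of the function that declares 'inner', reached through chain
   fields when the caller is nested more deeply (see get_static_chain).
   'FRAME' is a schematic name for the frame decl.  */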
2640 static tree
2641 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2642 struct walk_stmt_info *wi)
2644 struct nesting_info *const info = (struct nesting_info *) wi->info;
2645 tree decl, target_context;
2646 char save_static_chain_added;
2647 int i;
2648 gimple *stmt = gsi_stmt (*gsi);
2650 switch (gimple_code (stmt))
2652 case GIMPLE_CALL:
2653 if (gimple_call_chain (stmt))
2654 break;
2655 decl = gimple_call_fndecl (stmt);
2656 if (!decl)
2657 break;
2658 target_context = decl_function_context (decl);
2659 if (target_context && DECL_STATIC_CHAIN (decl))
2661 struct nesting_info *i = info;
2662 while (i && i->context != target_context)
2663 i = i->outer;
2664 /* If none of the outer contexts is the target context, this means
2665 that the function is called in the wrong context. */
2666 if (!i)
2667 internal_error ("%s from %s called in %s",
2668 IDENTIFIER_POINTER (DECL_NAME (decl)),
2669 IDENTIFIER_POINTER (DECL_NAME (target_context)),
2670 IDENTIFIER_POINTER (DECL_NAME (info->context)));
2672 gimple_call_set_chain (as_a <gcall *> (stmt),
2673 get_static_chain (info, target_context,
2674 &wi->gsi));
2675 info->static_chain_added |= (1 << (info->context != target_context));
2677 break;
2679 case GIMPLE_OMP_PARALLEL:
2680 case GIMPLE_OMP_TASK:
2681 save_static_chain_added = info->static_chain_added;
2682 info->static_chain_added = 0;
2683 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2684 for (i = 0; i < 2; i++)
2686 tree c, decl;
2687 if ((info->static_chain_added & (1 << i)) == 0)
2688 continue;
2689 decl = i ? get_chain_decl (info) : info->frame_decl;
2690 /* Don't add CHAIN.* or FRAME.* twice. */
2691 for (c = gimple_omp_taskreg_clauses (stmt);
2693 c = OMP_CLAUSE_CHAIN (c))
2694 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2695 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2696 && OMP_CLAUSE_DECL (c) == decl)
2697 break;
2698 if (c == NULL)
2700 c = build_omp_clause (gimple_location (stmt),
2701 i ? OMP_CLAUSE_FIRSTPRIVATE
2702 : OMP_CLAUSE_SHARED);
2703 OMP_CLAUSE_DECL (c) = decl;
2704 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2705 gimple_omp_taskreg_set_clauses (stmt, c);
2708 info->static_chain_added |= save_static_chain_added;
2709 break;
2711 case GIMPLE_OMP_TARGET:
2712 if (!is_gimple_omp_offloaded (stmt))
2714 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2715 break;
2717 save_static_chain_added = info->static_chain_added;
2718 info->static_chain_added = 0;
2719 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2720 for (i = 0; i < 2; i++)
2722 tree c, decl;
2723 if ((info->static_chain_added & (1 << i)) == 0)
2724 continue;
2725 decl = i ? get_chain_decl (info) : info->frame_decl;
2726 /* Don't add CHAIN.* or FRAME.* twice. */
2727 for (c = gimple_omp_target_clauses (stmt);
2729 c = OMP_CLAUSE_CHAIN (c))
2730 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2731 && OMP_CLAUSE_DECL (c) == decl)
2732 break;
2733 if (c == NULL)
2735 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2736 OMP_CLAUSE_DECL (c) = decl;
2737 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2738 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2739 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2740 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2744 info->static_chain_added |= save_static_chain_added;
2745 break;
2747 case GIMPLE_OMP_FOR:
2748 walk_body (convert_gimple_call, NULL, info,
2749 gimple_omp_for_pre_body_ptr (stmt));
2750 /* FALLTHRU */
2751 case GIMPLE_OMP_SECTIONS:
2752 case GIMPLE_OMP_SECTION:
2753 case GIMPLE_OMP_SINGLE:
2754 case GIMPLE_OMP_TEAMS:
2755 case GIMPLE_OMP_MASTER:
2756 case GIMPLE_OMP_TASKGROUP:
2757 case GIMPLE_OMP_ORDERED:
2758 case GIMPLE_OMP_CRITICAL:
2759 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2760 break;
2762 default:
2763 /* Keep looking for other operands. */
2764 *handled_ops_p = false;
2765 return NULL_TREE;
2768 *handled_ops_p = true;
2769 return NULL_TREE;
2772 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2773 call expressions. At the same time, determine if a nested function
2774 actually uses its static chain; if not, remember that. */
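/* Illustrative note, an assumption-level paraphrase rather than original
   text: the fixed-point loop below is needed because rewriting one
   function can create a new static chain use.  If converting a call or
   trampoline reference in 'mid' makes 'mid' require its static chain,
   then every caller of 'mid' must now pass that chain and may become a
   chain user itself, so the walk repeats until the number of chain-using
   functions stops changing.  */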
2776 static void
2777 convert_all_function_calls (struct nesting_info *root)
2779 unsigned int chain_count = 0, old_chain_count, iter_count;
2780 struct nesting_info *n;
2782 /* First, optimistically clear static_chain for all decls that haven't
2783 used the static chain already for variable access. But always create
2784 it if not optimizing. This makes it possible to reconstruct the static
2785 nesting tree at run time and thus to resolve up-level references from
2786 within the debugger. */
2787 FOR_EACH_NEST_INFO (n, root)
2789 tree decl = n->context;
2790 if (!optimize)
2792 if (n->inner)
2793 (void) get_frame_type (n);
2794 if (n->outer)
2795 (void) get_chain_decl (n);
2797 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2799 DECL_STATIC_CHAIN (decl) = 0;
2800 if (dump_file && (dump_flags & TDF_DETAILS))
2801 fprintf (dump_file, "Guessing no static-chain for %s\n",
2802 lang_hooks.decl_printable_name (decl, 2));
2804 else
2805 DECL_STATIC_CHAIN (decl) = 1;
2806 chain_count += DECL_STATIC_CHAIN (decl);
2809 /* Walk the functions and perform transformations. Note that these
2810 transformations can induce new uses of the static chain, which in turn
2811 require re-examining all users of the decl. */
2812 /* ??? It would make sense to try to use the call graph to speed this up,
2813 but the call graph hasn't really been built yet. Even if it had been, we
2814 would still need to iterate in this loop since address-of references
2815 wouldn't show up in the callgraph anyway. */
2816 iter_count = 0;
2819 old_chain_count = chain_count;
2820 chain_count = 0;
2821 iter_count++;
2823 if (dump_file && (dump_flags & TDF_DETAILS))
2824 fputc ('\n', dump_file);
2826 FOR_EACH_NEST_INFO (n, root)
2828 tree decl = n->context;
2829 walk_function (convert_tramp_reference_stmt,
2830 convert_tramp_reference_op, n);
2831 walk_function (convert_gimple_call, NULL, n);
2832 chain_count += DECL_STATIC_CHAIN (decl);
2835 while (chain_count != old_chain_count);
2837 if (dump_file && (dump_flags & TDF_DETAILS))
2838 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2839 iter_count);
2842 struct nesting_copy_body_data
2844 copy_body_data cb;
2845 struct nesting_info *root;
2848 /* A helper subroutine for debug_var_chain type remapping. */
2850 static tree
2851 nesting_copy_decl (tree decl, copy_body_data *id)
2853 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2854 tree *slot = nid->root->var_map->get (decl);
2856 if (slot)
2857 return (tree) *slot;
2859 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2861 tree new_decl = copy_decl_no_change (decl, id);
2862 DECL_ORIGINAL_TYPE (new_decl)
2863 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2864 return new_decl;
2867 if (VAR_P (decl)
2868 || TREE_CODE (decl) == PARM_DECL
2869 || TREE_CODE (decl) == RESULT_DECL)
2870 return decl;
2872 return copy_decl_no_change (decl, id);
2875 /* A helper function for remap_vla_decls. See if *TP contains
2876 some remapped variables. */
2878 static tree
2879 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2881 struct nesting_info *root = (struct nesting_info *) data;
2882 tree t = *tp;
2884 if (DECL_P (t))
2886 *walk_subtrees = 0;
2887 tree *slot = root->var_map->get (t);
2889 if (slot)
2890 return *slot;
2892 return NULL;
2895 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2896 involved. */
2898 static void
2899 remap_vla_decls (tree block, struct nesting_info *root)
2901 tree var, subblock, val, type;
2902 struct nesting_copy_body_data id;
2904 for (subblock = BLOCK_SUBBLOCKS (block);
2905 subblock;
2906 subblock = BLOCK_CHAIN (subblock))
2907 remap_vla_decls (subblock, root);
2909 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2910 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
2912 val = DECL_VALUE_EXPR (var);
2913 type = TREE_TYPE (var);
2915 if (!(TREE_CODE (val) == INDIRECT_REF
2916 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2917 && variably_modified_type_p (type, NULL)))
2918 continue;
2920 if (root->var_map->get (TREE_OPERAND (val, 0))
2921 || walk_tree (&type, contains_remapped_vars, root, NULL))
2922 break;
2925 if (var == NULL_TREE)
2926 return;
2928 memset (&id, 0, sizeof (id));
2929 id.cb.copy_decl = nesting_copy_decl;
2930 id.cb.decl_map = new hash_map<tree, tree>;
2931 id.root = root;
2933 for (; var; var = DECL_CHAIN (var))
2934 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
2936 struct nesting_info *i;
2937 tree newt, context;
2939 val = DECL_VALUE_EXPR (var);
2940 type = TREE_TYPE (var);
2942 if (!(TREE_CODE (val) == INDIRECT_REF
2943 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2944 && variably_modified_type_p (type, NULL)))
2945 continue;
2947 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2948 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2949 continue;
2951 context = decl_function_context (var);
2952 for (i = root; i; i = i->outer)
2953 if (i->context == context)
2954 break;
2956 if (i == NULL)
2957 continue;
2959 /* Fully expand value expressions. This avoids having debug variables
2960 that are only referenced from them and can thus be swept during GC. */
2961 if (slot)
2963 tree t = (tree) *slot;
2964 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2965 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2968 id.cb.src_fn = i->context;
2969 id.cb.dst_fn = i->context;
2970 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2972 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2973 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2975 newt = TREE_TYPE (newt);
2976 type = TREE_TYPE (type);
2978 if (TYPE_NAME (newt)
2979 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2980 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2981 && newt != type
2982 && TYPE_NAME (newt) == TYPE_NAME (type))
2983 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2985 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2986 if (val != DECL_VALUE_EXPR (var))
2987 SET_DECL_VALUE_EXPR (var, val);
2990 delete id.cb.decl_map;
2993 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
2994 involved. */
2996 static void
2997 fixup_vla_decls (tree block)
2999 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3000 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3002 tree val = DECL_VALUE_EXPR (var);
3004 if (!(TREE_CODE (val) == INDIRECT_REF
3005 && VAR_P (TREE_OPERAND (val, 0))
3006 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3007 continue;
3009 /* Fully expand value expressions. This avoids having debug variables
3010 that are only referenced from them and can thus be swept during GC. */
3011 val = build1 (INDIRECT_REF, TREE_TYPE (val),
3012 DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3013 SET_DECL_VALUE_EXPR (var, val);
3016 for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3017 fixup_vla_decls (sub);
3020 /* Fold the MEM_REF *E. */
3021 bool
3022 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3024 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3025 *ref_p = fold (*ref_p);
3026 return true;
3029 /* Given DECL, a nested function, build an initialization call for FIELD,
3030 the trampoline or descriptor for DECL, using FUNC as the function. */
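/* Illustrative sketch, not part of the original sources: for a trampoline
   this builds, conceptually,

     __builtin_init_trampoline (&FRAME.tramp_for_inner, &inner, &FRAME);

   i.e. FUNC applied to the address of the frame field, the address of
   the nested function, and the address of the frame itself.  The field
   name 'tramp_for_inner' is schematic.  */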
3032 static gcall *
3033 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3034 tree func)
3036 tree arg1, arg2, arg3, x;
3038 gcc_assert (DECL_STATIC_CHAIN (decl));
3039 arg3 = build_addr (info->frame_decl);
3041 arg2 = build_addr (decl);
3043 x = build3 (COMPONENT_REF, TREE_TYPE (field),
3044 info->frame_decl, field, NULL_TREE);
3045 arg1 = build_addr (x);
3047 return gimple_build_call (func, 3, arg1, arg2, arg3);
3050 /* Do "everything else" to clean up or complete state collected by the various
3051 walking passes -- create a field to hold the frame base address, lay out the
3052 types and decls, generate code to initialize the frame decl, store critical
3053 expressions in the struct function for rtl to find. */
3055 static void
3056 finalize_nesting_tree_1 (struct nesting_info *root)
3058 gimple_seq stmt_list;
3059 gimple *stmt;
3060 tree context = root->context;
3061 struct function *sf;
3063 stmt_list = NULL;
3065 /* If we created a non-local frame type or decl, we need to lay them
3066 out at this time. */
3067 if (root->frame_type)
3069 /* Debugging information needs to compute the frame base address of the
3070 parent frame out of the static chain from the nested frame.
3072 The static chain is the address of the FRAME record, so one could
3073 imagine it would be possible to compute the frame base address just
3074 adding a constant offset to this address. Unfortunately, this is not
3075 possible: if the FRAME object has alignment constraints that are
3076 stronger than the stack, then the offset between the frame base and
3077 the FRAME object will be dynamic.
3079 What we do instead is to append a field to the FRAME object that holds
3080 the frame base address: then debug info just has to fetch this
3081 field. */
3083 /* Debugging information will refer to the CFA as the frame base
3084 address: we will do the same here. */
3085 const tree frame_addr_fndecl
3086 = builtin_decl_explicit (BUILT_IN_DWARF_CFA);
3088 /* Create a field in the FRAME record to hold the frame base address for
3089 this stack frame. Since it will be used only by the debugger, put it
3090 at the end of the record in order not to shift all other offsets. */
3091 tree fb_decl = make_node (FIELD_DECL);
3093 DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
3094 TREE_TYPE (fb_decl) = ptr_type_node;
3095 TREE_ADDRESSABLE (fb_decl) = 1;
3096 DECL_CONTEXT (fb_decl) = root->frame_type;
3097 TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
3098 fb_decl);
3100 /* In some cases the frame type will trigger the -Wpadded warning.
3101 This is not helpful; suppress it. */
3102 int save_warn_padded = warn_padded;
3103 warn_padded = 0;
3104 layout_type (root->frame_type);
3105 warn_padded = save_warn_padded;
3106 layout_decl (root->frame_decl, 0);
3108 /* Initialize the frame base address field. If the builtin we need is
3109 not available, set it to NULL so that debugging information does not
3110 reference junk. */
3111 tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
3112 root->frame_decl, fb_decl, NULL_TREE);
3113 tree fb_tmp;
3115 if (frame_addr_fndecl != NULL_TREE)
3117 gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
3118 integer_zero_node);
3119 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3121 fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
3123 else
3124 fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
3125 gimple_seq_add_stmt (&stmt_list,
3126 gimple_build_assign (fb_ref, fb_tmp));
3128 declare_vars (root->frame_decl,
3129 gimple_seq_first_stmt (gimple_body (context)), true);
3132 /* If any parameters were referenced non-locally, then we need to insert
3133 a copy or a pointer. */
3134 if (root->any_parm_remapped)
3136 tree p;
3137 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
3139 tree field, x, y;
3141 field = lookup_field_for_decl (root, p, NO_INSERT);
3142 if (!field)
3143 continue;
3145 if (use_pointer_in_frame (p))
3146 x = build_addr (p);
3147 else
3148 x = p;
3150 /* If the assignment is from a non-register, the stmt is
3151 not valid gimple. Make it so by using a temporary instead. */
3152 if (!is_gimple_reg (x)
3153 && is_gimple_reg_type (TREE_TYPE (x)))
3155 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3156 x = init_tmp_var (root, x, &gsi);
3159 y = build3 (COMPONENT_REF, TREE_TYPE (field),
3160 root->frame_decl, field, NULL_TREE);
3161 stmt = gimple_build_assign (y, x);
3162 gimple_seq_add_stmt (&stmt_list, stmt);
3166 /* If a chain_field was created, then it needs to be initialized
3167 from chain_decl. */
3168 if (root->chain_field)
3170 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
3171 root->frame_decl, root->chain_field, NULL_TREE);
3172 stmt = gimple_build_assign (x, get_chain_decl (root));
3173 gimple_seq_add_stmt (&stmt_list, stmt);
3176 /* If trampolines were created, then we need to initialize them. */
3177 if (root->any_tramp_created)
3179 struct nesting_info *i;
3180 for (i = root->inner; i ; i = i->next)
3182 tree field, x;
3184 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
3185 if (!field)
3186 continue;
3188 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
3189 stmt = build_init_call_stmt (root, i->context, field, x);
3190 gimple_seq_add_stmt (&stmt_list, stmt);
3194 /* If descriptors were created, then we need to initialize them. */
3195 if (root->any_descr_created)
3197 struct nesting_info *i;
3198 for (i = root->inner; i ; i = i->next)
3200 tree field, x;
3202 field = lookup_descr_for_decl (root, i->context, NO_INSERT);
3203 if (!field)
3204 continue;
3206 x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
3207 stmt = build_init_call_stmt (root, i->context, field, x);
3208 gimple_seq_add_stmt (&stmt_list, stmt);
3212 /* If we created initialization statements, insert them. */
3213 if (stmt_list)
3215 gbind *bind;
3216 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
3217 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
3218 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
3219 gimple_bind_set_body (bind, stmt_list);
3222 /* If a chain_decl was created, then it needs to be registered with
3223 struct function so that it gets initialized from the static chain
3224 register at the beginning of the function. */
3225 sf = DECL_STRUCT_FUNCTION (root->context);
3226 sf->static_chain_decl = root->chain_decl;
3228 /* Similarly for the non-local goto save area. */
3229 if (root->nl_goto_field)
3231 sf->nonlocal_goto_save_area
3232 = get_frame_field (root, context, root->nl_goto_field, NULL);
3233 sf->has_nonlocal_label = 1;
3236 /* Make sure all new local variables get inserted into the
3237 proper BIND_EXPR. */
3238 if (root->new_local_var_chain)
3239 declare_vars (root->new_local_var_chain,
3240 gimple_seq_first_stmt (gimple_body (root->context)),
3241 false);
3243 if (root->debug_var_chain)
3245 tree debug_var;
3246 gbind *scope;
3248 remap_vla_decls (DECL_INITIAL (root->context), root);
3250 for (debug_var = root->debug_var_chain; debug_var;
3251 debug_var = DECL_CHAIN (debug_var))
3252 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3253 break;
3255 /* If there are any debug decls with variable length types,
3256 remap those types using other debug_var_chain variables. */
3257 if (debug_var)
3259 struct nesting_copy_body_data id;
3261 memset (&id, 0, sizeof (id));
3262 id.cb.copy_decl = nesting_copy_decl;
3263 id.cb.decl_map = new hash_map<tree, tree>;
3264 id.root = root;
3266 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
3267 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3269 tree type = TREE_TYPE (debug_var);
3270 tree newt, t = type;
3271 struct nesting_info *i;
3273 for (i = root; i; i = i->outer)
3274 if (variably_modified_type_p (type, i->context))
3275 break;
3277 if (i == NULL)
3278 continue;
3280 id.cb.src_fn = i->context;
3281 id.cb.dst_fn = i->context;
3282 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3284 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
3285 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3287 newt = TREE_TYPE (newt);
3288 t = TREE_TYPE (t);
3290 if (TYPE_NAME (newt)
3291 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3292 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3293 && newt != t
3294 && TYPE_NAME (newt) == TYPE_NAME (t))
3295 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3298 delete id.cb.decl_map;
3301 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
3302 if (gimple_bind_block (scope))
3303 declare_vars (root->debug_var_chain, scope, true);
3304 else
3305 BLOCK_VARS (DECL_INITIAL (root->context))
3306 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
3307 root->debug_var_chain);
3309 else
3310 fixup_vla_decls (DECL_INITIAL (root->context));
3312 /* Fold the rewritten MEM_REF trees. */
3313 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
3315 /* Dump the translated tree function. */
3316 if (dump_file)
3318 fputs ("\n\n", dump_file);
3319 dump_function_to_file (root->context, dump_file, dump_flags);
3323 static void
3324 finalize_nesting_tree (struct nesting_info *root)
3326 struct nesting_info *n;
3327 FOR_EACH_NEST_INFO (n, root)
3328 finalize_nesting_tree_1 (n);
3331 /* Unnest the nodes and pass them to cgraph. */
3333 static void
3334 unnest_nesting_tree_1 (struct nesting_info *root)
3336 struct cgraph_node *node = cgraph_node::get (root->context);
3338 /* For nested functions, update the cgraph to reflect unnesting.
3339 We also delay finalizing these functions until this point. */
3340 if (node->origin)
3342 node->unnest ();
3343 cgraph_node::finalize_function (root->context, true);
3347 static void
3348 unnest_nesting_tree (struct nesting_info *root)
3350 struct nesting_info *n;
3351 FOR_EACH_NEST_INFO (n, root)
3352 unnest_nesting_tree_1 (n);
3355 /* Free the data structures allocated during this pass. */
3357 static void
3358 free_nesting_tree (struct nesting_info *root)
3360 struct nesting_info *node, *next;
3362 node = iter_nestinfo_start (root);
3365 next = iter_nestinfo_next (node);
3366 delete node->var_map;
3367 delete node->field_map;
3368 delete node->mem_refs;
3369 free (node);
3370 node = next;
3372 while (node);
3375 /* Gimplify a function and all its nested functions. */
3376 static void
3377 gimplify_all_functions (struct cgraph_node *root)
3379 struct cgraph_node *iter;
3380 if (!gimple_body (root->decl))
3381 gimplify_function_tree (root->decl);
3382 for (iter = root->nested; iter; iter = iter->next_nested)
3383 gimplify_all_functions (iter);
3386 /* Main entry point for this pass. Process FNDECL and all of its nested
3387 subroutines and turn them into something less tightly bound. */
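/* Illustrative end-to-end sketch, not part of the original sources: after
   this pass,

     void outer (void)
     {
       int x = 0;
       void inner (void) { x = 1; }
       inner ();
     }

   behaves roughly as if it had been written

     struct FRAME_outer { int x; };
     static void inner (struct FRAME_outer *chain) { chain->x = 1; }
     void outer (void)
     {
       struct FRAME_outer FRAME;
       FRAME.x = 0;
       inner (&FRAME);
     }

   where FRAME_outer, FRAME and chain are schematic names for the frame
   type, frame decl and static chain built here.  */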
3389 void
3390 lower_nested_functions (tree fndecl)
3392 struct cgraph_node *cgn;
3393 struct nesting_info *root;
3395 /* If there are no nested functions, there's nothing to do. */
3396 cgn = cgraph_node::get (fndecl);
3397 if (!cgn->nested)
3398 return;
3400 gimplify_all_functions (cgn);
3402 set_dump_file (dump_begin (TDI_nested, &dump_flags));
3403 if (dump_file)
3404 fprintf (dump_file, "\n;; Function %s\n\n",
3405 lang_hooks.decl_printable_name (fndecl, 2));
3407 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3408 root = create_nesting_tree (cgn);
3410 walk_all_functions (convert_nonlocal_reference_stmt,
3411 convert_nonlocal_reference_op,
3412 root);
3413 walk_all_functions (convert_local_reference_stmt,
3414 convert_local_reference_op,
3415 root);
3416 walk_all_functions (convert_nl_goto_reference, NULL, root);
3417 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3419 convert_all_function_calls (root);
3420 finalize_nesting_tree (root);
3421 unnest_nesting_tree (root);
3423 free_nesting_tree (root);
3424 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3426 if (dump_file)
3428 dump_end (TDI_nested, dump_file);
3429 set_dump_file (NULL);
3433 #include "gt-tree-nested.h"