gcc/tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45 #include "alloc-pool.h"
46 #include "tree-nested.h"
47 #include "symbol-summary.h"
48 #include "symtab-thunks.h"
50 /* Summary of nested functions. */
51 static function_summary <nested_function_info *>
52 *nested_function_sum = NULL;
54 /* Return nested_function_info, if available. */
55 nested_function_info *
56 nested_function_info::get (cgraph_node *node)
58 if (!nested_function_sum)
59 return NULL;
60 return nested_function_sum->get (node);
63 /* Return nested_function_info, possibly creating a new one. */
64 nested_function_info *
65 nested_function_info::get_create (cgraph_node *node)
67 if (!nested_function_sum)
69 nested_function_sum = new function_summary <nested_function_info *>
70 (symtab);
71 nested_function_sum->disable_insertion_hook ();
73 return nested_function_sum->get_create (node);
76 /* cgraph_node is no longer a nested function; update cgraph accordingly. */
77 void
78 unnest_function (cgraph_node *node)
80 nested_function_info *info = nested_function_info::get (node);
81 cgraph_node **node2 = &nested_function_info::get
82 (nested_function_origin (node))->nested;
84 gcc_checking_assert (info->origin);
85 while (*node2 != node)
86 node2 = &nested_function_info::get (*node2)->next_nested;
87 *node2 = info->next_nested;
88 info->next_nested = NULL;
89 info->origin = NULL;
90 nested_function_sum->remove (node);
93 /* Destructor: unlink function from nested function lists. */
94 nested_function_info::~nested_function_info ()
96 cgraph_node *next;
97 for (cgraph_node *n = nested; n; n = next)
99 nested_function_info *info = nested_function_info::get (n);
100 next = info->next_nested;
101 info->origin = NULL;
102 info->next_nested = NULL;
104 nested = NULL;
105 if (origin)
107 cgraph_node **node2
108 = &nested_function_info::get (origin)->nested;
110 nested_function_info *info;
111 while ((info = nested_function_info::get (*node2)) != this && info)
112 node2 = &info->next_nested;
113 *node2 = next_nested;
117 /* Free nested function info summaries. */
118 void
119 nested_function_info::release ()
121 if (nested_function_sum)
122 delete (nested_function_sum);
123 nested_function_sum = NULL;
126 /* If NODE is a nested function, record it. */
127 void
128 maybe_record_nested_function (cgraph_node *node)
130 /* All nested functions get lowered during the construction of symtab. */
131 if (symtab->state > CONSTRUCTION)
132 return;
133 if (DECL_CONTEXT (node->decl)
134 && TREE_CODE (DECL_CONTEXT (node->decl)) == FUNCTION_DECL)
136 cgraph_node *origin = cgraph_node::get_create (DECL_CONTEXT (node->decl));
137 nested_function_info *info = nested_function_info::get_create (node);
138 nested_function_info *origin_info
139 = nested_function_info::get_create (origin);
141 info->origin = origin;
142 info->next_nested = origin_info->nested;
143 origin_info->nested = node;
147 /* The object of this pass is to lower the representation of a set of nested
148 functions in order to expose all of the gory details of the various
149 nonlocal references. We want to do this sooner rather than later, in
150 order to give us more freedom in emitting all of the functions in question.
152 Back in olden times, when gcc was young, we developed an insanely
153 complicated scheme whereby variables which were referenced nonlocally
154 were forced to live in the stack of the declaring function, and then
155 the nested functions magically discovered where these variables were
156 placed. In order for this scheme to function properly, it required
157 that the outer function be partially expanded, then we switch to
158 compiling the inner function, and once done with those we switch back
159 to compiling the outer function. Such delicate ordering requirements
160 make it difficult to do whole translation unit optimizations
161 involving such functions.
163 The implementation here is much more direct. Everything that can be
164 referenced by an inner function is a member of an explicitly created
165 structure herein called the "nonlocal frame struct". The incoming
166 static chain for a nested function is a pointer to this struct in
167 the parent. In this way, we settle on known offsets from a known
168 base, and so are decoupled from the logic that places objects in the
169 function's stack frame. More importantly, we don't have to wait for
170 that to happen -- since the compilation of the inner function is no
171 longer tied to a real stack frame, the nonlocal frame struct can be
172 allocated anywhere, which means that the outer function is now
173 inlinable.
175 Theory of operation here is very simple. Iterate over all the
176 statements in all the functions (depth first) several times,
177 allocating structures and fields on demand. In general we want to
178 examine inner functions first, so that we can avoid making changes
179 to outer functions which are unnecessary.
181 The order of the passes matters a bit, in that later passes will be
182 skipped if it is discovered that the functions don't actually interact
183 at all. That is, they're nested in the lexical sense but could have
184 been written as independent functions without change. */
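/* As a purely illustrative sketch (the identifiers below are invented for
   this comment; the pass builds the frame type and its fields
   programmatically), a GNU C nested function such as

     int foo (int n)
     {
       int x = n;
       int bar (void) { return x + 1; }
       return bar ();
     }

   where BAR reads X non-locally, is conceptually rewritten so that X
   lives in an explicit frame object whose address is passed as the
   static chain:

     struct FRAME_foo { int x; };

     static int bar (struct FRAME_foo *chain) { return chain->x + 1; }

     int foo (int n)
     {
       struct FRAME_foo frame;
       frame.x = n;
       return bar (&frame);
     }

   Because the frame object is an ordinary local aggregate, FOO can be
   inlined like any other function.  */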
187 struct nesting_info
189 struct nesting_info *outer;
190 struct nesting_info *inner;
191 struct nesting_info *next;
193 hash_map<tree, tree> *field_map;
194 hash_map<tree, tree> *var_map;
195 hash_set<tree *> *mem_refs;
196 bitmap suppress_expansion;
198 tree context;
199 tree new_local_var_chain;
200 tree debug_var_chain;
201 tree frame_type;
202 tree frame_decl;
203 tree chain_field;
204 tree chain_decl;
205 tree nl_goto_field;
207 bool thunk_p;
208 bool any_parm_remapped;
209 bool any_tramp_created;
210 bool any_descr_created;
211 char static_chain_added;
215 /* Iterate over the nesting tree, starting with ROOT, depth first. */
217 static inline struct nesting_info *
218 iter_nestinfo_start (struct nesting_info *root)
220 while (root->inner)
221 root = root->inner;
222 return root;
225 static inline struct nesting_info *
226 iter_nestinfo_next (struct nesting_info *node)
228 if (node->next)
229 return iter_nestinfo_start (node->next);
230 return node->outer;
233 #define FOR_EACH_NEST_INFO(I, ROOT) \
234 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
236 /* Obstack used for the bitmaps in the struct above. */
237 static struct bitmap_obstack nesting_info_bitmap_obstack;
240 /* We're working in so many different function contexts simultaneously,
241 that create_tmp_var is dangerous. Prevent mishap. */
242 #define create_tmp_var cant_use_create_tmp_var_here_dummy
244 /* Like create_tmp_var, except record the variable for registration at
245 the given nesting level. */
247 static tree
248 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
250 tree tmp_var;
252 /* If the type is of variable size or a type which must be created by the
253 frontend, something is wrong. Note that we explicitly allow
254 incomplete types here, since we create them ourselves here. */
255 gcc_assert (!TREE_ADDRESSABLE (type));
256 gcc_assert (!TYPE_SIZE_UNIT (type)
257 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
259 tmp_var = create_tmp_var_raw (type, prefix);
260 DECL_CONTEXT (tmp_var) = info->context;
261 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
262 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
264 info->new_local_var_chain = tmp_var;
266 return tmp_var;
269 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
271 static tree
272 build_simple_mem_ref_notrap (tree ptr)
274 tree t = build_simple_mem_ref (ptr);
275 TREE_THIS_NOTRAP (t) = 1;
276 return t;
279 /* Take the address of EXP to be used within function CONTEXT.
280 Mark it for addressability as necessary. */
282 tree
283 build_addr (tree exp)
285 mark_addressable (exp);
286 return build_fold_addr_expr (exp);
289 /* Insert FIELD into TYPE, sorted by alignment requirements. */
291 void
292 insert_field_into_struct (tree type, tree field)
294 tree *p;
296 DECL_CONTEXT (field) = type;
298 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
299 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
300 break;
302 DECL_CHAIN (field) = *p;
303 *p = field;
305 /* Set correct alignment for frame struct type. */
306 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
307 SET_TYPE_ALIGN (type, DECL_ALIGN (field));
310 /* Build or return the RECORD_TYPE that describes the frame state that is
311 shared between INFO->CONTEXT and its nested functions. This record will
312 not be complete until finalize_nesting_tree; up until that point we'll
313 be adding fields as necessary.
315 We also build the DECL that represents this frame in the function. */
317 static tree
318 get_frame_type (struct nesting_info *info)
320 tree type = info->frame_type;
321 if (!type)
323 char *name;
325 type = make_node (RECORD_TYPE);
327 name = concat ("FRAME.",
328 IDENTIFIER_POINTER (DECL_NAME (info->context)),
329 NULL);
330 TYPE_NAME (type) = get_identifier (name);
331 free (name);
333 info->frame_type = type;
335 /* Do not put info->frame_decl on info->new_local_var_chain,
336 so that we can declare it in the lexical blocks, which
337 makes sure virtual regs that end up appearing in its RTL
338 expression get substituted in instantiate_virtual_regs. */
339 info->frame_decl = create_tmp_var_raw (type, "FRAME");
340 DECL_CONTEXT (info->frame_decl) = info->context;
341 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
342 DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;
344 /* ??? Always make it addressable for now, since it is meant to
345 be pointed to by the static chain pointer. This pessimizes
346 when it turns out that no static chains are needed because
347 the nested functions referencing non-local variables are not
348 reachable, but the true pessimization is to create the non-
349 local frame structure in the first place. */
350 TREE_ADDRESSABLE (info->frame_decl) = 1;
353 return type;
356 /* Return true if DECL should be referenced by pointer in the non-local frame
357 structure. */
359 static bool
360 use_pointer_in_frame (tree decl)
362 if (TREE_CODE (decl) == PARM_DECL)
364 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
365 sized DECLs, and inefficient to copy large aggregates. Don't bother
366 moving anything but scalar parameters. */
367 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
369 else
371 /* Variable-sized DECLs can only come from OMP clauses at this point
372 since the gimplifier has already turned the regular variables into
373 pointers. Do the same as the gimplifier. */
374 return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
378 /* Given DECL, a non-locally accessed variable, find or create a field
379 in the non-local frame structure for the given nesting context. */
381 static tree
382 lookup_field_for_decl (struct nesting_info *info, tree decl,
383 enum insert_option insert)
385 gcc_checking_assert (decl_function_context (decl) == info->context);
387 if (insert == NO_INSERT)
389 tree *slot = info->field_map->get (decl);
390 return slot ? *slot : NULL_TREE;
393 tree *slot = &info->field_map->get_or_insert (decl);
394 if (!*slot)
396 tree type = get_frame_type (info);
397 tree field = make_node (FIELD_DECL);
398 DECL_NAME (field) = DECL_NAME (decl);
400 if (use_pointer_in_frame (decl))
402 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
403 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
404 DECL_NONADDRESSABLE_P (field) = 1;
406 else
408 TREE_TYPE (field) = TREE_TYPE (decl);
409 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
410 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
411 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
412 DECL_IGNORED_P (field) = DECL_IGNORED_P (decl);
413 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
414 TREE_NO_WARNING (field) = TREE_NO_WARNING (decl);
415 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
417 /* Declare the transformation and adjust the original DECL. For a
418 variable or for a parameter when not optimizing, we make it point
419 to the field in the frame directly. For a parameter, we don't do
420 it when optimizing because the variable tracking pass will already
421 do the job. */
422 if (VAR_P (decl) || !optimize)
424 tree x
425 = build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
426 field, NULL_TREE);
428 /* If the next declaration is a PARM_DECL pointing to the DECL,
429 we need to adjust its VALUE_EXPR directly, since chains of
430 VALUE_EXPRs run afoul of garbage collection. This occurs
431 in Ada for Out parameters that aren't copied in. */
432 tree next = DECL_CHAIN (decl);
433 if (next
434 && TREE_CODE (next) == PARM_DECL
435 && DECL_HAS_VALUE_EXPR_P (next)
436 && DECL_VALUE_EXPR (next) == decl)
437 SET_DECL_VALUE_EXPR (next, x);
439 SET_DECL_VALUE_EXPR (decl, x);
440 DECL_HAS_VALUE_EXPR_P (decl) = 1;
444 insert_field_into_struct (type, field);
445 *slot = field;
447 if (TREE_CODE (decl) == PARM_DECL)
448 info->any_parm_remapped = true;
451 return *slot;
454 /* Build or return the variable that holds the static chain within
455 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
457 static tree
458 get_chain_decl (struct nesting_info *info)
460 tree decl = info->chain_decl;
462 if (!decl)
464 tree type;
466 type = get_frame_type (info->outer);
467 type = build_pointer_type (type);
469 /* Note that this variable is *not* entered into any BIND_EXPR;
470 the construction of this variable is handled specially in
471 expand_function_start and initialize_inlined_parameters.
472 Note also that it's represented as a parameter. This is closer
473 to the truth, since the initial value does come from
474 the caller. */
475 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
476 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
477 DECL_ARTIFICIAL (decl) = 1;
478 DECL_IGNORED_P (decl) = 1;
479 TREE_USED (decl) = 1;
480 DECL_CONTEXT (decl) = info->context;
481 DECL_ARG_TYPE (decl) = type;
483 /* Tell tree-inline.c that we never write to this variable, so
484 it can copy-prop the replacement value immediately. */
485 TREE_READONLY (decl) = 1;
487 info->chain_decl = decl;
489 if (dump_file
490 && (dump_flags & TDF_DETAILS)
491 && !DECL_STATIC_CHAIN (info->context))
492 fprintf (dump_file, "Setting static-chain for %s\n",
493 lang_hooks.decl_printable_name (info->context, 2));
495 DECL_STATIC_CHAIN (info->context) = 1;
497 return decl;
500 /* Build or return the field within the non-local frame state that holds
501 the static chain for INFO->CONTEXT. This is the way to walk back up
502 multiple nesting levels. */
504 static tree
505 get_chain_field (struct nesting_info *info)
507 tree field = info->chain_field;
509 if (!field)
511 tree type = build_pointer_type (get_frame_type (info->outer));
513 field = make_node (FIELD_DECL);
514 DECL_NAME (field) = get_identifier ("__chain");
515 TREE_TYPE (field) = type;
516 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
517 DECL_NONADDRESSABLE_P (field) = 1;
519 insert_field_into_struct (get_frame_type (info), field);
521 info->chain_field = field;
523 if (dump_file
524 && (dump_flags & TDF_DETAILS)
525 && !DECL_STATIC_CHAIN (info->context))
526 fprintf (dump_file, "Setting static-chain for %s\n",
527 lang_hooks.decl_printable_name (info->context, 2));
529 DECL_STATIC_CHAIN (info->context) = 1;
531 return field;
534 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
536 static tree
537 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
538 gcall *call)
540 tree t;
542 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
543 gimple_call_set_lhs (call, t);
544 if (! gsi_end_p (*gsi))
545 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
546 gsi_insert_before (gsi, call, GSI_SAME_STMT);
548 return t;
552 /* Copy EXP into a temporary. Allocate the temporary in the context of
553 INFO and insert the initialization statement before GSI. */
555 static tree
556 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
558 tree t;
559 gimple *stmt;
561 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
562 stmt = gimple_build_assign (t, exp);
563 if (! gsi_end_p (*gsi))
564 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
565 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
567 return t;
571 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
573 static tree
574 gsi_gimplify_val (struct nesting_info *info, tree exp,
575 gimple_stmt_iterator *gsi)
577 if (is_gimple_val (exp))
578 return exp;
579 else
580 return init_tmp_var (info, exp, gsi);
583 /* Similarly, but copy from the temporary and insert the statement
584 after the iterator. */
586 static tree
587 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
589 tree t;
590 gimple *stmt;
592 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
593 stmt = gimple_build_assign (exp, t);
594 if (! gsi_end_p (*gsi))
595 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
596 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
598 return t;
601 /* Build or return the type used to represent a nested function trampoline. */
603 static GTY(()) tree trampoline_type;
605 static tree
606 get_trampoline_type (struct nesting_info *info)
608 unsigned align, size;
609 tree t;
611 if (trampoline_type)
612 return trampoline_type;
614 align = TRAMPOLINE_ALIGNMENT;
615 size = TRAMPOLINE_SIZE;
617 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
618 then allocate extra space so that we can do dynamic alignment. */
619 if (align > STACK_BOUNDARY)
621 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
622 align = STACK_BOUNDARY;
625 t = build_index_type (size_int (size - 1));
626 t = build_array_type (char_type_node, t);
627 t = build_decl (DECL_SOURCE_LOCATION (info->context),
628 FIELD_DECL, get_identifier ("__data"), t);
629 SET_DECL_ALIGN (t, align);
630 DECL_USER_ALIGN (t) = 1;
632 trampoline_type = make_node (RECORD_TYPE);
633 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
634 TYPE_FIELDS (trampoline_type) = t;
635 layout_type (trampoline_type);
636 DECL_CONTEXT (t) = trampoline_type;
638 return trampoline_type;
641 /* Build or return the type used to represent a nested function descriptor. */
643 static GTY(()) tree descriptor_type;
645 static tree
646 get_descriptor_type (struct nesting_info *info)
648 /* The base alignment is that of a function. */
649 const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
650 tree t;
652 if (descriptor_type)
653 return descriptor_type;
655 t = build_index_type (integer_one_node);
656 t = build_array_type (ptr_type_node, t);
657 t = build_decl (DECL_SOURCE_LOCATION (info->context),
658 FIELD_DECL, get_identifier ("__data"), t);
659 SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
660 DECL_USER_ALIGN (t) = 1;
662 descriptor_type = make_node (RECORD_TYPE);
663 TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
664 TYPE_FIELDS (descriptor_type) = t;
665 layout_type (descriptor_type);
666 DECL_CONTEXT (t) = descriptor_type;
668 return descriptor_type;
671 /* Given DECL, a nested function, find or create an element in the
672 var map for this function. */
674 static tree
675 lookup_element_for_decl (struct nesting_info *info, tree decl,
676 enum insert_option insert)
678 if (insert == NO_INSERT)
680 tree *slot = info->var_map->get (decl);
681 return slot ? *slot : NULL_TREE;
684 tree *slot = &info->var_map->get_or_insert (decl);
685 if (!*slot)
686 *slot = build_tree_list (NULL_TREE, NULL_TREE);
688 return (tree) *slot;
691 /* Given DECL, a nested function, create a field in the non-local
692 frame structure for this function. */
694 static tree
695 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
697 tree field = make_node (FIELD_DECL);
698 DECL_NAME (field) = DECL_NAME (decl);
699 TREE_TYPE (field) = type;
700 TREE_ADDRESSABLE (field) = 1;
701 insert_field_into_struct (get_frame_type (info), field);
702 return field;
705 /* Given DECL, a nested function, find or create a field in the non-local
706 frame structure for a trampoline for this function. */
708 static tree
709 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
710 enum insert_option insert)
712 tree elt, field;
714 elt = lookup_element_for_decl (info, decl, insert);
715 if (!elt)
716 return NULL_TREE;
718 field = TREE_PURPOSE (elt);
720 if (!field && insert == INSERT)
722 field = create_field_for_decl (info, decl, get_trampoline_type (info));
723 TREE_PURPOSE (elt) = field;
724 info->any_tramp_created = true;
727 return field;
730 /* Given DECL, a nested function, find or create a field in the non-local
731 frame structure for a descriptor for this function. */
733 static tree
734 lookup_descr_for_decl (struct nesting_info *info, tree decl,
735 enum insert_option insert)
737 tree elt, field;
739 elt = lookup_element_for_decl (info, decl, insert);
740 if (!elt)
741 return NULL_TREE;
743 field = TREE_VALUE (elt);
745 if (!field && insert == INSERT)
747 field = create_field_for_decl (info, decl, get_descriptor_type (info));
748 TREE_VALUE (elt) = field;
749 info->any_descr_created = true;
752 return field;
755 /* Build or return the field within the non-local frame state that holds
756 the non-local goto "jmp_buf". The buffer itself is maintained by the
757 rtl middle-end as dynamic stack space is allocated. */
759 static tree
760 get_nl_goto_field (struct nesting_info *info)
762 tree field = info->nl_goto_field;
763 if (!field)
765 unsigned size;
766 tree type;
768 /* For __builtin_nonlocal_goto, we need N words. The first is the
769 frame pointer, the rest is for the target's stack pointer save
770 area. The number of words is controlled by STACK_SAVEAREA_MODE;
771 not the best interface, but it'll do for now. */
772 if (Pmode == ptr_mode)
773 type = ptr_type_node;
774 else
775 type = lang_hooks.types.type_for_mode (Pmode, 1);
777 scalar_int_mode mode
778 = as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
779 size = GET_MODE_SIZE (mode);
780 size = size / GET_MODE_SIZE (Pmode);
781 size = size + 1;
783 type = build_array_type
784 (type, build_index_type (size_int (size)));
786 field = make_node (FIELD_DECL);
787 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
788 TREE_TYPE (field) = type;
789 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
790 TREE_ADDRESSABLE (field) = 1;
792 insert_field_into_struct (get_frame_type (info), field);
794 info->nl_goto_field = field;
797 return field;
800 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of GIMPLE sequence *PSEQ. */
802 static void
803 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
804 struct nesting_info *info, gimple_seq *pseq)
806 struct walk_stmt_info wi;
808 memset (&wi, 0, sizeof (wi));
809 wi.info = info;
810 wi.val_only = true;
811 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
815 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
817 static inline void
818 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
819 struct nesting_info *info)
821 gimple_seq body = gimple_body (info->context);
822 walk_body (callback_stmt, callback_op, info, &body);
823 gimple_set_body (info->context, body);
826 /* Invoke CALLBACK_STMT/CALLBACK_OP on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
828 static void
829 walk_gimple_omp_for (gomp_for *for_stmt,
830 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
831 struct nesting_info *info)
833 struct walk_stmt_info wi;
834 gimple_seq seq;
835 tree t;
836 size_t i;
838 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
840 seq = NULL;
841 memset (&wi, 0, sizeof (wi));
842 wi.info = info;
843 wi.gsi = gsi_last (seq);
845 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
847 wi.val_only = false;
848 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
849 &wi, NULL);
850 wi.val_only = true;
851 wi.is_lhs = false;
852 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
853 &wi, NULL);
855 wi.val_only = true;
856 wi.is_lhs = false;
857 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
858 &wi, NULL);
860 t = gimple_omp_for_incr (for_stmt, i);
861 gcc_assert (BINARY_CLASS_P (t));
862 wi.val_only = false;
863 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
864 wi.val_only = true;
865 wi.is_lhs = false;
866 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
869 seq = gsi_seq (wi.gsi);
870 if (!gimple_seq_empty_p (seq))
872 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
873 annotate_all_with_location (seq, gimple_location (for_stmt));
874 gimple_seq_add_seq (&pre_body, seq);
875 gimple_omp_for_set_pre_body (for_stmt, pre_body);
879 /* Similarly for ROOT and all functions nested underneath, depth first. */
881 static void
882 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
883 struct nesting_info *root)
885 struct nesting_info *n;
886 FOR_EACH_NEST_INFO (n, root)
887 walk_function (callback_stmt, callback_op, n);
891 /* We have to check for a fairly pathological case. The operands of a
892 nested function are to be interpreted in the context of the enclosing
893 function. So if any are variably-sized, they will get remapped when the
894 enclosing function is inlined. But that remapping would also have to be
895 done in the types of the PARM_DECLs of the nested function, meaning the
896 argument types of that function will disagree with the arguments in the
897 calls to that function. So we'd either have to make a copy of the nested
898 function corresponding to each time the enclosing function was inlined or
899 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
900 function. The former is not practical. The latter would still require
901 detecting this case to know when to add the conversions. So, for now at
902 least, we don't inline such an enclosing function.
904 We have to do that check recursively, so here return indicating whether
905 FNDECL has such a nested function. ORIG_FNDECL is the function we were
906 originally trying to inline; it is used for checking whether any argument
907 is variably modified by anything in it.
909 It would be better to do this in tree-inline.c so that we could give
910 the appropriate warning for why a function can't be inlined, but that's
911 too late since the nesting structure has already been flattened and
912 adding a flag just to record this fact seems a waste of a flag. */
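/* An illustrative example of the problematic shape (the names are invented
   for this comment):

     void outer (int n)
     {
       typedef int row[n];
       void inner (row *p) { (*p)[0] = 0; }
       ...
     }

   The type of INNER's parameter P is variably modified by N, a parameter of
   OUTER.  If OUTER were inlined, N would be remapped in the inlined body but
   not in INNER's PARM_DECL, so the two would disagree; this is why
   create_nesting_tree marks OUTER as DECL_UNINLINABLE.  */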
914 static bool
915 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
917 struct cgraph_node *cgn = cgraph_node::get (fndecl);
918 tree arg;
920 for (cgn = first_nested_function (cgn); cgn;
921 cgn = next_nested_function (cgn))
923 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
924 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
925 return true;
927 if (check_for_nested_with_variably_modified (cgn->decl,
928 orig_fndecl))
929 return true;
932 return false;
935 /* Construct our local datastructure describing the function nesting
936 tree rooted by CGN. */
938 static struct nesting_info *
939 create_nesting_tree (struct cgraph_node *cgn)
941 struct nesting_info *info = XCNEW (struct nesting_info);
942 info->field_map = new hash_map<tree, tree>;
943 info->var_map = new hash_map<tree, tree>;
944 info->mem_refs = new hash_set<tree *>;
945 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
946 info->context = cgn->decl;
947 info->thunk_p = cgn->thunk;
949 for (cgn = first_nested_function (cgn); cgn;
950 cgn = next_nested_function (cgn))
952 struct nesting_info *sub = create_nesting_tree (cgn);
953 sub->outer = info;
954 sub->next = info->inner;
955 info->inner = sub;
958 /* See the discussion at check_for_nested_with_variably_modified for
959 why this has to be here. */
960 if (check_for_nested_with_variably_modified (info->context, info->context))
961 DECL_UNINLINABLE (info->context) = true;
963 return info;
966 /* Return an expression computing the static chain for TARGET_CONTEXT
967 from INFO->CONTEXT. Insert any necessary computations before GSI. */
969 static tree
970 get_static_chain (struct nesting_info *info, tree target_context,
971 gimple_stmt_iterator *gsi)
973 struct nesting_info *i;
974 tree x;
976 if (info->context == target_context)
978 x = build_addr (info->frame_decl);
979 info->static_chain_added |= 1;
981 else
983 x = get_chain_decl (info);
984 info->static_chain_added |= 2;
986 for (i = info->outer; i->context != target_context; i = i->outer)
988 tree field = get_chain_field (i);
990 x = build_simple_mem_ref_notrap (x);
991 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
992 x = init_tmp_var (info, x, gsi);
996 return x;
1000 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
1001 frame as seen from INFO->CONTEXT. Insert any necessary computations
1002 before GSI. */
1004 static tree
1005 get_frame_field (struct nesting_info *info, tree target_context,
1006 tree field, gimple_stmt_iterator *gsi)
1008 struct nesting_info *i;
1009 tree x;
1011 if (info->context == target_context)
1013 /* Make sure frame_decl gets created. */
1014 (void) get_frame_type (info);
1015 x = info->frame_decl;
1016 info->static_chain_added |= 1;
1018 else
1020 x = get_chain_decl (info);
1021 info->static_chain_added |= 2;
1023 for (i = info->outer; i->context != target_context; i = i->outer)
1025 tree field = get_chain_field (i);
1027 x = build_simple_mem_ref_notrap (x);
1028 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1029 x = init_tmp_var (info, x, gsi);
1032 x = build_simple_mem_ref_notrap (x);
1035 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1036 return x;
1039 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
1041 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
1042 in the nested function with DECL_VALUE_EXPR set to reference the true
1043 variable in the parent function. This is used both for debug info
1044 and in OMP lowering. */
1046 static tree
1047 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
1049 tree target_context;
1050 struct nesting_info *i;
1051 tree x, field, new_decl;
1053 tree *slot = &info->var_map->get_or_insert (decl);
1055 if (*slot)
1056 return *slot;
1058 target_context = decl_function_context (decl);
1060 /* A copy of the code in get_frame_field, but without the temporaries. */
1061 if (info->context == target_context)
1063 /* Make sure frame_decl gets created. */
1064 (void) get_frame_type (info);
1065 x = info->frame_decl;
1066 i = info;
1067 info->static_chain_added |= 1;
1069 else
1071 x = get_chain_decl (info);
1072 info->static_chain_added |= 2;
1073 for (i = info->outer; i->context != target_context; i = i->outer)
1075 field = get_chain_field (i);
1076 x = build_simple_mem_ref_notrap (x);
1077 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1079 x = build_simple_mem_ref_notrap (x);
1082 field = lookup_field_for_decl (i, decl, INSERT);
1083 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1084 if (use_pointer_in_frame (decl))
1085 x = build_simple_mem_ref_notrap (x);
1087 /* ??? We should be remapping types as well, surely. */
1088 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1089 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1090 DECL_CONTEXT (new_decl) = info->context;
1091 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1092 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1093 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1094 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1095 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1096 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1097 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1098 if ((TREE_CODE (decl) == PARM_DECL
1099 || TREE_CODE (decl) == RESULT_DECL
1100 || VAR_P (decl))
1101 && DECL_BY_REFERENCE (decl))
1102 DECL_BY_REFERENCE (new_decl) = 1;
1104 SET_DECL_VALUE_EXPR (new_decl, x);
1105 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1107 *slot = new_decl;
1108 DECL_CHAIN (new_decl) = info->debug_var_chain;
1109 info->debug_var_chain = new_decl;
1111 if (!optimize
1112 && info->context != target_context
1113 && variably_modified_type_p (TREE_TYPE (decl), NULL))
1114 note_nonlocal_vla_type (info, TREE_TYPE (decl));
1116 return new_decl;
1120 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1121 and PARM_DECLs that belong to outer functions.
1123 The rewrite will involve some number of structure accesses back up
1124 the static chain. E.g. for a variable FOO up one nesting level it'll
1125 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
1126 indirections apply to decls for which use_pointer_in_frame is true. */
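/* For instance, a read of FOO from two nesting levels up in a context that
   requires a gimple value is rewritten roughly as (GIMPLE-like pseudocode;
   the temporary names are illustrative only, while CHAIN and __chain are the
   names created by get_chain_decl and get_chain_field):

     D.1 = CHAIN->__chain;
     D.2 = D.1->FOO;

   with D.2 replacing the original use of FOO.  A write in the same position
   instead goes through save_tmp_var, so the store back into the frame is
   emitted after the statement being rewritten.  */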
1128 static tree
1129 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
1131 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1132 struct nesting_info *const info = (struct nesting_info *) wi->info;
1133 tree t = *tp;
1135 *walk_subtrees = 0;
1136 switch (TREE_CODE (t))
1138 case VAR_DECL:
1139 /* Non-automatic variables are never processed. */
1140 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1141 break;
1142 /* FALLTHRU */
1144 case PARM_DECL:
1146 tree x, target_context = decl_function_context (t);
1148 if (info->context == target_context)
1149 break;
1151 wi->changed = true;
1153 if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1154 x = get_nonlocal_debug_decl (info, t);
1155 else
1157 struct nesting_info *i = info;
1158 while (i && i->context != target_context)
1159 i = i->outer;
1160 /* If none of the outer contexts is the target context, this means
1161 that the VAR or PARM_DECL is referenced in a wrong context. */
1162 if (!i)
1163 internal_error ("%s from %s referenced in %s",
1164 IDENTIFIER_POINTER (DECL_NAME (t)),
1165 IDENTIFIER_POINTER (DECL_NAME (target_context)),
1166 IDENTIFIER_POINTER (DECL_NAME (info->context)));
1168 x = lookup_field_for_decl (i, t, INSERT);
1169 x = get_frame_field (info, target_context, x, &wi->gsi);
1170 if (use_pointer_in_frame (t))
1172 x = init_tmp_var (info, x, &wi->gsi);
1173 x = build_simple_mem_ref_notrap (x);
1177 if (wi->val_only)
1179 if (wi->is_lhs)
1180 x = save_tmp_var (info, x, &wi->gsi);
1181 else
1182 x = init_tmp_var (info, x, &wi->gsi);
1185 *tp = x;
1187 break;
1189 case LABEL_DECL:
1190 /* We're taking the address of a label from a parent function, but
1191 this is not itself a non-local goto. Mark the label such that it
1192 will not be deleted, much as we would with a label address in
1193 static storage. */
1194 if (decl_function_context (t) != info->context)
1195 FORCED_LABEL (t) = 1;
1196 break;
1198 case ADDR_EXPR:
1200 bool save_val_only = wi->val_only;
1202 wi->val_only = false;
1203 wi->is_lhs = false;
1204 wi->changed = false;
1205 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
1206 wi->val_only = true;
1208 if (wi->changed)
1210 tree save_context;
1212 /* If we changed anything, we might no longer be directly
1213 referencing a decl. */
1214 save_context = current_function_decl;
1215 current_function_decl = info->context;
1216 recompute_tree_invariant_for_addr_expr (t);
1218 /* If the callback converted the address argument in a context
1219 where we only accept variables (and min_invariant, presumably),
1220 then compute the address into a temporary. */
1221 if (save_val_only)
1222 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1223 t, &wi->gsi);
1224 current_function_decl = save_context;
1227 break;
1229 case REALPART_EXPR:
1230 case IMAGPART_EXPR:
1231 case COMPONENT_REF:
1232 case ARRAY_REF:
1233 case ARRAY_RANGE_REF:
1234 case BIT_FIELD_REF:
1235 /* Go down this entire nest and just look at the final prefix and
1236 anything that describes the references. Otherwise, we lose track
1237 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1238 wi->val_only = true;
1239 wi->is_lhs = false;
1240 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1242 if (TREE_CODE (t) == COMPONENT_REF)
1243 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1244 NULL);
1245 else if (TREE_CODE (t) == ARRAY_REF
1246 || TREE_CODE (t) == ARRAY_RANGE_REF)
1248 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1249 wi, NULL);
1250 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1251 wi, NULL);
1252 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1253 wi, NULL);
1256 wi->val_only = false;
1257 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1258 break;
1260 case VIEW_CONVERT_EXPR:
1261 /* Just request to look at the subtrees, leaving val_only and lhs
1262 untouched. This might actually be for !val_only + lhs, in which
1263 case we don't want to force a replacement by a temporary. */
1264 *walk_subtrees = 1;
1265 break;
1267 default:
1268 if (!IS_TYPE_OR_DECL_P (t))
1270 *walk_subtrees = 1;
1271 wi->val_only = true;
1272 wi->is_lhs = false;
1274 break;
1277 return NULL_TREE;
1280 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1281 struct walk_stmt_info *);
1283 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1284 and PARM_DECLs that belong to outer functions. */
1286 static bool
1287 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1289 struct nesting_info *const info = (struct nesting_info *) wi->info;
1290 bool need_chain = false, need_stmts = false;
1291 tree clause, decl, *pdecl;
1292 int dummy;
1293 bitmap new_suppress;
1295 new_suppress = BITMAP_GGC_ALLOC ();
1296 bitmap_copy (new_suppress, info->suppress_expansion);
1298 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1300 pdecl = NULL;
1301 switch (OMP_CLAUSE_CODE (clause))
1303 case OMP_CLAUSE_REDUCTION:
1304 case OMP_CLAUSE_IN_REDUCTION:
1305 case OMP_CLAUSE_TASK_REDUCTION:
1306 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1307 need_stmts = true;
1308 if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
1310 pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
1311 if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
1312 pdecl = &TREE_OPERAND (*pdecl, 0);
1313 if (TREE_CODE (*pdecl) == INDIRECT_REF
1314 || TREE_CODE (*pdecl) == ADDR_EXPR)
1315 pdecl = &TREE_OPERAND (*pdecl, 0);
1317 goto do_decl_clause;
1319 case OMP_CLAUSE_LASTPRIVATE:
1320 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1321 need_stmts = true;
1322 goto do_decl_clause;
1324 case OMP_CLAUSE_LINEAR:
1325 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1326 need_stmts = true;
1327 wi->val_only = true;
1328 wi->is_lhs = false;
1329 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1330 &dummy, wi);
1331 goto do_decl_clause;
1333 case OMP_CLAUSE_PRIVATE:
1334 case OMP_CLAUSE_FIRSTPRIVATE:
1335 case OMP_CLAUSE_COPYPRIVATE:
1336 case OMP_CLAUSE_SHARED:
1337 case OMP_CLAUSE_TO_DECLARE:
1338 case OMP_CLAUSE_LINK:
1339 case OMP_CLAUSE_USE_DEVICE_PTR:
1340 case OMP_CLAUSE_USE_DEVICE_ADDR:
1341 case OMP_CLAUSE_IS_DEVICE_PTR:
1342 case OMP_CLAUSE_DETACH:
1343 do_decl_clause:
1344 if (pdecl == NULL)
1345 pdecl = &OMP_CLAUSE_DECL (clause);
1346 decl = *pdecl;
1347 if (VAR_P (decl)
1348 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1349 break;
1350 if (decl_function_context (decl) != info->context)
1352 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1353 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1354 bitmap_set_bit (new_suppress, DECL_UID (decl));
1355 *pdecl = get_nonlocal_debug_decl (info, decl);
1356 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1357 need_chain = true;
1359 break;
1361 case OMP_CLAUSE_SCHEDULE:
1362 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1363 break;
1364 /* FALLTHRU */
1365 case OMP_CLAUSE_FINAL:
1366 case OMP_CLAUSE_IF:
1367 case OMP_CLAUSE_NUM_THREADS:
1368 case OMP_CLAUSE_DEPEND:
1369 case OMP_CLAUSE_DEVICE:
1370 case OMP_CLAUSE_NUM_TEAMS:
1371 case OMP_CLAUSE_THREAD_LIMIT:
1372 case OMP_CLAUSE_SAFELEN:
1373 case OMP_CLAUSE_SIMDLEN:
1374 case OMP_CLAUSE_PRIORITY:
1375 case OMP_CLAUSE_GRAINSIZE:
1376 case OMP_CLAUSE_NUM_TASKS:
1377 case OMP_CLAUSE_HINT:
1378 case OMP_CLAUSE_NUM_GANGS:
1379 case OMP_CLAUSE_NUM_WORKERS:
1380 case OMP_CLAUSE_VECTOR_LENGTH:
1381 case OMP_CLAUSE_GANG:
1382 case OMP_CLAUSE_WORKER:
1383 case OMP_CLAUSE_VECTOR:
1384 case OMP_CLAUSE_ASYNC:
1385 case OMP_CLAUSE_WAIT:
1386 /* Several OpenACC clauses have optional arguments. Check if they
1387 are present. */
1388 if (OMP_CLAUSE_OPERAND (clause, 0))
1390 wi->val_only = true;
1391 wi->is_lhs = false;
1392 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1393 &dummy, wi);
1396 /* The gang clause accepts two arguments. */
1397 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1398 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1400 wi->val_only = true;
1401 wi->is_lhs = false;
1402 convert_nonlocal_reference_op
1403 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1405 break;
1407 case OMP_CLAUSE_DIST_SCHEDULE:
1408 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1410 wi->val_only = true;
1411 wi->is_lhs = false;
1412 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1413 &dummy, wi);
1415 break;
1417 case OMP_CLAUSE_MAP:
1418 case OMP_CLAUSE_TO:
1419 case OMP_CLAUSE_FROM:
1420 if (OMP_CLAUSE_SIZE (clause))
1422 wi->val_only = true;
1423 wi->is_lhs = false;
1424 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1425 &dummy, wi);
1427 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1428 goto do_decl_clause;
1429 wi->val_only = true;
1430 wi->is_lhs = false;
1431 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1432 wi, NULL);
1433 break;
1435 case OMP_CLAUSE_ALIGNED:
1436 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1438 wi->val_only = true;
1439 wi->is_lhs = false;
1440 convert_nonlocal_reference_op
1441 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1443 /* FALLTHRU */
1444 case OMP_CLAUSE_NONTEMPORAL:
1445 do_decl_clause_no_supp:
1446 /* Like do_decl_clause, but don't add any suppression. */
1447 decl = OMP_CLAUSE_DECL (clause);
1448 if (VAR_P (decl)
1449 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1450 break;
1451 if (decl_function_context (decl) != info->context)
1453 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1454 need_chain = true;
1456 break;
1458 case OMP_CLAUSE_ALLOCATE:
1459 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause))
1461 wi->val_only = true;
1462 wi->is_lhs = false;
1463 convert_nonlocal_reference_op
1464 (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), &dummy, wi);
1466 goto do_decl_clause_no_supp;
1468 case OMP_CLAUSE_NOWAIT:
1469 case OMP_CLAUSE_ORDERED:
1470 case OMP_CLAUSE_DEFAULT:
1471 case OMP_CLAUSE_COPYIN:
1472 case OMP_CLAUSE_COLLAPSE:
1473 case OMP_CLAUSE_TILE:
1474 case OMP_CLAUSE_UNTIED:
1475 case OMP_CLAUSE_MERGEABLE:
1476 case OMP_CLAUSE_PROC_BIND:
1477 case OMP_CLAUSE_NOGROUP:
1478 case OMP_CLAUSE_THREADS:
1479 case OMP_CLAUSE_SIMD:
1480 case OMP_CLAUSE_DEFAULTMAP:
1481 case OMP_CLAUSE_ORDER:
1482 case OMP_CLAUSE_SEQ:
1483 case OMP_CLAUSE_INDEPENDENT:
1484 case OMP_CLAUSE_AUTO:
1485 case OMP_CLAUSE_IF_PRESENT:
1486 case OMP_CLAUSE_FINALIZE:
1487 case OMP_CLAUSE_BIND:
1488 case OMP_CLAUSE__CONDTEMP_:
1489 case OMP_CLAUSE__SCANTEMP_:
1490 break;
1492 /* The following clause belongs to the OpenACC cache directive, which
1493 is discarded during gimplification. */
1494 case OMP_CLAUSE__CACHE_:
1495 /* The following clauses are only allowed in the OpenMP declare simd
1496 directive, so not seen here. */
1497 case OMP_CLAUSE_UNIFORM:
1498 case OMP_CLAUSE_INBRANCH:
1499 case OMP_CLAUSE_NOTINBRANCH:
1500 /* The following clauses are only allowed on OpenMP cancel and
1501 cancellation point directives, which at this point have already
1502 been lowered into a function call. */
1503 case OMP_CLAUSE_FOR:
1504 case OMP_CLAUSE_PARALLEL:
1505 case OMP_CLAUSE_SECTIONS:
1506 case OMP_CLAUSE_TASKGROUP:
1507 /* The following clauses are only added during OMP lowering; nested
1508 function decomposition happens before that. */
1509 case OMP_CLAUSE__LOOPTEMP_:
1510 case OMP_CLAUSE__REDUCTEMP_:
1511 case OMP_CLAUSE__SIMDUID_:
1512 case OMP_CLAUSE__SIMT_:
1513 /* Anything else. */
1514 default:
1515 gcc_unreachable ();
1519 info->suppress_expansion = new_suppress;
1521 if (need_stmts)
1522 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1523 switch (OMP_CLAUSE_CODE (clause))
1525 case OMP_CLAUSE_REDUCTION:
1526 case OMP_CLAUSE_IN_REDUCTION:
1527 case OMP_CLAUSE_TASK_REDUCTION:
1528 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1530 tree old_context
1531 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1532 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1533 = info->context;
1534 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1535 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1536 = info->context;
1537 tree save_local_var_chain = info->new_local_var_chain;
1538 info->new_local_var_chain = NULL;
1539 gimple_seq *seq = &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause);
1540 walk_body (convert_nonlocal_reference_stmt,
1541 convert_nonlocal_reference_op, info, seq);
1542 if (info->new_local_var_chain)
1543 declare_vars (info->new_local_var_chain,
1544 gimple_seq_first_stmt (*seq), false);
1545 info->new_local_var_chain = NULL;
1546 seq = &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause);
1547 walk_body (convert_nonlocal_reference_stmt,
1548 convert_nonlocal_reference_op, info, seq);
1549 if (info->new_local_var_chain)
1550 declare_vars (info->new_local_var_chain,
1551 gimple_seq_first_stmt (*seq), false);
1552 info->new_local_var_chain = save_local_var_chain;
1553 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1554 = old_context;
1555 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1556 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1557 = old_context;
1559 break;
1561 case OMP_CLAUSE_LASTPRIVATE:
1563 tree save_local_var_chain = info->new_local_var_chain;
1564 info->new_local_var_chain = NULL;
1565 gimple_seq *seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
1566 walk_body (convert_nonlocal_reference_stmt,
1567 convert_nonlocal_reference_op, info, seq);
1568 if (info->new_local_var_chain)
1569 declare_vars (info->new_local_var_chain,
1570 gimple_seq_first_stmt (*seq), false);
1571 info->new_local_var_chain = save_local_var_chain;
1573 break;
1575 case OMP_CLAUSE_LINEAR:
1577 tree save_local_var_chain = info->new_local_var_chain;
1578 info->new_local_var_chain = NULL;
1579 gimple_seq *seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause);
1580 walk_body (convert_nonlocal_reference_stmt,
1581 convert_nonlocal_reference_op, info, seq);
1582 if (info->new_local_var_chain)
1583 declare_vars (info->new_local_var_chain,
1584 gimple_seq_first_stmt (*seq), false);
1585 info->new_local_var_chain = save_local_var_chain;
1587 break;
1589 default:
1590 break;
1593 return need_chain;
1596 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1598 static void
1599 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1601 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1602 type = TREE_TYPE (type);
1604 if (TYPE_NAME (type)
1605 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1606 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1607 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1609 while (POINTER_TYPE_P (type)
1610 || TREE_CODE (type) == VECTOR_TYPE
1611 || TREE_CODE (type) == FUNCTION_TYPE
1612 || TREE_CODE (type) == METHOD_TYPE)
1613 type = TREE_TYPE (type);
1615 if (TREE_CODE (type) == ARRAY_TYPE)
1617 tree domain, t;
1619 note_nonlocal_vla_type (info, TREE_TYPE (type));
1620 domain = TYPE_DOMAIN (type);
1621 if (domain)
1623 t = TYPE_MIN_VALUE (domain);
1624 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1625 && decl_function_context (t) != info->context)
1626 get_nonlocal_debug_decl (info, t);
1627 t = TYPE_MAX_VALUE (domain);
1628 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1629 && decl_function_context (t) != info->context)
1630 get_nonlocal_debug_decl (info, t);
1635 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1636 PARM_DECLs that belong to outer functions. This handles statements
1637 that are not handled via the standard recursion done in
1638 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1639 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1640 operands of STMT have been handled by this function. */
1642 static tree
1643 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1644 struct walk_stmt_info *wi)
1646 struct nesting_info *info = (struct nesting_info *) wi->info;
1647 tree save_local_var_chain;
1648 bitmap save_suppress;
1649 gimple *stmt = gsi_stmt (*gsi);
1651 switch (gimple_code (stmt))
1653 case GIMPLE_GOTO:
1654 /* Don't walk non-local gotos for now. */
1655 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1657 wi->val_only = true;
1658 wi->is_lhs = false;
1659 *handled_ops_p = false;
1660 return NULL_TREE;
1662 break;
1664 case GIMPLE_OMP_TEAMS:
1665 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
1667 save_suppress = info->suppress_expansion;
1668 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
1669 wi);
1670 walk_body (convert_nonlocal_reference_stmt,
1671 convert_nonlocal_reference_op, info,
1672 gimple_omp_body_ptr (stmt));
1673 info->suppress_expansion = save_suppress;
1674 break;
1676 /* FALLTHRU */
1678 case GIMPLE_OMP_PARALLEL:
1679 case GIMPLE_OMP_TASK:
1680 save_suppress = info->suppress_expansion;
1681 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1682 wi))
1684 tree c, decl;
1685 decl = get_chain_decl (info);
1686 c = build_omp_clause (gimple_location (stmt),
1687 OMP_CLAUSE_FIRSTPRIVATE);
1688 OMP_CLAUSE_DECL (c) = decl;
1689 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1690 gimple_omp_taskreg_set_clauses (stmt, c);
1693 save_local_var_chain = info->new_local_var_chain;
1694 info->new_local_var_chain = NULL;
1696 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1697 info, gimple_omp_body_ptr (stmt));
1699 if (info->new_local_var_chain)
1700 declare_vars (info->new_local_var_chain,
1701 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1702 false);
1703 info->new_local_var_chain = save_local_var_chain;
1704 info->suppress_expansion = save_suppress;
1705 break;
1707 case GIMPLE_OMP_FOR:
1708 save_suppress = info->suppress_expansion;
1709 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1710 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1711 convert_nonlocal_reference_stmt,
1712 convert_nonlocal_reference_op, info);
1713 walk_body (convert_nonlocal_reference_stmt,
1714 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1715 info->suppress_expansion = save_suppress;
1716 break;
1718 case GIMPLE_OMP_SECTIONS:
1719 save_suppress = info->suppress_expansion;
1720 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1721 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1722 info, gimple_omp_body_ptr (stmt));
1723 info->suppress_expansion = save_suppress;
1724 break;
1726 case GIMPLE_OMP_SINGLE:
1727 save_suppress = info->suppress_expansion;
1728 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1729 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1730 info, gimple_omp_body_ptr (stmt));
1731 info->suppress_expansion = save_suppress;
1732 break;
1734 case GIMPLE_OMP_TASKGROUP:
1735 save_suppress = info->suppress_expansion;
1736 convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
1737 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1738 info, gimple_omp_body_ptr (stmt));
1739 info->suppress_expansion = save_suppress;
1740 break;
1742 case GIMPLE_OMP_TARGET:
1743 if (!is_gimple_omp_offloaded (stmt))
1745 save_suppress = info->suppress_expansion;
1746 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1747 wi);
1748 info->suppress_expansion = save_suppress;
1749 walk_body (convert_nonlocal_reference_stmt,
1750 convert_nonlocal_reference_op, info,
1751 gimple_omp_body_ptr (stmt));
1752 break;
1754 save_suppress = info->suppress_expansion;
1755 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1756 wi))
1758 tree c, decl;
1759 decl = get_chain_decl (info);
1760 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1761 OMP_CLAUSE_DECL (c) = decl;
1762 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1763 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1764 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1765 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1768 save_local_var_chain = info->new_local_var_chain;
1769 info->new_local_var_chain = NULL;
1771 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1772 info, gimple_omp_body_ptr (stmt));
1774 if (info->new_local_var_chain)
1775 declare_vars (info->new_local_var_chain,
1776 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1777 false);
1778 info->new_local_var_chain = save_local_var_chain;
1779 info->suppress_expansion = save_suppress;
1780 break;
1782 case GIMPLE_OMP_SECTION:
1783 case GIMPLE_OMP_MASTER:
1784 case GIMPLE_OMP_ORDERED:
1785 case GIMPLE_OMP_SCAN:
1786 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1787 info, gimple_omp_body_ptr (stmt));
1788 break;
1790 case GIMPLE_BIND:
1792 gbind *bind_stmt = as_a <gbind *> (stmt);
1794 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1795 if (TREE_CODE (var) == NAMELIST_DECL)
1797 /* Adjust decls mentioned in NAMELIST_DECL. */
1798 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1799 tree decl;
1800 unsigned int i;
1802 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1804 if (VAR_P (decl)
1805 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1806 continue;
1807 if (decl_function_context (decl) != info->context)
1808 CONSTRUCTOR_ELT (decls, i)->value
1809 = get_nonlocal_debug_decl (info, decl);
1813 *handled_ops_p = false;
1814 return NULL_TREE;
1816 case GIMPLE_COND:
1817 wi->val_only = true;
1818 wi->is_lhs = false;
1819 *handled_ops_p = false;
1820 return NULL_TREE;
1822 case GIMPLE_ASSIGN:
1823 if (gimple_clobber_p (stmt))
1825 tree lhs = gimple_assign_lhs (stmt);
1826 if (DECL_P (lhs)
1827 && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
1828 && decl_function_context (lhs) != info->context)
1830 gsi_replace (gsi, gimple_build_nop (), true);
1831 break;
1834 *handled_ops_p = false;
1835 return NULL_TREE;
1837 default:
1838 /* For every other statement that we are not interested in
1839 handling here, let the walker traverse the operands. */
1840 *handled_ops_p = false;
1841 return NULL_TREE;
1844 /* We have handled all of STMT operands, no need to traverse the operands. */
1845 *handled_ops_p = true;
1846 return NULL_TREE;
1850 /* A subroutine of convert_local_reference. Create a local variable
1851 in the parent function with DECL_VALUE_EXPR set to reference the
1852 field in FRAME. This is used both for debug info and in OMP
1853 lowering. */
1855 static tree
1856 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1858 tree x, new_decl;
1860 tree *slot = &info->var_map->get_or_insert (decl);
1861 if (*slot)
1862 return *slot;
1864 /* Make sure frame_decl gets created. */
1865 (void) get_frame_type (info);
1866 x = info->frame_decl;
1867 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1869 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1870 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1871 DECL_CONTEXT (new_decl) = info->context;
1872 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1873 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1874 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1875 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1876 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1877 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1878 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1879 if ((TREE_CODE (decl) == PARM_DECL
1880 || TREE_CODE (decl) == RESULT_DECL
1881 || VAR_P (decl))
1882 && DECL_BY_REFERENCE (decl))
1883 DECL_BY_REFERENCE (new_decl) = 1;
1885 SET_DECL_VALUE_EXPR (new_decl, x);
1886 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1887 *slot = new_decl;
1889 DECL_CHAIN (new_decl) = info->debug_var_chain;
1890 info->debug_var_chain = new_decl;
1892 /* Do not emit debug info twice. */
1893 DECL_IGNORED_P (decl) = 1;
1895 return new_decl;
1899 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1900 and PARM_DECLs that were referenced by inner nested functions.
1901 The rewrite will be a structure reference to the local frame variable. */
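/* Rough source-level picture of this rewrite (illustration only), for a
   parent local X that a nested function also uses:

       before:  x = x + 1;              (in the parent)
       after:   FRAME.x = FRAME.x + 1;

   When a GIMPLE value is required, the frame field is first loaded into or
   stored from a temporary (init_tmp_var / save_tmp_var below).  */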
1903 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1905 static tree
1906 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1908 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1909 struct nesting_info *const info = (struct nesting_info *) wi->info;
1910 tree t = *tp, field, x;
1911 bool save_val_only;
1913 *walk_subtrees = 0;
1914 switch (TREE_CODE (t))
1916 case VAR_DECL:
1917 /* Non-automatic variables are never processed. */
1918 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1919 break;
1920 /* FALLTHRU */
1922 case PARM_DECL:
1923 if (t != info->frame_decl && decl_function_context (t) == info->context)
1925 /* If we copied a pointer to the frame, then the original decl
1926 is used unchanged in the parent function. */
1927 if (use_pointer_in_frame (t))
1928 break;
1930 /* No need to transform anything if no child references the
1931 variable. */
1932 field = lookup_field_for_decl (info, t, NO_INSERT);
1933 if (!field)
1934 break;
1935 wi->changed = true;
1937 if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1938 x = get_local_debug_decl (info, t, field);
1939 else
1940 x = get_frame_field (info, info->context, field, &wi->gsi);
1942 if (wi->val_only)
1944 if (wi->is_lhs)
1945 x = save_tmp_var (info, x, &wi->gsi);
1946 else
1947 x = init_tmp_var (info, x, &wi->gsi);
1950 *tp = x;
1952 break;
1954 case ADDR_EXPR:
1955 save_val_only = wi->val_only;
1956 wi->val_only = false;
1957 wi->is_lhs = false;
1958 wi->changed = false;
1959 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1960 wi->val_only = save_val_only;
1962 /* If we converted anything ... */
1963 if (wi->changed)
1965 tree save_context;
1967 /* Then the frame decl is now addressable. */
1968 TREE_ADDRESSABLE (info->frame_decl) = 1;
1970 save_context = current_function_decl;
1971 current_function_decl = info->context;
1972 recompute_tree_invariant_for_addr_expr (t);
1974 /* If we are in a context where we only accept values, then
1975 compute the address into a temporary. */
1976 if (save_val_only)
1977 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1978 t, &wi->gsi);
1979 current_function_decl = save_context;
1981 break;
1983 case REALPART_EXPR:
1984 case IMAGPART_EXPR:
1985 case COMPONENT_REF:
1986 case ARRAY_REF:
1987 case ARRAY_RANGE_REF:
1988 case BIT_FIELD_REF:
1989 /* Go down this entire nest and just look at the final prefix and
1990 anything that describes the references. Otherwise, we lose track
1991 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1992 save_val_only = wi->val_only;
1993 wi->val_only = true;
1994 wi->is_lhs = false;
1995 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1997 if (TREE_CODE (t) == COMPONENT_REF)
1998 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1999 NULL);
2000 else if (TREE_CODE (t) == ARRAY_REF
2001 || TREE_CODE (t) == ARRAY_RANGE_REF)
2003 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
2004 NULL);
2005 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
2006 NULL);
2007 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
2008 NULL);
2011 wi->val_only = false;
2012 walk_tree (tp, convert_local_reference_op, wi, NULL);
2013 wi->val_only = save_val_only;
2014 break;
2016 case MEM_REF:
2017 save_val_only = wi->val_only;
2018 wi->val_only = true;
2019 wi->is_lhs = false;
2020 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
2021 wi, NULL);
2022 /* We need to re-fold the MEM_REF, as component references that are
2023 part of an ADDR_EXPR address are not allowed. But we cannot
2024 fold here, as the chain record type is not yet finalized. */
2025 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
2026 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2027 info->mem_refs->add (tp);
2028 wi->val_only = save_val_only;
2029 break;
2031 case VIEW_CONVERT_EXPR:
2032 /* Just request to look at the subtrees, leaving val_only and lhs
2033 untouched. This might actually be for !val_only + lhs, in which
2034 case we don't want to force a replacement by a temporary. */
2035 *walk_subtrees = 1;
2036 break;
2038 default:
2039 if (!IS_TYPE_OR_DECL_P (t))
2041 *walk_subtrees = 1;
2042 wi->val_only = true;
2043 wi->is_lhs = false;
2045 break;
2048 return NULL_TREE;
2051 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
2052 struct walk_stmt_info *);
2054 /* Helper for convert_local_reference. Convert all the references in
2055 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
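/* For example (illustration only): given "#pragma omp parallel shared(x)"
   in the parent, where X has been moved into the frame, the clause decl is
   replaced by the local debug decl whose DECL_VALUE_EXPR is FRAME.x,
   DECL_UID (x) is added to the new suppression bitmap, and the function
   returns true so the caller knows the FRAME object itself must be made
   available to the region.  */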
2057 static bool
2058 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
2060 struct nesting_info *const info = (struct nesting_info *) wi->info;
2061 bool need_frame = false, need_stmts = false;
2062 tree clause, decl, *pdecl;
2063 int dummy;
2064 bitmap new_suppress;
2066 new_suppress = BITMAP_GGC_ALLOC ();
2067 bitmap_copy (new_suppress, info->suppress_expansion);
2069 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2071 pdecl = NULL;
2072 switch (OMP_CLAUSE_CODE (clause))
2074 case OMP_CLAUSE_REDUCTION:
2075 case OMP_CLAUSE_IN_REDUCTION:
2076 case OMP_CLAUSE_TASK_REDUCTION:
2077 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2078 need_stmts = true;
2079 if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
2081 pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
2082 if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
2083 pdecl = &TREE_OPERAND (*pdecl, 0);
2084 if (TREE_CODE (*pdecl) == INDIRECT_REF
2085 || TREE_CODE (*pdecl) == ADDR_EXPR)
2086 pdecl = &TREE_OPERAND (*pdecl, 0);
2088 goto do_decl_clause;
2090 case OMP_CLAUSE_LASTPRIVATE:
2091 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
2092 need_stmts = true;
2093 goto do_decl_clause;
2095 case OMP_CLAUSE_LINEAR:
2096 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
2097 need_stmts = true;
2098 wi->val_only = true;
2099 wi->is_lhs = false;
2100 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
2101 wi);
2102 goto do_decl_clause;
2104 case OMP_CLAUSE_PRIVATE:
2105 case OMP_CLAUSE_FIRSTPRIVATE:
2106 case OMP_CLAUSE_COPYPRIVATE:
2107 case OMP_CLAUSE_SHARED:
2108 case OMP_CLAUSE_TO_DECLARE:
2109 case OMP_CLAUSE_LINK:
2110 case OMP_CLAUSE_USE_DEVICE_PTR:
2111 case OMP_CLAUSE_USE_DEVICE_ADDR:
2112 case OMP_CLAUSE_IS_DEVICE_PTR:
2113 case OMP_CLAUSE_DETACH:
2114 do_decl_clause:
2115 if (pdecl == NULL)
2116 pdecl = &OMP_CLAUSE_DECL (clause);
2117 decl = *pdecl;
2118 if (VAR_P (decl)
2119 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2120 break;
2121 if (decl_function_context (decl) == info->context
2122 && !use_pointer_in_frame (decl))
2124 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2125 if (field)
2127 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
2128 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
2129 bitmap_set_bit (new_suppress, DECL_UID (decl));
2130 *pdecl = get_local_debug_decl (info, decl, field);
2131 need_frame = true;
2134 break;
2136 case OMP_CLAUSE_SCHEDULE:
2137 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
2138 break;
2139 /* FALLTHRU */
2140 case OMP_CLAUSE_FINAL:
2141 case OMP_CLAUSE_IF:
2142 case OMP_CLAUSE_NUM_THREADS:
2143 case OMP_CLAUSE_DEPEND:
2144 case OMP_CLAUSE_DEVICE:
2145 case OMP_CLAUSE_NUM_TEAMS:
2146 case OMP_CLAUSE_THREAD_LIMIT:
2147 case OMP_CLAUSE_SAFELEN:
2148 case OMP_CLAUSE_SIMDLEN:
2149 case OMP_CLAUSE_PRIORITY:
2150 case OMP_CLAUSE_GRAINSIZE:
2151 case OMP_CLAUSE_NUM_TASKS:
2152 case OMP_CLAUSE_HINT:
2153 case OMP_CLAUSE_NUM_GANGS:
2154 case OMP_CLAUSE_NUM_WORKERS:
2155 case OMP_CLAUSE_VECTOR_LENGTH:
2156 case OMP_CLAUSE_GANG:
2157 case OMP_CLAUSE_WORKER:
2158 case OMP_CLAUSE_VECTOR:
2159 case OMP_CLAUSE_ASYNC:
2160 case OMP_CLAUSE_WAIT:
2161 /* Several OpenACC clauses have optional arguments. Check if they
2162 are present. */
2163 if (OMP_CLAUSE_OPERAND (clause, 0))
2165 wi->val_only = true;
2166 wi->is_lhs = false;
2167 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2168 &dummy, wi);
2171 /* The gang clause accepts two arguments. */
2172 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
2173 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
2175 wi->val_only = true;
2176 wi->is_lhs = false;
2177 convert_nonlocal_reference_op
2178 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2180 break;
2182 case OMP_CLAUSE_DIST_SCHEDULE:
2183 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2185 wi->val_only = true;
2186 wi->is_lhs = false;
2187 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2188 &dummy, wi);
2190 break;
2192 case OMP_CLAUSE_MAP:
2193 case OMP_CLAUSE_TO:
2194 case OMP_CLAUSE_FROM:
2195 if (OMP_CLAUSE_SIZE (clause))
2197 wi->val_only = true;
2198 wi->is_lhs = false;
2199 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2200 &dummy, wi);
2202 if (DECL_P (OMP_CLAUSE_DECL (clause)))
2203 goto do_decl_clause;
2204 wi->val_only = true;
2205 wi->is_lhs = false;
2206 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2207 wi, NULL);
2208 break;
2210 case OMP_CLAUSE_ALIGNED:
2211 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2213 wi->val_only = true;
2214 wi->is_lhs = false;
2215 convert_local_reference_op
2216 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2218 /* FALLTHRU */
2219 case OMP_CLAUSE_NONTEMPORAL:
2220 do_decl_clause_no_supp:
2221 /* Like do_decl_clause, but don't add any suppression. */
2222 decl = OMP_CLAUSE_DECL (clause);
2223 if (VAR_P (decl)
2224 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2225 break;
2226 if (decl_function_context (decl) == info->context
2227 && !use_pointer_in_frame (decl))
2229 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2230 if (field)
2232 OMP_CLAUSE_DECL (clause)
2233 = get_local_debug_decl (info, decl, field);
2234 need_frame = true;
2237 break;
2239 case OMP_CLAUSE_ALLOCATE:
2240 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause))
2242 wi->val_only = true;
2243 wi->is_lhs = false;
2244 convert_local_reference_op
2245 (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), &dummy, wi);
2247 goto do_decl_clause_no_supp;
2249 case OMP_CLAUSE_NOWAIT:
2250 case OMP_CLAUSE_ORDERED:
2251 case OMP_CLAUSE_DEFAULT:
2252 case OMP_CLAUSE_COPYIN:
2253 case OMP_CLAUSE_COLLAPSE:
2254 case OMP_CLAUSE_TILE:
2255 case OMP_CLAUSE_UNTIED:
2256 case OMP_CLAUSE_MERGEABLE:
2257 case OMP_CLAUSE_PROC_BIND:
2258 case OMP_CLAUSE_NOGROUP:
2259 case OMP_CLAUSE_THREADS:
2260 case OMP_CLAUSE_SIMD:
2261 case OMP_CLAUSE_DEFAULTMAP:
2262 case OMP_CLAUSE_ORDER:
2263 case OMP_CLAUSE_SEQ:
2264 case OMP_CLAUSE_INDEPENDENT:
2265 case OMP_CLAUSE_AUTO:
2266 case OMP_CLAUSE_IF_PRESENT:
2267 case OMP_CLAUSE_FINALIZE:
2268 case OMP_CLAUSE_BIND:
2269 case OMP_CLAUSE__CONDTEMP_:
2270 case OMP_CLAUSE__SCANTEMP_:
2271 break;
2273 /* The following clause belongs to the OpenACC cache directive, which
2274 is discarded during gimplification. */
2275 case OMP_CLAUSE__CACHE_:
2276 /* The following clauses are only allowed in the OpenMP declare simd
2277 directive, so not seen here. */
2278 case OMP_CLAUSE_UNIFORM:
2279 case OMP_CLAUSE_INBRANCH:
2280 case OMP_CLAUSE_NOTINBRANCH:
2281 /* The following clauses are only allowed on OpenMP cancel and
2282 cancellation point directives, which at this point have already
2283 been lowered into a function call. */
2284 case OMP_CLAUSE_FOR:
2285 case OMP_CLAUSE_PARALLEL:
2286 case OMP_CLAUSE_SECTIONS:
2287 case OMP_CLAUSE_TASKGROUP:
2288 /* The following clauses are only added during OMP lowering; nested
2289 function decomposition happens before that. */
2290 case OMP_CLAUSE__LOOPTEMP_:
2291 case OMP_CLAUSE__REDUCTEMP_:
2292 case OMP_CLAUSE__SIMDUID_:
2293 case OMP_CLAUSE__SIMT_:
2294 /* Anything else. */
2295 default:
2296 gcc_unreachable ();
2300 info->suppress_expansion = new_suppress;
2302 if (need_stmts)
2303 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2304 switch (OMP_CLAUSE_CODE (clause))
2306 case OMP_CLAUSE_REDUCTION:
2307 case OMP_CLAUSE_IN_REDUCTION:
2308 case OMP_CLAUSE_TASK_REDUCTION:
2309 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2311 tree old_context
2312 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2313 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2314 = info->context;
2315 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2316 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2317 = info->context;
2318 walk_body (convert_local_reference_stmt,
2319 convert_local_reference_op, info,
2320 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2321 walk_body (convert_local_reference_stmt,
2322 convert_local_reference_op, info,
2323 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2324 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2325 = old_context;
2326 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2327 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2328 = old_context;
2330 break;
2332 case OMP_CLAUSE_LASTPRIVATE:
2333 walk_body (convert_local_reference_stmt,
2334 convert_local_reference_op, info,
2335 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2336 break;
2338 case OMP_CLAUSE_LINEAR:
2339 walk_body (convert_local_reference_stmt,
2340 convert_local_reference_op, info,
2341 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2342 break;
2344 default:
2345 break;
2348 return need_frame;
2352 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2353 and PARM_DECLs that were referenced by inner nested functions.
2354 The rewrite will be a structure reference to the local frame variable. */
2356 static tree
2357 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2358 struct walk_stmt_info *wi)
2360 struct nesting_info *info = (struct nesting_info *) wi->info;
2361 tree save_local_var_chain;
2362 bitmap save_suppress;
2363 char save_static_chain_added;
2364 bool frame_decl_added;
2365 gimple *stmt = gsi_stmt (*gsi);
2367 switch (gimple_code (stmt))
2369 case GIMPLE_OMP_TEAMS:
2370 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2372 save_suppress = info->suppress_expansion;
2373 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2374 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2375 info, gimple_omp_body_ptr (stmt));
2376 info->suppress_expansion = save_suppress;
2377 break;
2379 /* FALLTHRU */
2381 case GIMPLE_OMP_PARALLEL:
2382 case GIMPLE_OMP_TASK:
2383 save_suppress = info->suppress_expansion;
2384 frame_decl_added = false;
2385 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
2386 wi))
2388 tree c = build_omp_clause (gimple_location (stmt),
2389 OMP_CLAUSE_SHARED);
2390 (void) get_frame_type (info);
2391 OMP_CLAUSE_DECL (c) = info->frame_decl;
2392 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2393 gimple_omp_taskreg_set_clauses (stmt, c);
2394 info->static_chain_added |= 4;
2395 frame_decl_added = true;
2398 save_local_var_chain = info->new_local_var_chain;
2399 save_static_chain_added = info->static_chain_added;
2400 info->new_local_var_chain = NULL;
2401 info->static_chain_added = 0;
2403 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2404 gimple_omp_body_ptr (stmt));
2406 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2408 tree c = build_omp_clause (gimple_location (stmt),
2409 OMP_CLAUSE_SHARED);
2410 (void) get_frame_type (info);
2411 OMP_CLAUSE_DECL (c) = info->frame_decl;
2412 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2413 info->static_chain_added |= 4;
2414 gimple_omp_taskreg_set_clauses (stmt, c);
2416 if (info->new_local_var_chain)
2417 declare_vars (info->new_local_var_chain,
2418 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2419 info->new_local_var_chain = save_local_var_chain;
2420 info->suppress_expansion = save_suppress;
2421 info->static_chain_added |= save_static_chain_added;
2422 break;
2424 case GIMPLE_OMP_FOR:
2425 save_suppress = info->suppress_expansion;
2426 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
2427 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
2428 convert_local_reference_stmt,
2429 convert_local_reference_op, info);
2430 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2431 info, gimple_omp_body_ptr (stmt));
2432 info->suppress_expansion = save_suppress;
2433 break;
2435 case GIMPLE_OMP_SECTIONS:
2436 save_suppress = info->suppress_expansion;
2437 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
2438 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2439 info, gimple_omp_body_ptr (stmt));
2440 info->suppress_expansion = save_suppress;
2441 break;
2443 case GIMPLE_OMP_SINGLE:
2444 save_suppress = info->suppress_expansion;
2445 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
2446 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2447 info, gimple_omp_body_ptr (stmt));
2448 info->suppress_expansion = save_suppress;
2449 break;
2451 case GIMPLE_OMP_TASKGROUP:
2452 save_suppress = info->suppress_expansion;
2453 convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
2454 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2455 info, gimple_omp_body_ptr (stmt));
2456 info->suppress_expansion = save_suppress;
2457 break;
2459 case GIMPLE_OMP_TARGET:
2460 if (!is_gimple_omp_offloaded (stmt))
2462 save_suppress = info->suppress_expansion;
2463 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
2464 info->suppress_expansion = save_suppress;
2465 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2466 info, gimple_omp_body_ptr (stmt));
2467 break;
2469 save_suppress = info->suppress_expansion;
2470 frame_decl_added = false;
2471 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
2473 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2474 (void) get_frame_type (info);
2475 OMP_CLAUSE_DECL (c) = info->frame_decl;
2476 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2477 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2478 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2479 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2480 info->static_chain_added |= 4;
2481 frame_decl_added = true;
2484 save_local_var_chain = info->new_local_var_chain;
2485 save_static_chain_added = info->static_chain_added;
2486 info->new_local_var_chain = NULL;
2487 info->static_chain_added = 0;
2489 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2490 gimple_omp_body_ptr (stmt));
2492 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2494 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2495 (void) get_frame_type (info);
2496 OMP_CLAUSE_DECL (c) = info->frame_decl;
2497 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2498 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2499 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2500 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2501 info->static_chain_added |= 4;
2504 if (info->new_local_var_chain)
2505 declare_vars (info->new_local_var_chain,
2506 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2507 info->new_local_var_chain = save_local_var_chain;
2508 info->suppress_expansion = save_suppress;
2509 info->static_chain_added |= save_static_chain_added;
2510 break;
2512 case GIMPLE_OMP_SECTION:
2513 case GIMPLE_OMP_MASTER:
2514 case GIMPLE_OMP_ORDERED:
2515 case GIMPLE_OMP_SCAN:
2516 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2517 info, gimple_omp_body_ptr (stmt));
2518 break;
2520 case GIMPLE_COND:
2521 wi->val_only = true;
2522 wi->is_lhs = false;
2523 *handled_ops_p = false;
2524 return NULL_TREE;
2526 case GIMPLE_ASSIGN:
2527 if (gimple_clobber_p (stmt))
2529 tree lhs = gimple_assign_lhs (stmt);
2530 if (DECL_P (lhs)
2531 && decl_function_context (lhs) == info->context
2532 && !use_pointer_in_frame (lhs)
2533 && lookup_field_for_decl (info, lhs, NO_INSERT))
2535 gsi_replace (gsi, gimple_build_nop (), true);
2536 break;
2539 *handled_ops_p = false;
2540 return NULL_TREE;
2542 case GIMPLE_BIND:
2543 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2544 var;
2545 var = DECL_CHAIN (var))
2546 if (TREE_CODE (var) == NAMELIST_DECL)
2548 /* Adjust decls mentioned in NAMELIST_DECL. */
2549 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2550 tree decl;
2551 unsigned int i;
2553 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2555 if (VAR_P (decl)
2556 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2557 continue;
2558 if (decl_function_context (decl) == info->context
2559 && !use_pointer_in_frame (decl))
2561 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2562 if (field)
2564 CONSTRUCTOR_ELT (decls, i)->value
2565 = get_local_debug_decl (info, decl, field);
2571 *handled_ops_p = false;
2572 return NULL_TREE;
2574 default:
2575 /* For every other statement that we are not interested in
2576 handling here, let the walker traverse the operands. */
2577 *handled_ops_p = false;
2578 return NULL_TREE;
2581 /* Indicate that we have handled all the operands ourselves. */
2582 *handled_ops_p = true;
2583 return NULL_TREE;
2587 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2588 that reference labels from outer functions. The rewrite will be a
2589 call to __builtin_nonlocal_goto. */
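/* A minimal GNU C example of the construct handled here (illustration
   only):

       void foo (void)
       {
         __label__ out;
         void bar (void) { goto out; }   // non-local jump into FOO
         bar ();
        out:;
       }

   The "goto out" inside BAR is rewritten into a call to
   __builtin_nonlocal_goto, passing the address of a new DECL_NONLOCAL
   label and the address of FOO's non-local goto save area.  */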
2591 static tree
2592 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2593 struct walk_stmt_info *wi)
2595 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2596 tree label, new_label, target_context, x, field;
2597 gcall *call;
2598 gimple *stmt = gsi_stmt (*gsi);
2600 if (gimple_code (stmt) != GIMPLE_GOTO)
2602 *handled_ops_p = false;
2603 return NULL_TREE;
2606 label = gimple_goto_dest (stmt);
2607 if (TREE_CODE (label) != LABEL_DECL)
2609 *handled_ops_p = false;
2610 return NULL_TREE;
2613 target_context = decl_function_context (label);
2614 if (target_context == info->context)
2616 *handled_ops_p = false;
2617 return NULL_TREE;
2620 for (i = info->outer; target_context != i->context; i = i->outer)
2621 continue;
2623 /* The original user label may also be used for a normal goto; therefore
2624 we must create a new label that will actually receive the abnormal
2625 control transfer. This new label will be marked LABEL_NONLOCAL; this
2626 mark will trigger proper behavior in the cfg, as well as cause the
2627 (hairy target-specific) non-local goto receiver code to be generated
2628 when we expand rtl. Enter this association into var_map so that we
2629 can insert the new label into the IL during a second pass. */
2630 tree *slot = &i->var_map->get_or_insert (label);
2631 if (*slot == NULL)
2633 new_label = create_artificial_label (UNKNOWN_LOCATION);
2634 DECL_NONLOCAL (new_label) = 1;
2635 *slot = new_label;
2637 else
2638 new_label = *slot;
2640 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2641 field = get_nl_goto_field (i);
2642 x = get_frame_field (info, target_context, field, gsi);
2643 x = build_addr (x);
2644 x = gsi_gimplify_val (info, x, gsi);
2645 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2646 2, build_addr (new_label), x);
2647 gsi_replace (gsi, call, false);
2649 /* We have handled all of STMT's operands, no need to keep going. */
2650 *handled_ops_p = true;
2651 return NULL_TREE;
2655 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2656 are referenced via nonlocal goto from a nested function. The rewrite
2657 will involve installing a newly generated DECL_NONLOCAL label, and
2658 (potentially) a branch around the rtl gunk that is assumed to be
2659 attached to such a label. */
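/* Sketch of the receiver-side rewrite (illustration only): if label OUT
   in the parent is targeted by a non-local goto, then

       ...; out:; ...

   becomes roughly

       ...; goto out; out_nl:; out:; ...

   where OUT_NL (name invented here) is the DECL_NONLOCAL label recorded
   in var_map, and the extra "goto out" is emitted only when the preceding
   statement may fall through.  */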
2661 static tree
2662 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2663 struct walk_stmt_info *wi)
2665 struct nesting_info *const info = (struct nesting_info *) wi->info;
2666 tree label, new_label;
2667 gimple_stmt_iterator tmp_gsi;
2668 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2670 if (!stmt)
2672 *handled_ops_p = false;
2673 return NULL_TREE;
2676 label = gimple_label_label (stmt);
2678 tree *slot = info->var_map->get (label);
2679 if (!slot)
2681 *handled_ops_p = false;
2682 return NULL_TREE;
2685 /* If there's any possibility that the previous statement falls through,
2686 then we must branch around the new non-local label. */
2687 tmp_gsi = wi->gsi;
2688 gsi_prev (&tmp_gsi);
2689 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2691 gimple *stmt = gimple_build_goto (label);
2692 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2695 new_label = (tree) *slot;
2696 stmt = gimple_build_label (new_label);
2697 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2699 *handled_ops_p = true;
2700 return NULL_TREE;
2704 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2705 of nested functions that require the use of trampolines. The rewrite
2706 will involve a reference to a trampoline generated for the occasion. */
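/* For example (GNU C, illustration only; assumes <stdlib.h>):

       void foo (int *buf, size_t len, int n)
       {
         int cmp (const void *a, const void *b)
           { return (*(const int *) a % n) - (*(const int *) b % n); }
         qsort (buf, len, sizeof *buf, cmp);   // &CMP escapes FOO
       }

   Since CMP uses FOO's N, taking its address requires a trampoline (or a
   function descriptor) stored in FOO's FRAME object; the T.1/T.2/T.3
   sequence in the comment below shows the rewritten address.  */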
2708 static tree
2709 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2711 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2712 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2713 tree t = *tp, decl, target_context, x, builtin;
2714 bool descr;
2715 gcall *call;
2717 *walk_subtrees = 0;
2718 switch (TREE_CODE (t))
2720 case ADDR_EXPR:
2721 /* Build
2722 T.1 = &CHAIN->tramp;
2723 T.2 = __builtin_adjust_trampoline (T.1);
2724 T.3 = (func_type)T.2;
2727 decl = TREE_OPERAND (t, 0);
2728 if (TREE_CODE (decl) != FUNCTION_DECL)
2729 break;
2731 /* Only need to process nested functions. */
2732 target_context = decl_function_context (decl);
2733 if (!target_context)
2734 break;
2736 /* If the nested function doesn't use a static chain, then
2737 it doesn't need a trampoline. */
2738 if (!DECL_STATIC_CHAIN (decl))
2739 break;
2741 /* If we don't want a trampoline, then don't build one. */
2742 if (TREE_NO_TRAMPOLINE (t))
2743 break;
2745 /* Look up the immediate parent of the callee, as that's where
2746 we need to insert the trampoline. */
2747 for (i = info; i->context != target_context; i = i->outer)
2748 continue;
2750 /* Decide whether to generate a descriptor or a trampoline. */
2751 descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;
2753 if (descr)
2754 x = lookup_descr_for_decl (i, decl, INSERT);
2755 else
2756 x = lookup_tramp_for_decl (i, decl, INSERT);
2758 /* Compute the address of the field holding the trampoline. */
2759 x = get_frame_field (info, target_context, x, &wi->gsi);
2760 x = build_addr (x);
2761 x = gsi_gimplify_val (info, x, &wi->gsi);
2763 /* Do machine-specific ugliness. Normally this will involve
2764 computing extra alignment, but it can really be anything. */
2765 if (descr)
2766 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
2767 else
2768 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2769 call = gimple_build_call (builtin, 1, x);
2770 x = init_tmp_var_with_call (info, &wi->gsi, call);
2772 /* Cast back to the proper function type. */
2773 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2774 x = init_tmp_var (info, x, &wi->gsi);
2776 *tp = x;
2777 break;
2779 default:
2780 if (!IS_TYPE_OR_DECL_P (t))
2781 *walk_subtrees = 1;
2782 break;
2785 return NULL_TREE;
2789 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2790 to addresses of nested functions that require the use of
2791 trampolines. The rewrite will involve a reference to a trampoline
2792 generated for the occasion. */
2794 static tree
2795 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2796 struct walk_stmt_info *wi)
2798 struct nesting_info *info = (struct nesting_info *) wi->info;
2799 gimple *stmt = gsi_stmt (*gsi);
2801 switch (gimple_code (stmt))
2803 case GIMPLE_CALL:
2805 /* Only walk call arguments, lest we generate trampolines for
2806 direct calls. */
2807 unsigned long i, nargs = gimple_call_num_args (stmt);
2808 for (i = 0; i < nargs; i++)
2809 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2810 wi, NULL);
2811 break;
2814 case GIMPLE_OMP_TEAMS:
2815 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2817 *handled_ops_p = false;
2818 return NULL_TREE;
2820 goto do_parallel;
2822 case GIMPLE_OMP_TARGET:
2823 if (!is_gimple_omp_offloaded (stmt))
2825 *handled_ops_p = false;
2826 return NULL_TREE;
2828 /* FALLTHRU */
2829 case GIMPLE_OMP_PARALLEL:
2830 case GIMPLE_OMP_TASK:
2831 do_parallel:
2833 tree save_local_var_chain = info->new_local_var_chain;
2834 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2835 info->new_local_var_chain = NULL;
2836 char save_static_chain_added = info->static_chain_added;
2837 info->static_chain_added = 0;
2838 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2839 info, gimple_omp_body_ptr (stmt));
2840 if (info->new_local_var_chain)
2841 declare_vars (info->new_local_var_chain,
2842 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2843 false);
2844 for (int i = 0; i < 2; i++)
2846 tree c, decl;
2847 if ((info->static_chain_added & (1 << i)) == 0)
2848 continue;
2849 decl = i ? get_chain_decl (info) : info->frame_decl;
2850 /* Don't add CHAIN.* or FRAME.* twice. */
2851 for (c = gimple_omp_taskreg_clauses (stmt);
2853 c = OMP_CLAUSE_CHAIN (c))
2854 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2855 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2856 && OMP_CLAUSE_DECL (c) == decl)
2857 break;
2858 if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2860 c = build_omp_clause (gimple_location (stmt),
2861 i ? OMP_CLAUSE_FIRSTPRIVATE
2862 : OMP_CLAUSE_SHARED);
2863 OMP_CLAUSE_DECL (c) = decl;
2864 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2865 gimple_omp_taskreg_set_clauses (stmt, c);
2867 else if (c == NULL)
2869 c = build_omp_clause (gimple_location (stmt),
2870 OMP_CLAUSE_MAP);
2871 OMP_CLAUSE_DECL (c) = decl;
2872 OMP_CLAUSE_SET_MAP_KIND (c,
2873 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2874 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2875 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2876 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2880 info->new_local_var_chain = save_local_var_chain;
2881 info->static_chain_added |= save_static_chain_added;
2883 break;
2885 default:
2886 *handled_ops_p = false;
2887 return NULL_TREE;
2890 *handled_ops_p = true;
2891 return NULL_TREE;
2896 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2897 that reference nested functions to make sure that the static chain
2898 is set up properly for the call. */
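/* Concretely (illustration only): a direct call "bar ()" where BAR is a
   nested function with DECL_STATIC_CHAIN set keeps its GIMPLE_CALL but
   gets a static chain operand attached via gimple_call_set_chain, computed
   by get_static_chain.  static_chain_added roughly records whether the
   chain value was this function's own FRAME (bit 0) or a chain pointer
   passed down from an outer function (bit 1).  */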
2900 static tree
2901 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2902 struct walk_stmt_info *wi)
2904 struct nesting_info *const info = (struct nesting_info *) wi->info;
2905 tree decl, target_context;
2906 char save_static_chain_added;
2907 int i;
2908 gimple *stmt = gsi_stmt (*gsi);
2910 switch (gimple_code (stmt))
2912 case GIMPLE_CALL:
2913 if (gimple_call_chain (stmt))
2914 break;
2915 decl = gimple_call_fndecl (stmt);
2916 if (!decl)
2917 break;
2918 target_context = decl_function_context (decl);
2919 if (target_context && DECL_STATIC_CHAIN (decl))
2921 struct nesting_info *i = info;
2922 while (i && i->context != target_context)
2923 i = i->outer;
2924 /* If none of the outer contexts is the target context, this means
2925 that the function is called in a wrong context. */
2926 if (!i)
2927 internal_error ("%s from %s called in %s",
2928 IDENTIFIER_POINTER (DECL_NAME (decl)),
2929 IDENTIFIER_POINTER (DECL_NAME (target_context)),
2930 IDENTIFIER_POINTER (DECL_NAME (info->context)));
2932 gimple_call_set_chain (as_a <gcall *> (stmt),
2933 get_static_chain (info, target_context,
2934 &wi->gsi));
2935 info->static_chain_added |= (1 << (info->context != target_context));
2937 break;
2939 case GIMPLE_OMP_TEAMS:
2940 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2942 walk_body (convert_gimple_call, NULL, info,
2943 gimple_omp_body_ptr (stmt));
2944 break;
2946 /* FALLTHRU */
2948 case GIMPLE_OMP_PARALLEL:
2949 case GIMPLE_OMP_TASK:
2950 save_static_chain_added = info->static_chain_added;
2951 info->static_chain_added = 0;
2952 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2953 for (i = 0; i < 2; i++)
2955 tree c, decl;
2956 if ((info->static_chain_added & (1 << i)) == 0)
2957 continue;
2958 decl = i ? get_chain_decl (info) : info->frame_decl;
2959 /* Don't add CHAIN.* or FRAME.* twice. */
2960 for (c = gimple_omp_taskreg_clauses (stmt);
2962 c = OMP_CLAUSE_CHAIN (c))
2963 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2964 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2965 && OMP_CLAUSE_DECL (c) == decl)
2966 break;
2967 if (c == NULL)
2969 c = build_omp_clause (gimple_location (stmt),
2970 i ? OMP_CLAUSE_FIRSTPRIVATE
2971 : OMP_CLAUSE_SHARED);
2972 OMP_CLAUSE_DECL (c) = decl;
2973 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2974 gimple_omp_taskreg_set_clauses (stmt, c);
2977 info->static_chain_added |= save_static_chain_added;
2978 break;
2980 case GIMPLE_OMP_TARGET:
2981 if (!is_gimple_omp_offloaded (stmt))
2983 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2984 break;
2986 save_static_chain_added = info->static_chain_added;
2987 info->static_chain_added = 0;
2988 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2989 for (i = 0; i < 2; i++)
2991 tree c, decl;
2992 if ((info->static_chain_added & (1 << i)) == 0)
2993 continue;
2994 decl = i ? get_chain_decl (info) : info->frame_decl;
2995 /* Don't add CHAIN.* or FRAME.* twice. */
2996 for (c = gimple_omp_target_clauses (stmt);
2998 c = OMP_CLAUSE_CHAIN (c))
2999 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
3000 && OMP_CLAUSE_DECL (c) == decl)
3001 break;
3002 if (c == NULL)
3004 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
3005 OMP_CLAUSE_DECL (c) = decl;
3006 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
3007 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
3008 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
3009 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
3013 info->static_chain_added |= save_static_chain_added;
3014 break;
3016 case GIMPLE_OMP_FOR:
3017 walk_body (convert_gimple_call, NULL, info,
3018 gimple_omp_for_pre_body_ptr (stmt));
3019 /* FALLTHRU */
3020 case GIMPLE_OMP_SECTIONS:
3021 case GIMPLE_OMP_SECTION:
3022 case GIMPLE_OMP_SINGLE:
3023 case GIMPLE_OMP_MASTER:
3024 case GIMPLE_OMP_TASKGROUP:
3025 case GIMPLE_OMP_ORDERED:
3026 case GIMPLE_OMP_SCAN:
3027 case GIMPLE_OMP_CRITICAL:
3028 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
3029 break;
3031 default:
3032 /* Keep looking for other operands. */
3033 *handled_ops_p = false;
3034 return NULL_TREE;
3037 *handled_ops_p = true;
3038 return NULL_TREE;
3041 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
3042 call expressions. At the same time, determine if a nested function
3043 actually uses its static chain; if not, remember that. */
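/* Why the walk below iterates to a fixed point (illustration): building a
   trampoline for one nested function can introduce a new use of the static
   chain in its parent, invalidating an earlier "no static chain" guess, so
   the walk is repeated until the count of functions with DECL_STATIC_CHAIN
   set stops changing.  */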
3045 static void
3046 convert_all_function_calls (struct nesting_info *root)
3048 unsigned int chain_count = 0, old_chain_count, iter_count;
3049 struct nesting_info *n;
3051 /* First, optimistically clear static_chain for all decls that haven't
3052 used the static chain already for variable access. But always create
3053 it if not optimizing. This makes it possible to reconstruct the static
3054 nesting tree at run time and thus to resolve up-level references from
3055 within the debugger. */
3056 FOR_EACH_NEST_INFO (n, root)
3058 if (n->thunk_p)
3059 continue;
3060 tree decl = n->context;
3061 if (!optimize)
3063 if (n->inner)
3064 (void) get_frame_type (n);
3065 if (n->outer)
3066 (void) get_chain_decl (n);
3068 else if (!n->outer || (!n->chain_decl && !n->chain_field))
3070 DECL_STATIC_CHAIN (decl) = 0;
3071 if (dump_file && (dump_flags & TDF_DETAILS))
3072 fprintf (dump_file, "Guessing no static-chain for %s\n",
3073 lang_hooks.decl_printable_name (decl, 2));
3075 else
3076 DECL_STATIC_CHAIN (decl) = 1;
3077 chain_count += DECL_STATIC_CHAIN (decl);
3080 FOR_EACH_NEST_INFO (n, root)
3081 if (n->thunk_p)
3083 tree decl = n->context;
3084 tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
3085 DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
3088 /* Walk the functions and perform transformations. Note that these
3089 transformations can induce new uses of the static chain, which in turn
3090 require re-examining all users of the decl. */
3091 /* ??? It would make sense to try to use the call graph to speed this up,
3092 but the call graph hasn't really been built yet. Even if it had been, we
3093 would still need to iterate in this loop since address-of references
3094 wouldn't show up in the callgraph anyway. */
3095 iter_count = 0;
3098 old_chain_count = chain_count;
3099 chain_count = 0;
3100 iter_count++;
3102 if (dump_file && (dump_flags & TDF_DETAILS))
3103 fputc ('\n', dump_file);
3105 FOR_EACH_NEST_INFO (n, root)
3107 if (n->thunk_p)
3108 continue;
3109 tree decl = n->context;
3110 walk_function (convert_tramp_reference_stmt,
3111 convert_tramp_reference_op, n);
3112 walk_function (convert_gimple_call, NULL, n);
3113 chain_count += DECL_STATIC_CHAIN (decl);
3116 FOR_EACH_NEST_INFO (n, root)
3117 if (n->thunk_p)
3119 tree decl = n->context;
3120 tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
3121 DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
3124 while (chain_count != old_chain_count);
3126 if (dump_file && (dump_flags & TDF_DETAILS))
3127 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
3128 iter_count);
3131 struct nesting_copy_body_data
3133 copy_body_data cb;
3134 struct nesting_info *root;
3137 /* A helper subroutine for debug_var_chain type remapping. */
3139 static tree
3140 nesting_copy_decl (tree decl, copy_body_data *id)
3142 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
3143 tree *slot = nid->root->var_map->get (decl);
3145 if (slot)
3146 return (tree) *slot;
3148 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
3150 tree new_decl = copy_decl_no_change (decl, id);
3151 DECL_ORIGINAL_TYPE (new_decl)
3152 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
3153 return new_decl;
3156 if (VAR_P (decl)
3157 || TREE_CODE (decl) == PARM_DECL
3158 || TREE_CODE (decl) == RESULT_DECL)
3159 return decl;
3161 return copy_decl_no_change (decl, id);
3164 /* A helper function for remap_vla_decls. See if *TP contains
3165 some remapped variables. */
3167 static tree
3168 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
3170 struct nesting_info *root = (struct nesting_info *) data;
3171 tree t = *tp;
3173 if (DECL_P (t))
3175 *walk_subtrees = 0;
3176 tree *slot = root->var_map->get (t);
3178 if (slot)
3179 return *slot;
3181 return NULL;
3184 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
3185 involved. */
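/* Typical case handled here (illustration only): a parent VLA "int a[n]"
   is represented by a decl A with DECL_VALUE_EXPR (A) = *A.ptr and a
   variably modified type mentioning N.  If N or A.ptr was moved into the
   FRAME object, both the value expression and the type must be rewritten
   in terms of the remapped decls, which is what the code below does.
   (The name "A.ptr" is invented for this sketch.)  */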
3187 static void
3188 remap_vla_decls (tree block, struct nesting_info *root)
3190 tree var, subblock, val, type;
3191 struct nesting_copy_body_data id;
3193 for (subblock = BLOCK_SUBBLOCKS (block);
3194 subblock;
3195 subblock = BLOCK_CHAIN (subblock))
3196 remap_vla_decls (subblock, root);
3198 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3199 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3201 val = DECL_VALUE_EXPR (var);
3202 type = TREE_TYPE (var);
3204 if (!(TREE_CODE (val) == INDIRECT_REF
3205 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
3206 && variably_modified_type_p (type, NULL)))
3207 continue;
3209 if (root->var_map->get (TREE_OPERAND (val, 0))
3210 || walk_tree (&type, contains_remapped_vars, root, NULL))
3211 break;
3214 if (var == NULL_TREE)
3215 return;
3217 memset (&id, 0, sizeof (id));
3218 id.cb.copy_decl = nesting_copy_decl;
3219 id.cb.decl_map = new hash_map<tree, tree>;
3220 id.root = root;
3222 for (; var; var = DECL_CHAIN (var))
3223 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3225 struct nesting_info *i;
3226 tree newt, context;
3228 val = DECL_VALUE_EXPR (var);
3229 type = TREE_TYPE (var);
3231 if (!(TREE_CODE (val) == INDIRECT_REF
3232 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
3233 && variably_modified_type_p (type, NULL)))
3234 continue;
3236 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
3237 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
3238 continue;
3240 context = decl_function_context (var);
3241 for (i = root; i; i = i->outer)
3242 if (i->context == context)
3243 break;
3245 if (i == NULL)
3246 continue;
3248 /* Fully expand value expressions. This avoids having debug variables
3249 that are referenced only from value expressions and could be swept during GC. */
3250 if (slot)
3252 tree t = (tree) *slot;
3253 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
3254 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
3257 id.cb.src_fn = i->context;
3258 id.cb.dst_fn = i->context;
3259 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3261 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
3262 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3264 newt = TREE_TYPE (newt);
3265 type = TREE_TYPE (type);
3267 if (TYPE_NAME (newt)
3268 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3269 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3270 && newt != type
3271 && TYPE_NAME (newt) == TYPE_NAME (type))
3272 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3274 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
3275 if (val != DECL_VALUE_EXPR (var))
3276 SET_DECL_VALUE_EXPR (var, val);
3279 delete id.cb.decl_map;
3282 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3283 involved. */
3285 static void
3286 fixup_vla_decls (tree block)
3288 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3289 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3291 tree val = DECL_VALUE_EXPR (var);
3293 if (!(TREE_CODE (val) == INDIRECT_REF
3294 && VAR_P (TREE_OPERAND (val, 0))
3295 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3296 continue;
3298 /* Fully expand value expressions. This avoids having debug variables
3299 that are referenced only from value expressions and could be swept during GC. */
3300 val = build1 (INDIRECT_REF, TREE_TYPE (val),
3301 DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3302 SET_DECL_VALUE_EXPR (var, val);
3305 for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3306 fixup_vla_decls (sub);
3309 /* Fold the MEM_REF *E. */
3310 bool
3311 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3313 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3314 *ref_p = fold (*ref_p);
3315 return true;
3318 /* Given DECL, a nested function, build an initialization call for FIELD,
3319 the trampoline or descriptor for DECL, using FUNC as the function. */
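/* The generated call has the shape (sketch; field names invented):

       __builtin_init_trampoline (&FRAME.tramp_bar, &bar, &FRAME);
   or
       __builtin_init_descriptor (&FRAME.descr_bar, &bar, &FRAME);

   i.e. FUNC applied to the address of the trampoline/descriptor field,
   the address of the nested function, and the static chain value.  */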
3321 static gcall *
3322 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3323 tree func)
3325 tree arg1, arg2, arg3, x;
3327 gcc_assert (DECL_STATIC_CHAIN (decl));
3328 arg3 = build_addr (info->frame_decl);
3330 arg2 = build_addr (decl);
3332 x = build3 (COMPONENT_REF, TREE_TYPE (field),
3333 info->frame_decl, field, NULL_TREE);
3334 arg1 = build_addr (x);
3336 return gimple_build_call (func, 3, arg1, arg2, arg3);
3339 /* Do "everything else" to clean up or complete state collected by the various
3340 walking passes -- create a field to hold the frame base address, lay out the
3341 types and decls, generate code to initialize the frame decl, store critical
3342 expressions in the struct function for rtl to find. */
3344 static void
3345 finalize_nesting_tree_1 (struct nesting_info *root)
3347 gimple_seq stmt_list = NULL;
3348 gimple *stmt;
3349 tree context = root->context;
3350 struct function *sf;
3352 if (root->thunk_p)
3353 return;
3355 /* If we created a non-local frame type or decl, we need to lay them
3356 out at this time. */
3357 if (root->frame_type)
3359 /* Debugging information needs to compute the frame base address of the
3360 parent frame out of the static chain from the nested frame.
3362 The static chain is the address of the FRAME record, so one could
3363 imagine it would be possible to compute the frame base address just
3364 adding a constant offset to this address. Unfortunately, this is not
3365 possible: if the FRAME object has alignment constraints that are
3366 stronger than the stack, then the offset between the frame base and
3367 the FRAME object will be dynamic.
3369 What we do instead is to append a field to the FRAME object that holds
3370 the frame base address: then debug info just has to fetch this
3371 field. */
3373 /* Debugging information will refer to the CFA as the frame base
3374 address: we will do the same here. */
3375 const tree frame_addr_fndecl
3376 = builtin_decl_explicit (BUILT_IN_DWARF_CFA);
3378 /* Create a field in the FRAME record to hold the frame base address for
3379 this stack frame. Since it will be used only by the debugger, put it
3380 at the end of the record in order not to shift all other offsets. */
3381 tree fb_decl = make_node (FIELD_DECL);
3383 DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
3384 TREE_TYPE (fb_decl) = ptr_type_node;
3385 TREE_ADDRESSABLE (fb_decl) = 1;
3386 DECL_CONTEXT (fb_decl) = root->frame_type;
3387 TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
3388 fb_decl);
3390 /* In some cases the frame type will trigger the -Wpadded warning.
3391 This is not helpful; suppress it. */
3392 int save_warn_padded = warn_padded;
3393 warn_padded = 0;
3394 layout_type (root->frame_type);
3395 warn_padded = save_warn_padded;
3396 layout_decl (root->frame_decl, 0);
3398 /* Initialize the frame base address field. If the builtin we need is
3399 not available, set it to NULL so that debugging information does not
3400 reference junk. */
3401 tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
3402 root->frame_decl, fb_decl, NULL_TREE);
3403 tree fb_tmp;
3405 if (frame_addr_fndecl != NULL_TREE)
3407 gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
3408 integer_zero_node);
3409 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3411 fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
3413 else
3414 fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
3415 gimple_seq_add_stmt (&stmt_list,
3416 gimple_build_assign (fb_ref, fb_tmp));
3418 declare_vars (root->frame_decl,
3419 gimple_seq_first_stmt (gimple_body (context)), true);
3422 /* If any parameters were referenced non-locally, then we need to insert
3423 a copy or a pointer. */
3424 if (root->any_parm_remapped)
3426 tree p;
3427 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
3429 tree field, x, y;
3431 field = lookup_field_for_decl (root, p, NO_INSERT);
3432 if (!field)
3433 continue;
3435 if (use_pointer_in_frame (p))
3436 x = build_addr (p);
3437 else
3438 x = p;
3440 /* If the assignment is from a non-register the stmt is
3441 not valid gimple. Make it so by using a temporary instead. */
3442 if (!is_gimple_reg (x)
3443 && is_gimple_reg_type (TREE_TYPE (x)))
3445 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3446 x = init_tmp_var (root, x, &gsi);
3449 y = build3 (COMPONENT_REF, TREE_TYPE (field),
3450 root->frame_decl, field, NULL_TREE);
3451 stmt = gimple_build_assign (y, x);
3452 gimple_seq_add_stmt (&stmt_list, stmt);
3456 /* If a chain_field was created, then it needs to be initialized
3457 from chain_decl. */
3458 if (root->chain_field)
3460 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
3461 root->frame_decl, root->chain_field, NULL_TREE);
3462 stmt = gimple_build_assign (x, get_chain_decl (root));
3463 gimple_seq_add_stmt (&stmt_list, stmt);
3466 /* If trampolines were created, then we need to initialize them. */
3467 if (root->any_tramp_created)
3469 struct nesting_info *i;
3470 for (i = root->inner; i ; i = i->next)
3472 tree field, x;
3474 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
3475 if (!field)
3476 continue;
3478 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
3479 stmt = build_init_call_stmt (root, i->context, field, x);
3480 gimple_seq_add_stmt (&stmt_list, stmt);
3484 /* If descriptors were created, then we need to initialize them. */
3485 if (root->any_descr_created)
3487 struct nesting_info *i;
3488 for (i = root->inner; i ; i = i->next)
3490 tree field, x;
3492 field = lookup_descr_for_decl (root, i->context, NO_INSERT);
3493 if (!field)
3494 continue;
3496 x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
3497 stmt = build_init_call_stmt (root, i->context, field, x);
3498 gimple_seq_add_stmt (&stmt_list, stmt);
3502 /* If we created initialization statements, insert them. */
3503 if (stmt_list)
3505 gbind *bind;
3506 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
3507 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
3508 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
3509 gimple_bind_set_body (bind, stmt_list);
3512 /* If a chain_decl was created, then it needs to be registered with
3513 struct function so that it gets initialized from the static chain
3514 register at the beginning of the function. */
3515 sf = DECL_STRUCT_FUNCTION (root->context);
3516 sf->static_chain_decl = root->chain_decl;
3518 /* Similarly for the non-local goto save area. */
3519 if (root->nl_goto_field)
3521 sf->nonlocal_goto_save_area
3522 = get_frame_field (root, context, root->nl_goto_field, NULL);
3523 sf->has_nonlocal_label = 1;
3526 /* Make sure all new local variables get inserted into the
3527 proper BIND_EXPR. */
3528 if (root->new_local_var_chain)
3529 declare_vars (root->new_local_var_chain,
3530 gimple_seq_first_stmt (gimple_body (root->context)),
3531 false);
3533 if (root->debug_var_chain)
3535 tree debug_var;
3536 gbind *scope;
3538 remap_vla_decls (DECL_INITIAL (root->context), root);
3540 for (debug_var = root->debug_var_chain; debug_var;
3541 debug_var = DECL_CHAIN (debug_var))
3542 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3543 break;
3545 /* If there are any debug decls with variable length types,
3546 remap those types using other debug_var_chain variables. */
3547 if (debug_var)
3549 struct nesting_copy_body_data id;
3551 memset (&id, 0, sizeof (id));
3552 id.cb.copy_decl = nesting_copy_decl;
3553 id.cb.decl_map = new hash_map<tree, tree>;
3554 id.root = root;
3556 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
3557 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3559 tree type = TREE_TYPE (debug_var);
3560 tree newt, t = type;
3561 struct nesting_info *i;
3563 for (i = root; i; i = i->outer)
3564 if (variably_modified_type_p (type, i->context))
3565 break;
3567 if (i == NULL)
3568 continue;
3570 id.cb.src_fn = i->context;
3571 id.cb.dst_fn = i->context;
3572 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3574 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
3575 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3577 newt = TREE_TYPE (newt);
3578 t = TREE_TYPE (t);
3580 if (TYPE_NAME (newt)
3581 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3582 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3583 && newt != t
3584 && TYPE_NAME (newt) == TYPE_NAME (t))
3585 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3588 delete id.cb.decl_map;
3591 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
3592 if (gimple_bind_block (scope))
3593 declare_vars (root->debug_var_chain, scope, true);
3594 else
3595 BLOCK_VARS (DECL_INITIAL (root->context))
3596 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
3597 root->debug_var_chain);
3599 else
3600 fixup_vla_decls (DECL_INITIAL (root->context));
3602 /* Fold the rewritten MEM_REF trees. */
3603 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
3605 /* Dump the translated tree function. */
3606 if (dump_file)
3608 fputs ("\n\n", dump_file);
3609 dump_function_to_file (root->context, dump_file, dump_flags);
3613 static void
3614 finalize_nesting_tree (struct nesting_info *root)
3616 struct nesting_info *n;
3617 FOR_EACH_NEST_INFO (n, root)
3618 finalize_nesting_tree_1 (n);
3621 /* Unnest the nodes and pass them to cgraph. */
3623 static void
3624 unnest_nesting_tree_1 (struct nesting_info *root)
3626 struct cgraph_node *node = cgraph_node::get (root->context);
3628 /* For nested functions, update the cgraph to reflect unnesting.
3629 We also delay finalization of these functions until this point. */
3630 if (nested_function_info::get (node)->origin)
3632 unnest_function (node);
3633 if (!root->thunk_p)
3634 cgraph_node::finalize_function (root->context, true);
3638 static void
3639 unnest_nesting_tree (struct nesting_info *root)
3641 struct nesting_info *n;
3642 FOR_EACH_NEST_INFO (n, root)
3643 unnest_nesting_tree_1 (n);
3646 /* Free the data structures allocated during this pass. */
3648 static void
3649 free_nesting_tree (struct nesting_info *root)
3651 struct nesting_info *node, *next;
3653 node = iter_nestinfo_start (root);
3656 next = iter_nestinfo_next (node);
3657 delete node->var_map;
3658 delete node->field_map;
3659 delete node->mem_refs;
3660 free (node);
3661 node = next;
3663 while (node);
3666 /* Gimplify a function and all its nested functions. */
3667 static void
3668 gimplify_all_functions (struct cgraph_node *root)
3670 struct cgraph_node *iter;
3671 if (!gimple_body (root->decl))
3672 gimplify_function_tree (root->decl);
3673 for (iter = first_nested_function (root); iter;
3674 iter = next_nested_function (iter))
3675 if (!iter->thunk)
3676 gimplify_all_functions (iter);
3679 /* Main entry point for this pass. Process FNDECL and all of its nested
3680 subroutines and turn them into something less tightly bound. */
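/* End-to-end sketch of the transformation (illustration only, not the
   literal output):

       // before (GNU C)
       int foo (int n)
       {
         int bar (void) { return n; }
         return bar ();
       }

       // conceptually after lowering
       struct FRAME_foo { int n; };
       static int bar (struct FRAME_foo *chain) { return chain->n; }
       int foo (int n)
       {
         struct FRAME_foo frame;
         frame.n = n;
         return bar (&frame);   // &frame is the static chain
       }

   The real rewrite stays in GIMPLE and passes the chain via
   gimple_call_set_chain rather than an explicit argument, but the data
   flow is the same.  */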
3682 void
3683 lower_nested_functions (tree fndecl)
3685 struct cgraph_node *cgn;
3686 struct nesting_info *root;
3688 /* If there are no nested functions, there's nothing to do. */
3689 cgn = cgraph_node::get (fndecl);
3690 if (!first_nested_function (cgn))
3691 return;
3693 gimplify_all_functions (cgn);
3695 set_dump_file (dump_begin (TDI_nested, &dump_flags));
3696 if (dump_file)
3697 fprintf (dump_file, "\n;; Function %s\n\n",
3698 lang_hooks.decl_printable_name (fndecl, 2));
3700 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3701 root = create_nesting_tree (cgn);
3703 walk_all_functions (convert_nonlocal_reference_stmt,
3704 convert_nonlocal_reference_op,
3705 root);
3706 walk_all_functions (convert_local_reference_stmt,
3707 convert_local_reference_op,
3708 root);
3709 walk_all_functions (convert_nl_goto_reference, NULL, root);
3710 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3712 convert_all_function_calls (root);
3713 finalize_nesting_tree (root);
3714 unnest_nesting_tree (root);
3716 free_nesting_tree (root);
3717 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3719 if (dump_file)
3721 dump_end (TDI_nested, dump_file);
3722 set_dump_file (NULL);
3726 #include "gt-tree-nested.h"