/* Nested function decomposition for GIMPLE.
   Copyright (C) 2004-2021 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "cgraph.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "tree-inline.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "explow.h"
#include "langhooks.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "diagnostic.h"
#include "alloc-pool.h"
#include "tree-nested.h"
#include "symbol-summary.h"
#include "symtab-thunks.h"
/* Summary of nested functions.  */
static function_summary <nested_function_info *>
  *nested_function_sum = NULL;

/* Return nested_function_info, if available.  */
nested_function_info *
nested_function_info::get (cgraph_node *node)
{
  if (!nested_function_sum)
    return NULL;
  return nested_function_sum->get (node);
}

/* Return nested_function_info possibly creating new one.  */
nested_function_info *
nested_function_info::get_create (cgraph_node *node)
{
  if (!nested_function_sum)
    {
      nested_function_sum = new function_summary <nested_function_info *>
			      (symtab);
      nested_function_sum->disable_insertion_hook ();
    }
  return nested_function_sum->get_create (node);
}

/* cgraph_node is no longer nested function; update cgraph accordingly.  */
void
unnest_function (cgraph_node *node)
{
  nested_function_info *info = nested_function_info::get (node);
  cgraph_node **node2 = &nested_function_info::get
			  (nested_function_origin (node))->nested;

  gcc_checking_assert (info->origin);
  while (*node2 != node)
    node2 = &nested_function_info::get (*node2)->next_nested;
  *node2 = info->next_nested;
  info->next_nested = NULL;
  info->origin = NULL;
  nested_function_sum->remove (node);
}

/* Destructor: unlink function from nested function lists.  */
nested_function_info::~nested_function_info ()
{
  cgraph_node *next;
  for (cgraph_node *n = nested; n; n = next)
    {
      nested_function_info *info = nested_function_info::get (n);
      next = info->next_nested;
      info->origin = NULL;
      info->next_nested = NULL;
    }
  nested = NULL;
  if (origin)
    {
      cgraph_node **node2
	= &nested_function_info::get (origin)->nested;

      nested_function_info *info;
      while ((info = nested_function_info::get (*node2)) != this && info)
	node2 = &info->next_nested;
      *node2 = next_nested;
    }
}

/* Free nested function info summaries.  */
void
nested_function_info::release ()
{
  if (nested_function_sum)
    delete (nested_function_sum);
  nested_function_sum = NULL;
}

/* If NODE is nested function, record it.  */
void
maybe_record_nested_function (cgraph_node *node)
{
  /* All nested functions get lowered during the construction of symtab.  */
  if (symtab->state > CONSTRUCTION)
    return;
  if (DECL_CONTEXT (node->decl)
      && TREE_CODE (DECL_CONTEXT (node->decl)) == FUNCTION_DECL)
    {
      cgraph_node *origin = cgraph_node::get_create (DECL_CONTEXT (node->decl));
      nested_function_info *info = nested_function_info::get_create (node);
      nested_function_info *origin_info
	= nested_function_info::get_create (origin);

      info->origin = origin;
      info->next_nested = origin_info->nested;
      origin_info->nested = node;
    }
}
/* The object of this pass is to lower the representation of a set of nested
   functions in order to expose all of the gory details of the various
   nonlocal references.  We want to do this sooner rather than later, in
   order to give us more freedom in emitting all of the functions in question.

   Back in olden times, when gcc was young, we developed an insanely
   complicated scheme whereby variables which were referenced nonlocally
   were forced to live in the stack of the declaring function, and then
   the nested functions magically discovered where these variables were
   placed.  In order for this scheme to function properly, it required
   that the outer function be partially expanded, then we switch to
   compiling the inner function, and once done with those we switch back
   to compiling the outer function.  Such delicate ordering requirements
   make it difficult to do whole translation unit optimizations
   involving such functions.

   The implementation here is much more direct.  Everything that can be
   referenced by an inner function is a member of an explicitly created
   structure herein called the "nonlocal frame struct".  The incoming
   static chain for a nested function is a pointer to this struct in
   the parent.  In this way, we settle on known offsets from a known
   base, and so are decoupled from the logic that places objects in the
   function's stack frame.  More importantly, we don't have to wait for
   that to happen -- since the compilation of the inner function is no
   longer tied to a real stack frame, the nonlocal frame struct can be
   allocated anywhere.  Which means that the outer function is now
   inlinable.

   Theory of operation here is very simple.  Iterate over all the
   statements in all the functions (depth first) several times,
   allocating structures and fields on demand.  In general we want to
   examine inner functions first, so that we can avoid making changes
   to outer functions which are unnecessary.

   The order of the passes matters a bit, in that later passes will be
   skipped if it is discovered that the functions don't actually interact
   at all.  That is, they're nested in the lexical sense but could have
   been written as independent functions without change.  */
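
/* Purely as an illustration of the scheme described above (this sketch is
   not part of the pass and the names are invented for exposition), a C
   source fragment such as

	int foo (int a)
	{
	  int bar (void) { return a + 1; }
	  return bar ();
	}

   is conceptually rewritten along the lines of

	struct FRAME_foo { int a; };

	static int bar (struct FRAME_foo *chain) { return chain->a + 1; }

	int foo (int a)
	{
	  struct FRAME_foo FRAME;
	  FRAME.a = a;
	  return bar (&FRAME);
	}

   The actual transformation is performed on GIMPLE by the routines below;
   the real frame type, field layout and chain-passing convention are
   created on demand and need not look exactly like this.  */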
struct nesting_info
{
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  hash_map<tree, tree> *field_map;
  hash_map<tree, tree> *var_map;
  hash_set<tree *> *mem_refs;
  bitmap suppress_expansion;

  tree context;
  tree new_local_var_chain;
  tree debug_var_chain;
  tree frame_type;
  tree frame_decl;
  tree chain_field;
  tree chain_decl;
  tree nl_goto_field;

  bool thunk_p;
  bool any_parm_remapped;
  bool any_tramp_created;
  bool any_descr_created;
  char static_chain_added;
};


/* Iterate over the nesting tree, starting with ROOT, depth first.  */

static inline struct nesting_info *
iter_nestinfo_start (struct nesting_info *root)
{
  while (root->inner)
    root = root->inner;
  return root;
}

static inline struct nesting_info *
iter_nestinfo_next (struct nesting_info *node)
{
  if (node->next)
    return iter_nestinfo_start (node->next);
  return node->outer;
}

#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;
/* We're working in so many different function contexts simultaneously,
   that create_tmp_var is dangerous.  Prevent mishap.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy

/* Like create_tmp_var, except record the variable for registration at
   the given nesting level.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}

/* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result.  */

static tree
build_simple_mem_ref_notrap (tree ptr)
{
  tree t = build_simple_mem_ref (ptr);
  TREE_THIS_NOTRAP (t) = 1;
  return t;
}

/* Take the address of EXP to be used within function CONTEXT.
   Mark it for addressability as necessary.  */

tree
build_addr (tree exp)
{
  mark_addressable (exp);
  return build_fold_addr_expr (exp);
}

/* Insert FIELD into TYPE, sorted by alignment requirements.  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  DECL_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    SET_TYPE_ALIGN (type, DECL_ALIGN (field));
}
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;

      /* Do not put info->frame_decl on info->new_local_var_chain,
	 so that we can declare it in the lexical blocks, which
	 makes sure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs.  */
      info->frame_decl = create_tmp_var_raw (type, "FRAME");
      DECL_CONTEXT (info->frame_decl) = info->context;
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
      DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }

  return type;
}
/* Return true if DECL should be referenced by pointer in the non-local frame
   structure.  */

static bool
use_pointer_in_frame (tree decl)
{
  if (TREE_CODE (decl) == PARM_DECL)
    {
      /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
	 sized DECLs, and inefficient to copy large aggregates.  Don't bother
	 moving anything but scalar parameters.  */
      return AGGREGATE_TYPE_P (TREE_TYPE (decl));
    }
  else
    {
      /* Variable-sized DECLs can only come from OMP clauses at this point
	 since the gimplifier has already turned the regular variables into
	 pointers.  Do the same as the gimplifier.  */
      return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
    }
}
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  gcc_checking_assert (decl_function_context (decl) == info->context);

  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree type = get_frame_type (info);
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  DECL_IGNORED_P (field) = DECL_IGNORED_P (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_NO_WARNING (field) = TREE_NO_WARNING (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);

	  /* Declare the transformation and adjust the original DECL.  For a
	     variable or for a parameter when not optimizing, we make it point
	     to the field in the frame directly.  For a parameter, we don't do
	     it when optimizing because the variable tracking pass will already
	     do the job.  */
	  if (VAR_P (decl) || !optimize)
	    {
	      tree x
		= build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
			  field, NULL_TREE);

	      /* If the next declaration is a PARM_DECL pointing to the DECL,
		 we need to adjust its VALUE_EXPR directly, since chains of
		 VALUE_EXPRs run afoul of garbage collection.  This occurs
		 in Ada for Out parameters that aren't copied in.  */
	      tree next = DECL_CHAIN (decl);
	      if (next
		  && TREE_CODE (next) == PARM_DECL
		  && DECL_HAS_VALUE_EXPR_P (next)
		  && DECL_VALUE_EXPR (next) == decl)
		SET_DECL_VALUE_EXPR (next, x);

	      SET_DECL_VALUE_EXPR (decl, x);
	      DECL_HAS_VALUE_EXPR_P (decl) = 1;
	    }
	}

      insert_field_into_struct (type, field);
      *slot = field;

      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
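
/* Taken together, get_chain_decl and get_chain_field give every frame a
   pointer back to the frame of its enclosing function.  As a rough sketch
   (names as used above, layout only illustrative, not literal output):

	struct FRAME_outer  { ... fields of outer ... };
	struct FRAME_middle { struct FRAME_outer *__chain; ... };
	struct FRAME_inner  { struct FRAME_middle *__chain; ... };

   so a nested function can reach any enclosing frame by chasing __chain
   links starting from its incoming CHAIN parameter.  */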
/* Initialize a new temporary with the GIMPLE_CALL STMT.  */

static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
			gcall *call)
{
  tree t;

  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}
/* Copy EXP into a temporary.  Allocate the temporary in the context of
   INFO and insert the initialization statement before GSI.  */

static tree
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple *stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (t, exp);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}


/* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */

static tree
gsi_gimplify_val (struct nesting_info *info, tree exp,
		  gimple_stmt_iterator *gsi)
{
  if (is_gimple_val (exp))
    return exp;
  else
    return init_tmp_var (info, exp, gsi);
}

/* Similarly, but copy from the temporary and insert the statement
   after the iterator.  */

static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple *stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (exp, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
/* Build or return the type used to represent a nested function trampoline.  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, align);
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}

/* Build or return the type used to represent a nested function descriptor.  */

static GTY(()) tree descriptor_type;

static tree
get_descriptor_type (struct nesting_info *info)
{
  /* The base alignment is that of a function.  */
  const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
  tree t;

  if (descriptor_type)
    return descriptor_type;

  t = build_index_type (integer_one_node);
  t = build_array_type (ptr_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
  DECL_USER_ALIGN (t) = 1;

  descriptor_type = make_node (RECORD_TYPE);
  TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
  TYPE_FIELDS (descriptor_type) = t;
  layout_type (descriptor_type);
  DECL_CONTEXT (t) = descriptor_type;

  return descriptor_type;
}
/* Given DECL, a nested function, find or create an element in the
   var map for this function.  */

static tree
lookup_element_for_decl (struct nesting_info *info, tree decl,
			 enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->var_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->var_map->get_or_insert (decl);
  if (!*slot)
    *slot = build_tree_list (NULL_TREE, NULL_TREE);

  return (tree) *slot;
}

/* Given DECL, a nested function, create a field in the non-local
   frame structure for this function.  */

static tree
create_field_for_decl (struct nesting_info *info, tree decl, tree type)
{
  tree field = make_node (FIELD_DECL);
  DECL_NAME (field) = DECL_NAME (decl);
  TREE_TYPE (field) = type;
  TREE_ADDRESSABLE (field) = 1;
  insert_field_into_struct (get_frame_type (info), field);
  return field;
}
/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  tree elt, field;

  elt = lookup_element_for_decl (info, decl, insert);
  if (!elt)
    return NULL_TREE;

  field = TREE_PURPOSE (elt);

  if (!field && insert == INSERT)
    {
      field = create_field_for_decl (info, decl, get_trampoline_type (info));
      TREE_PURPOSE (elt) = field;
      info->any_tramp_created = true;
    }

  return field;
}

/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a descriptor for this function.  */

static tree
lookup_descr_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  tree elt, field;

  elt = lookup_element_for_decl (info, decl, insert);
  if (!elt)
    return NULL_TREE;

  field = TREE_VALUE (elt);

  if (!field && insert == INSERT)
    {
      field = create_field_for_decl (info, decl, get_descriptor_type (info));
      TREE_VALUE (elt) = field;
      info->any_descr_created = true;
    }

  return field;
}
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      scalar_int_mode mode
	= as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = GET_MODE_SIZE (mode);
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
/* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */

static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	   struct nesting_info *info, gimple_seq *pseq)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.val_only = true;
  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
}


/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */

static inline void
walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	       struct nesting_info *info)
{
  gimple_seq body = gimple_body (info->context);
  walk_body (callback_stmt, callback_op, info, &body);
  gimple_set_body (info->context, body);
}
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
/* Similarly for ROOT and all functions nested underneath, depth first.  */

static void
walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		    struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    walk_function (callback_stmt, callback_op, n);
}
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here return indicating whether
   FNDECL has such a nested function.  ORIG_FN is the function we were
   trying to inline to use for checking whether any argument is variably
   modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  for (cgn = first_nested_function (cgn); cgn;
       cgn = next_nested_function (cgn))
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
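
/* As an illustrative example (assumed source, not taken from this file or
   the testsuite), the enclosing function below would be marked
   DECL_UNINLINABLE by the check above, because the nested function's
   parameter type depends on the enclosing function's argument N:

	void outer (int n)
	{
	  void inner (int (*p)[n]) { ... }
	  ...
	}

   Inlining OUTER would then require remapping the type of P at every call
   of INNER, which is exactly the situation described above.  */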
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;
  info->thunk_p = cgn->thunk;

  for (cgn = first_nested_function (cgn); cgn;
       cgn = next_nested_function (cgn))
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See check_for_nested_with_variably_modified for a discussion of
     why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      x = build_addr (info->frame_decl);
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build_simple_mem_ref_notrap (x);
    }

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
static void note_nonlocal_vla_type (struct nesting_info *info, tree type);

/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref_notrap (x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref_notrap (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || VAR_P (decl))
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */
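
/* For instance (a sketch of the intended effect, not literal GIMPLE output),
   a use of FOO located two nesting levels below the function that declares
   it is rewritten by the walk below roughly as

	tmp1 = CHAIN->__chain;
	tmp2 = tmp1->FOO;
	... use tmp2 ...

   with one further dereference of the frame slot when use_pointer_in_frame
   is true for FOO, and a save_tmp_var copy instead of init_tmp_var when the
   reference appears on the left-hand side.  */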
static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      {
	tree x, target_context = decl_function_context (t);

	if (info->context == target_context)
	  break;

	wi->changed = true;

	if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	  x = get_nonlocal_debug_decl (info, t);
	else
	  {
	    struct nesting_info *i = info;
	    while (i && i->context != target_context)
	      i = i->outer;
	    /* If none of the outer contexts is the target context, this means
	       that the VAR or PARM_DECL is referenced in a wrong context.  */
	    if (!i)
	      internal_error ("%s from %s referenced in %s",
			      IDENTIFIER_POINTER (DECL_NAME (t)),
			      IDENTIFIER_POINTER (DECL_NAME (target_context)),
			      IDENTIFIER_POINTER (DECL_NAME (info->context)));

	    x = lookup_field_for_decl (i, t, INSERT);
	    x = get_frame_field (info, target_context, x, &wi->gsi);
	    if (use_pointer_in_frame (t))
	      {
		x = init_tmp_var (info, x, &wi->gsi);
		x = build_simple_mem_ref_notrap (x);
	      }
	  }

	if (wi->val_only)
	  {
	    if (wi->is_lhs)
	      x = save_tmp_var (info, x, &wi->gsi);
	    else
	      x = init_tmp_var (info, x, &wi->gsi);
	  }

	*tp = x;
      }
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	    current_function_decl = save_context;
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
					     struct walk_stmt_info *);

/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl, *pdecl;
  int dummy;
  bitmap new_suppress;

  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      pdecl = NULL;
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
	    {
	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	      if (TREE_CODE (*pdecl) == INDIRECT_REF
		  || TREE_CODE (*pdecl) == ADDR_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	    }
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_DETACH:
	do_decl_clause:
	  if (pdecl == NULL)
	    pdecl = &OMP_CLAUSE_DECL (clause);
	  decl = *pdecl;
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      *pdecl = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	  /* Several OpenACC clauses have optional arguments.  Check if they
	     are present.  */
	  if (OMP_CLAUSE_OPERAND (clause, 0))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }

	  /* The gang clause accepts two arguments.  */
	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_NONTEMPORAL:
	do_decl_clause_no_supp:
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_ALLOCATE:
	  if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), &dummy, wi);
	    }
	  goto do_decl_clause_no_supp;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	  break;

	  /* The following clause belongs to the OpenACC cache directive, which
	     is discarded during gimplification.  */
	case OMP_CLAUSE__CACHE_:
	  /* The following clauses are only allowed in the OpenMP declare simd
	     directive, so not seen here.  */
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	  /* The following clauses are only allowed on OpenMP cancel and
	     cancellation point directives, which at this point have already
	     been lowered into a function call.  */
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	  /* The following clauses are only added during OMP lowering; nested
	     function decomposition happens before that.  */
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__SIMT_:
	  /* Anything else.  */
	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      tree save_local_var_chain = info->new_local_var_chain;
	      info->new_local_var_chain = NULL;
	      gimple_seq *seq = &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause);
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info, seq);
	      if (info->new_local_var_chain)
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (*seq), false);
	      info->new_local_var_chain = NULL;
	      seq = &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause);
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info, seq);
	      if (info->new_local_var_chain)
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (*seq), false);
	      info->new_local_var_chain = save_local_var_chain;
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  {
	    tree save_local_var_chain = info->new_local_var_chain;
	    info->new_local_var_chain = NULL;
	    gimple_seq *seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
	    walk_body (convert_nonlocal_reference_stmt,
		       convert_nonlocal_reference_op, info, seq);
	    if (info->new_local_var_chain)
	      declare_vars (info->new_local_var_chain,
			    gimple_seq_first_stmt (*seq), false);
	    info->new_local_var_chain = save_local_var_chain;
	  }
	  break;

	case OMP_CLAUSE_LINEAR:
	  {
	    tree save_local_var_chain = info->new_local_var_chain;
	    info->new_local_var_chain = NULL;
	    gimple_seq *seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause);
	    walk_body (convert_nonlocal_reference_stmt,
		       convert_nonlocal_reference_op, info, seq);
	    if (info->new_local_var_chain)
	      declare_vars (info->new_local_var_chain,
			    gimple_seq_first_stmt (*seq), false);
	    info->new_local_var_chain = save_local_var_chain;
	  }
	  break;

	default:
	  break;
	}

  return need_chain;
}
/* Create nonlocal debug decls for nonlocal VLA array bounds.  */

static void
note_nonlocal_vla_type (struct nesting_info *info, tree type)
{
  while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
    type = TREE_TYPE (type);

  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));

  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == VECTOR_TYPE
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE)
    type = TREE_TYPE (type);

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree domain, t;

      note_nonlocal_vla_type (info, TREE_TYPE (type));
      domain = TYPE_DOMAIN (type);
      if (domain)
	{
	  t = TYPE_MIN_VALUE (domain);
	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	  t = TYPE_MAX_VALUE (domain);
	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	}
    }
}
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_TEAMS:
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
					wi);
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
			   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
		 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
	gbind *bind_stmt = as_a <gbind *> (stmt);

	for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	  if (TREE_CODE (var) == NAMELIST_DECL)
	    {
	      /* Adjust decls mentioned in NAMELIST_DECL.  */
	      tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	      tree decl;
	      unsigned int i;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
		{
		  if (VAR_P (decl)
		      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		    continue;
		  if (decl_function_context (decl) != info->context)
		    CONSTRUCTOR_ELT (decls, i)->value
		      = get_nonlocal_debug_decl (info, decl);
		}
	    }

	*handled_ops_p = false;
	return NULL_TREE;
      }
    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
	      && decl_function_context (lhs) != info->context)
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
/* A subroutine of convert_local_reference.  Create a local variable
   in the parent function with DECL_VALUE_EXPR set to reference the
   field in FRAME.  This is used both for debug info and in OMP
   lowering.  */

static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);
  if (*slot)
    return *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || VAR_P (decl))
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}
1898 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1899 and PARM_DECLs that were referenced by inner nested functions.
1900 The rewrite will be a structure reference to the local frame variable. */
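/* E.g. a use of the parent's local 'a' that some nested function also
   references becomes FRAME.a here, or the debug decl created by
   get_local_debug_decl when expansion is suppressed for an OMP clause.  */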
1902 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1904 static tree
1905 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1907 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1908 struct nesting_info *const info = (struct nesting_info *) wi->info;
1909 tree t = *tp, field, x;
1910 bool save_val_only;
1912 *walk_subtrees = 0;
1913 switch (TREE_CODE (t))
1915 case VAR_DECL:
1916 /* Non-automatic variables are never processed. */
1917 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1918 break;
1919 /* FALLTHRU */
1921 case PARM_DECL:
1922 if (t != info->frame_decl && decl_function_context (t) == info->context)
1924 /* If we copied a pointer to the frame, then the original decl
1925 is used unchanged in the parent function. */
1926 if (use_pointer_in_frame (t))
1927 break;
1929 /* No need to transform anything if no child references the
1930 variable. */
1931 field = lookup_field_for_decl (info, t, NO_INSERT);
1932 if (!field)
1933 break;
1934 wi->changed = true;
1936 if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1937 x = get_local_debug_decl (info, t, field);
1938 else
1939 x = get_frame_field (info, info->context, field, &wi->gsi);
1941 if (wi->val_only)
1943 if (wi->is_lhs)
1944 x = save_tmp_var (info, x, &wi->gsi);
1945 else
1946 x = init_tmp_var (info, x, &wi->gsi);
1949 *tp = x;
1951 break;
1953 case ADDR_EXPR:
1954 save_val_only = wi->val_only;
1955 wi->val_only = false;
1956 wi->is_lhs = false;
1957 wi->changed = false;
1958 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1959 wi->val_only = save_val_only;
1961 /* If we converted anything ... */
1962 if (wi->changed)
1964 tree save_context;
1966 /* Then the frame decl is now addressable. */
1967 TREE_ADDRESSABLE (info->frame_decl) = 1;
1969 save_context = current_function_decl;
1970 current_function_decl = info->context;
1971 recompute_tree_invariant_for_addr_expr (t);
1973 /* If we are in a context where we only accept values, then
1974 compute the address into a temporary. */
1975 if (save_val_only)
1976 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1977 t, &wi->gsi);
1978 current_function_decl = save_context;
1980 break;
1982 case REALPART_EXPR:
1983 case IMAGPART_EXPR:
1984 case COMPONENT_REF:
1985 case ARRAY_REF:
1986 case ARRAY_RANGE_REF:
1987 case BIT_FIELD_REF:
1988 /* Go down this entire nest and just look at the final prefix and
1989 anything that describes the references. Otherwise, we lose track
1990 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1991 save_val_only = wi->val_only;
1992 wi->val_only = true;
1993 wi->is_lhs = false;
1994 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1996 if (TREE_CODE (t) == COMPONENT_REF)
1997 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1998 NULL);
1999 else if (TREE_CODE (t) == ARRAY_REF
2000 || TREE_CODE (t) == ARRAY_RANGE_REF)
2002 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
2003 NULL);
2004 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
2005 NULL);
2006 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
2007 NULL);
2010 wi->val_only = false;
2011 walk_tree (tp, convert_local_reference_op, wi, NULL);
2012 wi->val_only = save_val_only;
2013 break;
2015 case MEM_REF:
2016 save_val_only = wi->val_only;
2017 wi->val_only = true;
2018 wi->is_lhs = false;
2019 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
2020 wi, NULL);
2021 /* We need to re-fold the MEM_REF, since component references as
2022 part of an ADDR_EXPR address are not allowed. But we cannot
2023 fold here, as the chain record type is not yet finalized. */
2024 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
2025 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2026 info->mem_refs->add (tp);
2027 wi->val_only = save_val_only;
2028 break;
2030 case VIEW_CONVERT_EXPR:
2031 /* Just request to look at the subtrees, leaving val_only and lhs
2032 untouched. This might actually be for !val_only + lhs, in which
2033 case we don't want to force a replacement by a temporary. */
2034 *walk_subtrees = 1;
2035 break;
2037 default:
2038 if (!IS_TYPE_OR_DECL_P (t))
2040 *walk_subtrees = 1;
2041 wi->val_only = true;
2042 wi->is_lhs = false;
2044 break;
2047 return NULL_TREE;
2050 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
2051 struct walk_stmt_info *);
2053 /* Helper for convert_local_reference. Convert all the references in
2054 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
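/* E.g. for a shared(a) clause where 'a' was moved into the frame, the
   clause decl is replaced by the local debug decl for FRAME.a, its
   DECL_UID is recorded in the suppression bitmap, and the return value
   tells the caller that the frame object itself must be passed to the
   region.  */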
2056 static bool
2057 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
2059 struct nesting_info *const info = (struct nesting_info *) wi->info;
2060 bool need_frame = false, need_stmts = false;
2061 tree clause, decl, *pdecl;
2062 int dummy;
2063 bitmap new_suppress;
2065 new_suppress = BITMAP_GGC_ALLOC ();
2066 bitmap_copy (new_suppress, info->suppress_expansion);
2068 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2070 pdecl = NULL;
2071 switch (OMP_CLAUSE_CODE (clause))
2073 case OMP_CLAUSE_REDUCTION:
2074 case OMP_CLAUSE_IN_REDUCTION:
2075 case OMP_CLAUSE_TASK_REDUCTION:
2076 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2077 need_stmts = true;
2078 if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
2080 pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
2081 if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
2082 pdecl = &TREE_OPERAND (*pdecl, 0);
2083 if (TREE_CODE (*pdecl) == INDIRECT_REF
2084 || TREE_CODE (*pdecl) == ADDR_EXPR)
2085 pdecl = &TREE_OPERAND (*pdecl, 0);
2087 goto do_decl_clause;
2089 case OMP_CLAUSE_LASTPRIVATE:
2090 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
2091 need_stmts = true;
2092 goto do_decl_clause;
2094 case OMP_CLAUSE_LINEAR:
2095 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
2096 need_stmts = true;
2097 wi->val_only = true;
2098 wi->is_lhs = false;
2099 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
2100 wi);
2101 goto do_decl_clause;
2103 case OMP_CLAUSE_PRIVATE:
2104 case OMP_CLAUSE_FIRSTPRIVATE:
2105 case OMP_CLAUSE_COPYPRIVATE:
2106 case OMP_CLAUSE_SHARED:
2107 case OMP_CLAUSE_TO_DECLARE:
2108 case OMP_CLAUSE_LINK:
2109 case OMP_CLAUSE_USE_DEVICE_PTR:
2110 case OMP_CLAUSE_USE_DEVICE_ADDR:
2111 case OMP_CLAUSE_IS_DEVICE_PTR:
2112 case OMP_CLAUSE_DETACH:
2113 do_decl_clause:
2114 if (pdecl == NULL)
2115 pdecl = &OMP_CLAUSE_DECL (clause);
2116 decl = *pdecl;
2117 if (VAR_P (decl)
2118 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2119 break;
2120 if (decl_function_context (decl) == info->context
2121 && !use_pointer_in_frame (decl))
2123 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2124 if (field)
2126 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
2127 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
2128 bitmap_set_bit (new_suppress, DECL_UID (decl));
2129 *pdecl = get_local_debug_decl (info, decl, field);
2130 need_frame = true;
2133 break;
2135 case OMP_CLAUSE_SCHEDULE:
2136 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
2137 break;
2138 /* FALLTHRU */
2139 case OMP_CLAUSE_FINAL:
2140 case OMP_CLAUSE_IF:
2141 case OMP_CLAUSE_NUM_THREADS:
2142 case OMP_CLAUSE_DEPEND:
2143 case OMP_CLAUSE_DEVICE:
2144 case OMP_CLAUSE_NUM_TEAMS:
2145 case OMP_CLAUSE_THREAD_LIMIT:
2146 case OMP_CLAUSE_SAFELEN:
2147 case OMP_CLAUSE_SIMDLEN:
2148 case OMP_CLAUSE_PRIORITY:
2149 case OMP_CLAUSE_GRAINSIZE:
2150 case OMP_CLAUSE_NUM_TASKS:
2151 case OMP_CLAUSE_HINT:
2152 case OMP_CLAUSE_NUM_GANGS:
2153 case OMP_CLAUSE_NUM_WORKERS:
2154 case OMP_CLAUSE_VECTOR_LENGTH:
2155 case OMP_CLAUSE_GANG:
2156 case OMP_CLAUSE_WORKER:
2157 case OMP_CLAUSE_VECTOR:
2158 case OMP_CLAUSE_ASYNC:
2159 case OMP_CLAUSE_WAIT:
2160 /* Several OpenACC clauses have optional arguments. Check if they
2161 are present. */
2162 if (OMP_CLAUSE_OPERAND (clause, 0))
2164 wi->val_only = true;
2165 wi->is_lhs = false;
2166 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2167 &dummy, wi);
2170 /* The gang clause accepts two arguments. */
2171 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
2172 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
2174 wi->val_only = true;
2175 wi->is_lhs = false;
2176 convert_nonlocal_reference_op
2177 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2179 break;
2181 case OMP_CLAUSE_DIST_SCHEDULE:
2182 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2184 wi->val_only = true;
2185 wi->is_lhs = false;
2186 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2187 &dummy, wi);
2189 break;
2191 case OMP_CLAUSE_MAP:
2192 case OMP_CLAUSE_TO:
2193 case OMP_CLAUSE_FROM:
2194 if (OMP_CLAUSE_SIZE (clause))
2196 wi->val_only = true;
2197 wi->is_lhs = false;
2198 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2199 &dummy, wi);
2201 if (DECL_P (OMP_CLAUSE_DECL (clause)))
2202 goto do_decl_clause;
2203 wi->val_only = true;
2204 wi->is_lhs = false;
2205 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2206 wi, NULL);
2207 break;
2209 case OMP_CLAUSE_ALIGNED:
2210 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2212 wi->val_only = true;
2213 wi->is_lhs = false;
2214 convert_local_reference_op
2215 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2217 /* FALLTHRU */
2218 case OMP_CLAUSE_NONTEMPORAL:
2219 do_decl_clause_no_supp:
2220 /* Like do_decl_clause, but don't add any suppression. */
2221 decl = OMP_CLAUSE_DECL (clause);
2222 if (VAR_P (decl)
2223 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2224 break;
2225 if (decl_function_context (decl) == info->context
2226 && !use_pointer_in_frame (decl))
2228 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2229 if (field)
2231 OMP_CLAUSE_DECL (clause)
2232 = get_local_debug_decl (info, decl, field);
2233 need_frame = true;
2236 break;
2238 case OMP_CLAUSE_ALLOCATE:
2239 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause))
2241 wi->val_only = true;
2242 wi->is_lhs = false;
2243 convert_local_reference_op
2244 (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), &dummy, wi);
2246 goto do_decl_clause_no_supp;
2248 case OMP_CLAUSE_NOWAIT:
2249 case OMP_CLAUSE_ORDERED:
2250 case OMP_CLAUSE_DEFAULT:
2251 case OMP_CLAUSE_COPYIN:
2252 case OMP_CLAUSE_COLLAPSE:
2253 case OMP_CLAUSE_TILE:
2254 case OMP_CLAUSE_UNTIED:
2255 case OMP_CLAUSE_MERGEABLE:
2256 case OMP_CLAUSE_PROC_BIND:
2257 case OMP_CLAUSE_NOGROUP:
2258 case OMP_CLAUSE_THREADS:
2259 case OMP_CLAUSE_SIMD:
2260 case OMP_CLAUSE_DEFAULTMAP:
2261 case OMP_CLAUSE_ORDER:
2262 case OMP_CLAUSE_SEQ:
2263 case OMP_CLAUSE_INDEPENDENT:
2264 case OMP_CLAUSE_AUTO:
2265 case OMP_CLAUSE_IF_PRESENT:
2266 case OMP_CLAUSE_FINALIZE:
2267 case OMP_CLAUSE__CONDTEMP_:
2268 case OMP_CLAUSE__SCANTEMP_:
2269 break;
2271 /* The following clause belongs to the OpenACC cache directive, which
2272 is discarded during gimplification. */
2273 case OMP_CLAUSE__CACHE_:
2274 /* The following clauses are only allowed in the OpenMP declare simd
2275 directive, so not seen here. */
2276 case OMP_CLAUSE_UNIFORM:
2277 case OMP_CLAUSE_INBRANCH:
2278 case OMP_CLAUSE_NOTINBRANCH:
2279 /* The following clauses are only allowed on OpenMP cancel and
2280 cancellation point directives, which at this point have already
2281 been lowered into a function call. */
2282 case OMP_CLAUSE_FOR:
2283 case OMP_CLAUSE_PARALLEL:
2284 case OMP_CLAUSE_SECTIONS:
2285 case OMP_CLAUSE_TASKGROUP:
2286 /* The following clauses are only added during OMP lowering; nested
2287 function decomposition happens before that. */
2288 case OMP_CLAUSE__LOOPTEMP_:
2289 case OMP_CLAUSE__REDUCTEMP_:
2290 case OMP_CLAUSE__SIMDUID_:
2291 case OMP_CLAUSE__SIMT_:
2292 /* Anything else. */
2293 default:
2294 gcc_unreachable ();
2298 info->suppress_expansion = new_suppress;
2300 if (need_stmts)
2301 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2302 switch (OMP_CLAUSE_CODE (clause))
2304 case OMP_CLAUSE_REDUCTION:
2305 case OMP_CLAUSE_IN_REDUCTION:
2306 case OMP_CLAUSE_TASK_REDUCTION:
2307 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2309 tree old_context
2310 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2311 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2312 = info->context;
2313 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2314 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2315 = info->context;
2316 walk_body (convert_local_reference_stmt,
2317 convert_local_reference_op, info,
2318 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2319 walk_body (convert_local_reference_stmt,
2320 convert_local_reference_op, info,
2321 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2322 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2323 = old_context;
2324 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2325 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2326 = old_context;
2328 break;
2330 case OMP_CLAUSE_LASTPRIVATE:
2331 walk_body (convert_local_reference_stmt,
2332 convert_local_reference_op, info,
2333 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2334 break;
2336 case OMP_CLAUSE_LINEAR:
2337 walk_body (convert_local_reference_stmt,
2338 convert_local_reference_op, info,
2339 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2340 break;
2342 default:
2343 break;
2346 return need_frame;
2350 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2351 and PARM_DECLs that were referenced by inner nested functions.
2352 The rewrite will be a structure reference to the local frame variable. */
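/* E.g. when a parallel, task or offloaded target region ends up needing
   the frame object, an extra shared(FRAME) clause (or map(tofrom:FRAME)
   for offloaded targets) is added so the region receives the frame that
   now holds the promoted locals.  */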
2354 static tree
2355 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2356 struct walk_stmt_info *wi)
2358 struct nesting_info *info = (struct nesting_info *) wi->info;
2359 tree save_local_var_chain;
2360 bitmap save_suppress;
2361 char save_static_chain_added;
2362 bool frame_decl_added;
2363 gimple *stmt = gsi_stmt (*gsi);
2365 switch (gimple_code (stmt))
2367 case GIMPLE_OMP_TEAMS:
2368 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2370 save_suppress = info->suppress_expansion;
2371 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2372 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2373 info, gimple_omp_body_ptr (stmt));
2374 info->suppress_expansion = save_suppress;
2375 break;
2377 /* FALLTHRU */
2379 case GIMPLE_OMP_PARALLEL:
2380 case GIMPLE_OMP_TASK:
2381 save_suppress = info->suppress_expansion;
2382 frame_decl_added = false;
2383 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
2384 wi))
2386 tree c = build_omp_clause (gimple_location (stmt),
2387 OMP_CLAUSE_SHARED);
2388 (void) get_frame_type (info);
2389 OMP_CLAUSE_DECL (c) = info->frame_decl;
2390 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2391 gimple_omp_taskreg_set_clauses (stmt, c);
2392 info->static_chain_added |= 4;
2393 frame_decl_added = true;
2396 save_local_var_chain = info->new_local_var_chain;
2397 save_static_chain_added = info->static_chain_added;
2398 info->new_local_var_chain = NULL;
2399 info->static_chain_added = 0;
2401 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2402 gimple_omp_body_ptr (stmt));
2404 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2406 tree c = build_omp_clause (gimple_location (stmt),
2407 OMP_CLAUSE_SHARED);
2408 (void) get_frame_type (info);
2409 OMP_CLAUSE_DECL (c) = info->frame_decl;
2410 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2411 info->static_chain_added |= 4;
2412 gimple_omp_taskreg_set_clauses (stmt, c);
2414 if (info->new_local_var_chain)
2415 declare_vars (info->new_local_var_chain,
2416 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2417 info->new_local_var_chain = save_local_var_chain;
2418 info->suppress_expansion = save_suppress;
2419 info->static_chain_added |= save_static_chain_added;
2420 break;
2422 case GIMPLE_OMP_FOR:
2423 save_suppress = info->suppress_expansion;
2424 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
2425 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
2426 convert_local_reference_stmt,
2427 convert_local_reference_op, info);
2428 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2429 info, gimple_omp_body_ptr (stmt));
2430 info->suppress_expansion = save_suppress;
2431 break;
2433 case GIMPLE_OMP_SECTIONS:
2434 save_suppress = info->suppress_expansion;
2435 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
2436 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2437 info, gimple_omp_body_ptr (stmt));
2438 info->suppress_expansion = save_suppress;
2439 break;
2441 case GIMPLE_OMP_SINGLE:
2442 save_suppress = info->suppress_expansion;
2443 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
2444 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2445 info, gimple_omp_body_ptr (stmt));
2446 info->suppress_expansion = save_suppress;
2447 break;
2449 case GIMPLE_OMP_TASKGROUP:
2450 save_suppress = info->suppress_expansion;
2451 convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
2452 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2453 info, gimple_omp_body_ptr (stmt));
2454 info->suppress_expansion = save_suppress;
2455 break;
2457 case GIMPLE_OMP_TARGET:
2458 if (!is_gimple_omp_offloaded (stmt))
2460 save_suppress = info->suppress_expansion;
2461 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
2462 info->suppress_expansion = save_suppress;
2463 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2464 info, gimple_omp_body_ptr (stmt));
2465 break;
2467 save_suppress = info->suppress_expansion;
2468 frame_decl_added = false;
2469 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
2471 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2472 (void) get_frame_type (info);
2473 OMP_CLAUSE_DECL (c) = info->frame_decl;
2474 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2475 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2476 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2477 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2478 info->static_chain_added |= 4;
2479 frame_decl_added = true;
2482 save_local_var_chain = info->new_local_var_chain;
2483 save_static_chain_added = info->static_chain_added;
2484 info->new_local_var_chain = NULL;
2485 info->static_chain_added = 0;
2487 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2488 gimple_omp_body_ptr (stmt));
2490 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2492 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2493 (void) get_frame_type (info);
2494 OMP_CLAUSE_DECL (c) = info->frame_decl;
2495 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2496 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2497 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2498 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2499 info->static_chain_added |= 4;
2502 if (info->new_local_var_chain)
2503 declare_vars (info->new_local_var_chain,
2504 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2505 info->new_local_var_chain = save_local_var_chain;
2506 info->suppress_expansion = save_suppress;
2507 info->static_chain_added |= save_static_chain_added;
2508 break;
2510 case GIMPLE_OMP_SECTION:
2511 case GIMPLE_OMP_MASTER:
2512 case GIMPLE_OMP_ORDERED:
2513 case GIMPLE_OMP_SCAN:
2514 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2515 info, gimple_omp_body_ptr (stmt));
2516 break;
2518 case GIMPLE_COND:
2519 wi->val_only = true;
2520 wi->is_lhs = false;
2521 *handled_ops_p = false;
2522 return NULL_TREE;
2524 case GIMPLE_ASSIGN:
2525 if (gimple_clobber_p (stmt))
2527 tree lhs = gimple_assign_lhs (stmt);
2528 if (DECL_P (lhs)
2529 && decl_function_context (lhs) == info->context
2530 && !use_pointer_in_frame (lhs)
2531 && lookup_field_for_decl (info, lhs, NO_INSERT))
2533 gsi_replace (gsi, gimple_build_nop (), true);
2534 break;
2537 *handled_ops_p = false;
2538 return NULL_TREE;
2540 case GIMPLE_BIND:
2541 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2542 var;
2543 var = DECL_CHAIN (var))
2544 if (TREE_CODE (var) == NAMELIST_DECL)
2546 /* Adjust decls mentioned in NAMELIST_DECL. */
2547 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2548 tree decl;
2549 unsigned int i;
2551 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2553 if (VAR_P (decl)
2554 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2555 continue;
2556 if (decl_function_context (decl) == info->context
2557 && !use_pointer_in_frame (decl))
2559 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2560 if (field)
2562 CONSTRUCTOR_ELT (decls, i)->value
2563 = get_local_debug_decl (info, decl, field);
2569 *handled_ops_p = false;
2570 return NULL_TREE;
2572 default:
2573 /* For every other statement that we are not interested in
2574 handling here, let the walker traverse the operands. */
2575 *handled_ops_p = false;
2576 return NULL_TREE;
2579 /* Indicate that we have handled all the operands ourselves. */
2580 *handled_ops_p = true;
2581 return NULL_TREE;
2585 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2586 that reference labels from outer functions. The rewrite will be a
2587 call to __builtin_nonlocal_goto. */
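/* E.g. a 'goto lab' in a nested function, where 'lab' belongs to an outer
   function, becomes roughly
     __builtin_nonlocal_goto (&NEW_LAB, &CHAIN->nl_goto_field);
   with NEW_LAB an artificial DECL_NONLOCAL label recorded in the outer
   function's var_map for the receiver pass below.  */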
2589 static tree
2590 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2591 struct walk_stmt_info *wi)
2593 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2594 tree label, new_label, target_context, x, field;
2595 gcall *call;
2596 gimple *stmt = gsi_stmt (*gsi);
2598 if (gimple_code (stmt) != GIMPLE_GOTO)
2600 *handled_ops_p = false;
2601 return NULL_TREE;
2604 label = gimple_goto_dest (stmt);
2605 if (TREE_CODE (label) != LABEL_DECL)
2607 *handled_ops_p = false;
2608 return NULL_TREE;
2611 target_context = decl_function_context (label);
2612 if (target_context == info->context)
2614 *handled_ops_p = false;
2615 return NULL_TREE;
2618 for (i = info->outer; target_context != i->context; i = i->outer)
2619 continue;
2621 /* The original user label may also be used for a normal goto, therefore
2622 we must create a new label that will actually receive the abnormal
2623 control transfer. This new label will be marked LABEL_NONLOCAL; this
2624 mark will trigger proper behavior in the cfg, as well as cause the
2625 (hairy target-specific) non-local goto receiver code to be generated
2626 when we expand rtl. Enter this association into var_map so that we
2627 can insert the new label into the IL during a second pass. */
2628 tree *slot = &i->var_map->get_or_insert (label);
2629 if (*slot == NULL)
2631 new_label = create_artificial_label (UNKNOWN_LOCATION);
2632 DECL_NONLOCAL (new_label) = 1;
2633 *slot = new_label;
2635 else
2636 new_label = *slot;
2638 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2639 field = get_nl_goto_field (i);
2640 x = get_frame_field (info, target_context, field, gsi);
2641 x = build_addr (x);
2642 x = gsi_gimplify_val (info, x, gsi);
2643 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2644 2, build_addr (new_label), x);
2645 gsi_replace (gsi, call, false);
2647 /* We have handled all of STMT's operands, no need to keep going. */
2648 *handled_ops_p = true;
2649 return NULL_TREE;
2653 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2654 are referenced via nonlocal goto from a nested function. The rewrite
2655 will involve installing a newly generated DECL_NONLOCAL label, and
2656 (potentially) a branch around the rtl gunk that is assumed to be
2657 attached to such a label. */
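/* E.g. a label LAB that is the target of a nonlocal goto is rewritten
   roughly as
     goto LAB;          (only if the previous statement may fall through)
     NEW_LAB:           (the DECL_NONLOCAL label recorded in var_map)
     LAB:
   so the abnormal transfer lands on NEW_LAB while a normal fallthrough
   still reaches LAB.  */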
2659 static tree
2660 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2661 struct walk_stmt_info *wi)
2663 struct nesting_info *const info = (struct nesting_info *) wi->info;
2664 tree label, new_label;
2665 gimple_stmt_iterator tmp_gsi;
2666 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2668 if (!stmt)
2670 *handled_ops_p = false;
2671 return NULL_TREE;
2674 label = gimple_label_label (stmt);
2676 tree *slot = info->var_map->get (label);
2677 if (!slot)
2679 *handled_ops_p = false;
2680 return NULL_TREE;
2683 /* If there's any possibility that the previous statement falls through,
2684 then we must branch around the new non-local label. */
2685 tmp_gsi = wi->gsi;
2686 gsi_prev (&tmp_gsi);
2687 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2689 gimple *stmt = gimple_build_goto (label);
2690 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2693 new_label = (tree) *slot;
2694 stmt = gimple_build_label (new_label);
2695 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2697 *handled_ops_p = true;
2698 return NULL_TREE;
2702 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2703 of nested functions that require the use of trampolines. The rewrite
2704 will involve a reference to a trampoline generated for the occasion. */
2706 static tree
2707 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2709 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2710 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2711 tree t = *tp, decl, target_context, x, builtin;
2712 bool descr;
2713 gcall *call;
2715 *walk_subtrees = 0;
2716 switch (TREE_CODE (t))
2718 case ADDR_EXPR:
2719 /* Build
2720 T.1 = &CHAIN->tramp;
2721 T.2 = __builtin_adjust_trampoline (T.1);
2722 T.3 = (func_type)T.2;
2725 decl = TREE_OPERAND (t, 0);
2726 if (TREE_CODE (decl) != FUNCTION_DECL)
2727 break;
2729 /* Only need to process nested functions. */
2730 target_context = decl_function_context (decl);
2731 if (!target_context)
2732 break;
2734 /* If the nested function doesn't use a static chain, then
2735 it doesn't need a trampoline. */
2736 if (!DECL_STATIC_CHAIN (decl))
2737 break;
2739 /* If we don't want a trampoline, then don't build one. */
2740 if (TREE_NO_TRAMPOLINE (t))
2741 break;
2743 /* Look up the immediate parent of the callee, as that's where
2744 we need to insert the trampoline. */
2745 for (i = info; i->context != target_context; i = i->outer)
2746 continue;
2748 /* Decide whether to generate a descriptor or a trampoline. */
2749 descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;
2751 if (descr)
2752 x = lookup_descr_for_decl (i, decl, INSERT);
2753 else
2754 x = lookup_tramp_for_decl (i, decl, INSERT);
2756 /* Compute the address of the field holding the trampoline. */
2757 x = get_frame_field (info, target_context, x, &wi->gsi);
2758 x = build_addr (x);
2759 x = gsi_gimplify_val (info, x, &wi->gsi);
2761 /* Do machine-specific ugliness. Normally this will involve
2762 computing extra alignment, but it can really be anything. */
2763 if (descr)
2764 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
2765 else
2766 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2767 call = gimple_build_call (builtin, 1, x);
2768 x = init_tmp_var_with_call (info, &wi->gsi, call);
2770 /* Cast back to the proper function type. */
2771 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2772 x = init_tmp_var (info, x, &wi->gsi);
2774 *tp = x;
2775 break;
2777 default:
2778 if (!IS_TYPE_OR_DECL_P (t))
2779 *walk_subtrees = 1;
2780 break;
2783 return NULL_TREE;
2787 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2788 to addresses of nested functions that require the use of
2789 trampolines. The rewrite will involve a reference to a trampoline
2790 generated for the occasion. */
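/* E.g. when a trampoline address is taken inside an OpenMP parallel, task
   or offloaded target region, the CHAIN.* / FRAME.* object it needs is in
   addition passed to the region through a firstprivate, shared or map
   clause added below.  */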
2792 static tree
2793 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2794 struct walk_stmt_info *wi)
2796 struct nesting_info *info = (struct nesting_info *) wi->info;
2797 gimple *stmt = gsi_stmt (*gsi);
2799 switch (gimple_code (stmt))
2801 case GIMPLE_CALL:
2803 /* Only walk call arguments, lest we generate trampolines for
2804 direct calls. */
2805 unsigned long i, nargs = gimple_call_num_args (stmt);
2806 for (i = 0; i < nargs; i++)
2807 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2808 wi, NULL);
2809 break;
2812 case GIMPLE_OMP_TEAMS:
2813 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2815 *handled_ops_p = false;
2816 return NULL_TREE;
2818 goto do_parallel;
2820 case GIMPLE_OMP_TARGET:
2821 if (!is_gimple_omp_offloaded (stmt))
2823 *handled_ops_p = false;
2824 return NULL_TREE;
2826 /* FALLTHRU */
2827 case GIMPLE_OMP_PARALLEL:
2828 case GIMPLE_OMP_TASK:
2829 do_parallel:
2831 tree save_local_var_chain = info->new_local_var_chain;
2832 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2833 info->new_local_var_chain = NULL;
2834 char save_static_chain_added = info->static_chain_added;
2835 info->static_chain_added = 0;
2836 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2837 info, gimple_omp_body_ptr (stmt));
2838 if (info->new_local_var_chain)
2839 declare_vars (info->new_local_var_chain,
2840 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2841 false);
2842 for (int i = 0; i < 2; i++)
2844 tree c, decl;
2845 if ((info->static_chain_added & (1 << i)) == 0)
2846 continue;
2847 decl = i ? get_chain_decl (info) : info->frame_decl;
2848 /* Don't add CHAIN.* or FRAME.* twice. */
2849 for (c = gimple_omp_taskreg_clauses (stmt); c;
2851 c = OMP_CLAUSE_CHAIN (c))
2852 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2853 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2854 && OMP_CLAUSE_DECL (c) == decl)
2855 break;
2856 if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2858 c = build_omp_clause (gimple_location (stmt),
2859 i ? OMP_CLAUSE_FIRSTPRIVATE
2860 : OMP_CLAUSE_SHARED);
2861 OMP_CLAUSE_DECL (c) = decl;
2862 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2863 gimple_omp_taskreg_set_clauses (stmt, c);
2865 else if (c == NULL)
2867 c = build_omp_clause (gimple_location (stmt),
2868 OMP_CLAUSE_MAP);
2869 OMP_CLAUSE_DECL (c) = decl;
2870 OMP_CLAUSE_SET_MAP_KIND (c,
2871 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2872 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2873 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2874 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2878 info->new_local_var_chain = save_local_var_chain;
2879 info->static_chain_added |= save_static_chain_added;
2881 break;
2883 default:
2884 *handled_ops_p = false;
2885 return NULL_TREE;
2888 *handled_ops_p = true;
2889 return NULL_TREE;
2894 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2895 that reference nested functions to make sure that the static chain
2896 is set up properly for the call. */
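/* E.g. a direct call to a nested function that uses a static chain gets
   its static chain operand set via gimple_call_set_chain: to &FRAME when
   the callee is nested directly in the current function, otherwise to the
   incoming CHAIN walked up through the chain fields until the callee's
   parent context is reached.  */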
2898 static tree
2899 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2900 struct walk_stmt_info *wi)
2902 struct nesting_info *const info = (struct nesting_info *) wi->info;
2903 tree decl, target_context;
2904 char save_static_chain_added;
2905 int i;
2906 gimple *stmt = gsi_stmt (*gsi);
2908 switch (gimple_code (stmt))
2910 case GIMPLE_CALL:
2911 if (gimple_call_chain (stmt))
2912 break;
2913 decl = gimple_call_fndecl (stmt);
2914 if (!decl)
2915 break;
2916 target_context = decl_function_context (decl);
2917 if (target_context && DECL_STATIC_CHAIN (decl))
2919 struct nesting_info *i = info;
2920 while (i && i->context != target_context)
2921 i = i->outer;
2922 /* If none of the outer contexts is the target context, this means
2923 that the function is called in the wrong context. */
2924 if (!i)
2925 internal_error ("%s from %s called in %s",
2926 IDENTIFIER_POINTER (DECL_NAME (decl)),
2927 IDENTIFIER_POINTER (DECL_NAME (target_context)),
2928 IDENTIFIER_POINTER (DECL_NAME (info->context)));
2930 gimple_call_set_chain (as_a <gcall *> (stmt),
2931 get_static_chain (info, target_context,
2932 &wi->gsi));
2933 info->static_chain_added |= (1 << (info->context != target_context));
2935 break;
2937 case GIMPLE_OMP_TEAMS:
2938 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2940 walk_body (convert_gimple_call, NULL, info,
2941 gimple_omp_body_ptr (stmt));
2942 break;
2944 /* FALLTHRU */
2946 case GIMPLE_OMP_PARALLEL:
2947 case GIMPLE_OMP_TASK:
2948 save_static_chain_added = info->static_chain_added;
2949 info->static_chain_added = 0;
2950 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2951 for (i = 0; i < 2; i++)
2953 tree c, decl;
2954 if ((info->static_chain_added & (1 << i)) == 0)
2955 continue;
2956 decl = i ? get_chain_decl (info) : info->frame_decl;
2957 /* Don't add CHAIN.* or FRAME.* twice. */
2958 for (c = gimple_omp_taskreg_clauses (stmt); c;
2960 c = OMP_CLAUSE_CHAIN (c))
2961 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2962 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2963 && OMP_CLAUSE_DECL (c) == decl)
2964 break;
2965 if (c == NULL)
2967 c = build_omp_clause (gimple_location (stmt),
2968 i ? OMP_CLAUSE_FIRSTPRIVATE
2969 : OMP_CLAUSE_SHARED);
2970 OMP_CLAUSE_DECL (c) = decl;
2971 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2972 gimple_omp_taskreg_set_clauses (stmt, c);
2975 info->static_chain_added |= save_static_chain_added;
2976 break;
2978 case GIMPLE_OMP_TARGET:
2979 if (!is_gimple_omp_offloaded (stmt))
2981 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2982 break;
2984 save_static_chain_added = info->static_chain_added;
2985 info->static_chain_added = 0;
2986 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2987 for (i = 0; i < 2; i++)
2989 tree c, decl;
2990 if ((info->static_chain_added & (1 << i)) == 0)
2991 continue;
2992 decl = i ? get_chain_decl (info) : info->frame_decl;
2993 /* Don't add CHAIN.* or FRAME.* twice. */
2994 for (c = gimple_omp_target_clauses (stmt); c;
2996 c = OMP_CLAUSE_CHAIN (c))
2997 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2998 && OMP_CLAUSE_DECL (c) == decl)
2999 break;
3000 if (c == NULL)
3002 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
3003 OMP_CLAUSE_DECL (c) = decl;
3004 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
3005 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
3006 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
3007 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
3011 info->static_chain_added |= save_static_chain_added;
3012 break;
3014 case GIMPLE_OMP_FOR:
3015 walk_body (convert_gimple_call, NULL, info,
3016 gimple_omp_for_pre_body_ptr (stmt));
3017 /* FALLTHRU */
3018 case GIMPLE_OMP_SECTIONS:
3019 case GIMPLE_OMP_SECTION:
3020 case GIMPLE_OMP_SINGLE:
3021 case GIMPLE_OMP_MASTER:
3022 case GIMPLE_OMP_TASKGROUP:
3023 case GIMPLE_OMP_ORDERED:
3024 case GIMPLE_OMP_SCAN:
3025 case GIMPLE_OMP_CRITICAL:
3026 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
3027 break;
3029 default:
3030 /* Keep looking for other operands. */
3031 *handled_ops_p = false;
3032 return NULL_TREE;
3035 *handled_ops_p = true;
3036 return NULL_TREE;
3039 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
3040 call expressions. At the same time, determine if a nested function
3041 actually uses its static chain; if not, remember that. */
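/* The loop below iterates to a fixed point: rewriting one function (for
   instance materializing a trampoline) can make another function's static
   chain necessary, so we stop only when the number of functions with
   DECL_STATIC_CHAIN set no longer changes.  */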
3043 static void
3044 convert_all_function_calls (struct nesting_info *root)
3046 unsigned int chain_count = 0, old_chain_count, iter_count;
3047 struct nesting_info *n;
3049 /* First, optimistically clear static_chain for all decls that haven't
3050 used the static chain already for variable access. But always create
3051 it if not optimizing. This makes it possible to reconstruct the static
3052 nesting tree at run time and thus to resolve up-level references from
3053 within the debugger. */
3054 FOR_EACH_NEST_INFO (n, root)
3056 if (n->thunk_p)
3057 continue;
3058 tree decl = n->context;
3059 if (!optimize)
3061 if (n->inner)
3062 (void) get_frame_type (n);
3063 if (n->outer)
3064 (void) get_chain_decl (n);
3066 else if (!n->outer || (!n->chain_decl && !n->chain_field))
3068 DECL_STATIC_CHAIN (decl) = 0;
3069 if (dump_file && (dump_flags & TDF_DETAILS))
3070 fprintf (dump_file, "Guessing no static-chain for %s\n",
3071 lang_hooks.decl_printable_name (decl, 2));
3073 else
3074 DECL_STATIC_CHAIN (decl) = 1;
3075 chain_count += DECL_STATIC_CHAIN (decl);
3078 FOR_EACH_NEST_INFO (n, root)
3079 if (n->thunk_p)
3081 tree decl = n->context;
3082 tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
3083 DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
3086 /* Walk the functions and perform transformations. Note that these
3087 transformations can induce new uses of the static chain, which in turn
3088 require re-examining all users of the decl. */
3089 /* ??? It would make sense to try to use the call graph to speed this up,
3090 but the call graph hasn't really been built yet. Even if it had been, we
3091 would still need to iterate in this loop since address-of references
3092 wouldn't show up in the callgraph anyway. */
3093 iter_count = 0;
3096 old_chain_count = chain_count;
3097 chain_count = 0;
3098 iter_count++;
3100 if (dump_file && (dump_flags & TDF_DETAILS))
3101 fputc ('\n', dump_file);
3103 FOR_EACH_NEST_INFO (n, root)
3105 if (n->thunk_p)
3106 continue;
3107 tree decl = n->context;
3108 walk_function (convert_tramp_reference_stmt,
3109 convert_tramp_reference_op, n);
3110 walk_function (convert_gimple_call, NULL, n);
3111 chain_count += DECL_STATIC_CHAIN (decl);
3114 FOR_EACH_NEST_INFO (n, root)
3115 if (n->thunk_p)
3117 tree decl = n->context;
3118 tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
3119 DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
3122 while (chain_count != old_chain_count);
3124 if (dump_file && (dump_flags & TDF_DETAILS))
3125 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
3126 iter_count);
3129 struct nesting_copy_body_data
3131 copy_body_data cb;
3132 struct nesting_info *root;
3135 /* A helper subroutine for debug_var_chain type remapping. */
3137 static tree
3138 nesting_copy_decl (tree decl, copy_body_data *id)
3140 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
3141 tree *slot = nid->root->var_map->get (decl);
3143 if (slot)
3144 return (tree) *slot;
3146 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
3148 tree new_decl = copy_decl_no_change (decl, id);
3149 DECL_ORIGINAL_TYPE (new_decl)
3150 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
3151 return new_decl;
3154 if (VAR_P (decl)
3155 || TREE_CODE (decl) == PARM_DECL
3156 || TREE_CODE (decl) == RESULT_DECL)
3157 return decl;
3159 return copy_decl_no_change (decl, id);
3162 /* A helper function for remap_vla_decls. See if *TP contains
3163 some remapped variables. */
3165 static tree
3166 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
3168 struct nesting_info *root = (struct nesting_info *) data;
3169 tree t = *tp;
3171 if (DECL_P (t))
3173 *walk_subtrees = 0;
3174 tree *slot = root->var_map->get (t);
3176 if (slot)
3177 return *slot;
3179 return NULL;
3182 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
3183 involved. */
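/* E.g. for a VLA 'char a[n]' whose DECL_VALUE_EXPR is '*a.1', where the
   artificial pointer 'a.1' was remapped into the frame, the value
   expression is rebuilt as an indirection through the remapped decl's own
   value expression (effectively '*FRAME.a.1') and the variably modified
   type is remapped along with it.  */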
3185 static void
3186 remap_vla_decls (tree block, struct nesting_info *root)
3188 tree var, subblock, val, type;
3189 struct nesting_copy_body_data id;
3191 for (subblock = BLOCK_SUBBLOCKS (block);
3192 subblock;
3193 subblock = BLOCK_CHAIN (subblock))
3194 remap_vla_decls (subblock, root);
3196 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3197 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3199 val = DECL_VALUE_EXPR (var);
3200 type = TREE_TYPE (var);
3202 if (!(TREE_CODE (val) == INDIRECT_REF
3203 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
3204 && variably_modified_type_p (type, NULL)))
3205 continue;
3207 if (root->var_map->get (TREE_OPERAND (val, 0))
3208 || walk_tree (&type, contains_remapped_vars, root, NULL))
3209 break;
3212 if (var == NULL_TREE)
3213 return;
3215 memset (&id, 0, sizeof (id));
3216 id.cb.copy_decl = nesting_copy_decl;
3217 id.cb.decl_map = new hash_map<tree, tree>;
3218 id.root = root;
3220 for (; var; var = DECL_CHAIN (var))
3221 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3223 struct nesting_info *i;
3224 tree newt, context;
3226 val = DECL_VALUE_EXPR (var);
3227 type = TREE_TYPE (var);
3229 if (!(TREE_CODE (val) == INDIRECT_REF
3230 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
3231 && variably_modified_type_p (type, NULL)))
3232 continue;
3234 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
3235 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
3236 continue;
3238 context = decl_function_context (var);
3239 for (i = root; i; i = i->outer)
3240 if (i->context == context)
3241 break;
3243 if (i == NULL)
3244 continue;
3246 /* Fully expand value expressions. This avoids having debug variables
3247 that are only referenced from them and can be swept during GC. */
3248 if (slot)
3250 tree t = (tree) *slot;
3251 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
3252 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
3255 id.cb.src_fn = i->context;
3256 id.cb.dst_fn = i->context;
3257 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3259 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
3260 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3262 newt = TREE_TYPE (newt);
3263 type = TREE_TYPE (type);
3265 if (TYPE_NAME (newt)
3266 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3267 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3268 && newt != type
3269 && TYPE_NAME (newt) == TYPE_NAME (type))
3270 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3272 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
3273 if (val != DECL_VALUE_EXPR (var))
3274 SET_DECL_VALUE_EXPR (var, val);
3277 delete id.cb.decl_map;
3280 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3281 involved. */
3283 static void
3284 fixup_vla_decls (tree block)
3286 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3287 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3289 tree val = DECL_VALUE_EXPR (var);
3291 if (!(TREE_CODE (val) == INDIRECT_REF
3292 && VAR_P (TREE_OPERAND (val, 0))
3293 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3294 continue;
3296 /* Fully expand value expressions. This avoids having debug variables
3297 that are only referenced from them and can be swept during GC. */
3298 val = build1 (INDIRECT_REF, TREE_TYPE (val),
3299 DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3300 SET_DECL_VALUE_EXPR (var, val);
3303 for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3304 fixup_vla_decls (sub);
3307 /* Fold the MEM_REF *E. */
3308 bool
3309 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3311 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3312 *ref_p = fold (*ref_p);
3313 return true;
3316 /* Given DECL, a nested function, build an initialization call for FIELD,
3317 the trampoline or descriptor for DECL, using FUNC as the function. */
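/* E.g. for a trampoline field this builds roughly
     __builtin_init_trampoline (&FRAME.<tramp field>, &DECL, &FRAME);
   the descriptor case builds the analogous __builtin_init_descriptor
   call.  */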
3319 static gcall *
3320 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3321 tree func)
3323 tree arg1, arg2, arg3, x;
3325 gcc_assert (DECL_STATIC_CHAIN (decl));
3326 arg3 = build_addr (info->frame_decl);
3328 arg2 = build_addr (decl);
3330 x = build3 (COMPONENT_REF, TREE_TYPE (field),
3331 info->frame_decl, field, NULL_TREE);
3332 arg1 = build_addr (x);
3334 return gimple_build_call (func, 3, arg1, arg2, arg3);
3337 /* Do "everything else" to clean up or complete state collected by the various
3338 walking passes -- create a field to hold the frame base address, lay out the
3339 types and decls, generate code to initialize the frame decl, store critical
3340 expressions in the struct function for rtl to find. */
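/* Concretely, for a function whose locals were promoted into the frame
   this prepends, in order: the assignment of the CFA to the
   FRAME_BASE.PARENT field (for the debugger), copies of any non-locally
   referenced parameters into their frame fields, the initialization of
   the chain field from the chain decl, and the
   __builtin_init_trampoline / __builtin_init_descriptor calls.  */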
3342 static void
3343 finalize_nesting_tree_1 (struct nesting_info *root)
3345 gimple_seq stmt_list = NULL;
3346 gimple *stmt;
3347 tree context = root->context;
3348 struct function *sf;
3350 if (root->thunk_p)
3351 return;
3353 /* If we created a non-local frame type or decl, we need to lay them
3354 out at this time. */
3355 if (root->frame_type)
3357 /* Debugging information needs to compute the frame base address of the
3358 parent frame out of the static chain from the nested frame.
3360 The static chain is the address of the FRAME record, so one could
3361 imagine it would be possible to compute the frame base address just
3362 by adding a constant offset to this address. Unfortunately, this is not
3363 possible: if the FRAME object has alignment constraints that are
3364 stronger than the stack, then the offset between the frame base and
3365 the FRAME object will be dynamic.
3367 What we do instead is to append a field to the FRAME object that holds
3368 the frame base address: then debug info just has to fetch this
3369 field. */
3371 /* Debugging information will refer to the CFA as the frame base
3372 address: we will do the same here. */
3373 const tree frame_addr_fndecl
3374 = builtin_decl_explicit (BUILT_IN_DWARF_CFA);
3376 /* Create a field in the FRAME record to hold the frame base address for
3377 this stack frame. Since it will be used only by the debugger, put it
3378 at the end of the record in order not to shift all other offsets. */
3379 tree fb_decl = make_node (FIELD_DECL);
3381 DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
3382 TREE_TYPE (fb_decl) = ptr_type_node;
3383 TREE_ADDRESSABLE (fb_decl) = 1;
3384 DECL_CONTEXT (fb_decl) = root->frame_type;
3385 TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
3386 fb_decl);
3388 /* In some cases the frame type will trigger the -Wpadded warning.
3389 This is not helpful; suppress it. */
3390 int save_warn_padded = warn_padded;
3391 warn_padded = 0;
3392 layout_type (root->frame_type);
3393 warn_padded = save_warn_padded;
3394 layout_decl (root->frame_decl, 0);
3396 /* Initialize the frame base address field. If the builtin we need is
3397 not available, set it to NULL so that debugging information does not
3398 reference junk. */
3399 tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
3400 root->frame_decl, fb_decl, NULL_TREE);
3401 tree fb_tmp;
3403 if (frame_addr_fndecl != NULL_TREE)
3405 gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
3406 integer_zero_node);
3407 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3409 fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
3411 else
3412 fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
3413 gimple_seq_add_stmt (&stmt_list,
3414 gimple_build_assign (fb_ref, fb_tmp));
3416 declare_vars (root->frame_decl,
3417 gimple_seq_first_stmt (gimple_body (context)), true);
3420 /* If any parameters were referenced non-locally, then we need to insert
3421 a copy or a pointer. */
3422 if (root->any_parm_remapped)
3424 tree p;
3425 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
3427 tree field, x, y;
3429 field = lookup_field_for_decl (root, p, NO_INSERT);
3430 if (!field)
3431 continue;
3433 if (use_pointer_in_frame (p))
3434 x = build_addr (p);
3435 else
3436 x = p;
3438 /* If the assignment is from a non-register, the stmt is
3439 not valid gimple. Make it so by using a temporary instead. */
3440 if (!is_gimple_reg (x)
3441 && is_gimple_reg_type (TREE_TYPE (x)))
3443 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3444 x = init_tmp_var (root, x, &gsi);
3447 y = build3 (COMPONENT_REF, TREE_TYPE (field),
3448 root->frame_decl, field, NULL_TREE);
3449 stmt = gimple_build_assign (y, x);
3450 gimple_seq_add_stmt (&stmt_list, stmt);
3454 /* If a chain_field was created, then it needs to be initialized
3455 from chain_decl. */
3456 if (root->chain_field)
3458 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
3459 root->frame_decl, root->chain_field, NULL_TREE);
3460 stmt = gimple_build_assign (x, get_chain_decl (root));
3461 gimple_seq_add_stmt (&stmt_list, stmt);
3464 /* If trampolines were created, then we need to initialize them. */
3465 if (root->any_tramp_created)
3467 struct nesting_info *i;
3468 for (i = root->inner; i ; i = i->next)
3470 tree field, x;
3472 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
3473 if (!field)
3474 continue;
3476 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
3477 stmt = build_init_call_stmt (root, i->context, field, x);
3478 gimple_seq_add_stmt (&stmt_list, stmt);
3482 /* If descriptors were created, then we need to initialize them. */
3483 if (root->any_descr_created)
3485 struct nesting_info *i;
3486 for (i = root->inner; i ; i = i->next)
3488 tree field, x;
3490 field = lookup_descr_for_decl (root, i->context, NO_INSERT);
3491 if (!field)
3492 continue;
3494 x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
3495 stmt = build_init_call_stmt (root, i->context, field, x);
3496 gimple_seq_add_stmt (&stmt_list, stmt);
3500 /* If we created initialization statements, insert them. */
3501 if (stmt_list)
3503 gbind *bind;
3504 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
3505 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
3506 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
3507 gimple_bind_set_body (bind, stmt_list);
3510 /* If a chain_decl was created, then it needs to be registered with
3511 struct function so that it gets initialized from the static chain
3512 register at the beginning of the function. */
3513 sf = DECL_STRUCT_FUNCTION (root->context);
3514 sf->static_chain_decl = root->chain_decl;
3516 /* Similarly for the non-local goto save area. */
3517 if (root->nl_goto_field)
3519 sf->nonlocal_goto_save_area
3520 = get_frame_field (root, context, root->nl_goto_field, NULL);
3521 sf->has_nonlocal_label = 1;
3524 /* Make sure all new local variables get inserted into the
3525 proper BIND_EXPR. */
3526 if (root->new_local_var_chain)
3527 declare_vars (root->new_local_var_chain,
3528 gimple_seq_first_stmt (gimple_body (root->context)),
3529 false);
3531 if (root->debug_var_chain)
3533 tree debug_var;
3534 gbind *scope;
3536 remap_vla_decls (DECL_INITIAL (root->context), root);
3538 for (debug_var = root->debug_var_chain; debug_var;
3539 debug_var = DECL_CHAIN (debug_var))
3540 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3541 break;
3543 /* If there are any debug decls with variable length types,
3544 remap those types using other debug_var_chain variables. */
3545 if (debug_var)
3547 struct nesting_copy_body_data id;
3549 memset (&id, 0, sizeof (id));
3550 id.cb.copy_decl = nesting_copy_decl;
3551 id.cb.decl_map = new hash_map<tree, tree>;
3552 id.root = root;
3554 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
3555 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3557 tree type = TREE_TYPE (debug_var);
3558 tree newt, t = type;
3559 struct nesting_info *i;
3561 for (i = root; i; i = i->outer)
3562 if (variably_modified_type_p (type, i->context))
3563 break;
3565 if (i == NULL)
3566 continue;
3568 id.cb.src_fn = i->context;
3569 id.cb.dst_fn = i->context;
3570 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3572 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
3573 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3575 newt = TREE_TYPE (newt);
3576 t = TREE_TYPE (t);
3578 if (TYPE_NAME (newt)
3579 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3580 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3581 && newt != t
3582 && TYPE_NAME (newt) == TYPE_NAME (t))
3583 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3586 delete id.cb.decl_map;
3589 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
3590 if (gimple_bind_block (scope))
3591 declare_vars (root->debug_var_chain, scope, true);
3592 else
3593 BLOCK_VARS (DECL_INITIAL (root->context))
3594 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
3595 root->debug_var_chain);
3597 else
3598 fixup_vla_decls (DECL_INITIAL (root->context));
3600 /* Fold the rewritten MEM_REF trees. */
3601 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
3603 /* Dump the translated tree function. */
3604 if (dump_file)
3606 fputs ("\n\n", dump_file);
3607 dump_function_to_file (root->context, dump_file, dump_flags);
3611 static void
3612 finalize_nesting_tree (struct nesting_info *root)
3614 struct nesting_info *n;
3615 FOR_EACH_NEST_INFO (n, root)
3616 finalize_nesting_tree_1 (n);
3619 /* Unnest the nodes and pass them to cgraph. */
3621 static void
3622 unnest_nesting_tree_1 (struct nesting_info *root)
3624 struct cgraph_node *node = cgraph_node::get (root->context);
3626 /* For nested functions, update the cgraph to reflect unnesting.
3627 We also delay finalization of these functions until this point. */
3628 if (nested_function_info::get (node)->origin)
3630 unnest_function (node);
3631 if (!root->thunk_p)
3632 cgraph_node::finalize_function (root->context, true);
3636 static void
3637 unnest_nesting_tree (struct nesting_info *root)
3639 struct nesting_info *n;
3640 FOR_EACH_NEST_INFO (n, root)
3641 unnest_nesting_tree_1 (n);
3644 /* Free the data structures allocated during this pass. */
3646 static void
3647 free_nesting_tree (struct nesting_info *root)
3649 struct nesting_info *node, *next;
3651 node = iter_nestinfo_start (root);
3654 next = iter_nestinfo_next (node);
3655 delete node->var_map;
3656 delete node->field_map;
3657 delete node->mem_refs;
3658 free (node);
3659 node = next;
3661 while (node);
3664 /* Gimplify a function and all its nested functions. */
3665 static void
3666 gimplify_all_functions (struct cgraph_node *root)
3668 struct cgraph_node *iter;
3669 if (!gimple_body (root->decl))
3670 gimplify_function_tree (root->decl);
3671 for (iter = first_nested_function (root); iter;
3672 iter = next_nested_function (iter))
3673 if (!iter->thunk)
3674 gimplify_all_functions (iter);
3677 /* Main entry point for this pass. Process FNDECL and all of its nested
3678 subroutines and turn them into something less tightly bound. */
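/* E.g. a nested function that reads its parent's local 'a' ends up
   receiving the parent's frame address through its static chain and
   referencing CHAIN->a, while the parent's own uses of 'a' become
   FRAME.a; the walks below perform these rewrites, then trampolines,
   nonlocal gotos and calls are fixed up and the now independent
   functions are handed over to the cgraph.  */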
3680 void
3681 lower_nested_functions (tree fndecl)
3683 struct cgraph_node *cgn;
3684 struct nesting_info *root;
3686 /* If there are no nested functions, there's nothing to do. */
3687 cgn = cgraph_node::get (fndecl);
3688 if (!first_nested_function (cgn))
3689 return;
3691 gimplify_all_functions (cgn);
3693 set_dump_file (dump_begin (TDI_nested, &dump_flags));
3694 if (dump_file)
3695 fprintf (dump_file, "\n;; Function %s\n\n",
3696 lang_hooks.decl_printable_name (fndecl, 2));
3698 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3699 root = create_nesting_tree (cgn);
3701 walk_all_functions (convert_nonlocal_reference_stmt,
3702 convert_nonlocal_reference_op,
3703 root);
3704 walk_all_functions (convert_local_reference_stmt,
3705 convert_local_reference_op,
3706 root);
3707 walk_all_functions (convert_nl_goto_reference, NULL, root);
3708 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3710 convert_all_function_calls (root);
3711 finalize_nesting_tree (root);
3712 unnest_nesting_tree (root);
3714 free_nesting_tree (root);
3715 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3717 if (dump_file)
3719 dump_end (TDI_nested, dump_file);
3720 set_dump_file (NULL);
3724 #include "gt-tree-nested.h"