rtl: ICE with thread_local and inline asm [PR104777]
[official-gcc.git] / gcc / tree-nested.cc
blob 078ceab3ca3ae0ed44ff87dddc21f41c8a38d2ef
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45 #include "alloc-pool.h"
46 #include "tree-nested.h"
47 #include "symbol-summary.h"
48 #include "symtab-thunks.h"
50 /* Summary of nested functions. */
51 static function_summary <nested_function_info *>
52 *nested_function_sum = NULL;
54 /* Return nested_function_info, if available. */
55 nested_function_info *
56 nested_function_info::get (cgraph_node *node)
58 if (!nested_function_sum)
59 return NULL;
60 return nested_function_sum->get (node);
63 /* Return nested_function_info possibly creating new one. */
64 nested_function_info *
65 nested_function_info::get_create (cgraph_node *node)
67 if (!nested_function_sum)
69 nested_function_sum = new function_summary <nested_function_info *>
70 (symtab);
71 nested_function_sum->disable_insertion_hook ();
73 return nested_function_sum->get_create (node);
76 /* cgraph_node is no longer a nested function; update cgraph accordingly. */
77 void
78 unnest_function (cgraph_node *node)
80 nested_function_info *info = nested_function_info::get (node);
81 cgraph_node **node2 = &nested_function_info::get
82 (nested_function_origin (node))->nested;
84 gcc_checking_assert (info->origin);
85 while (*node2 != node)
86 node2 = &nested_function_info::get (*node2)->next_nested;
87 *node2 = info->next_nested;
88 info->next_nested = NULL;
89 info->origin = NULL;
90 nested_function_sum->remove (node);
93 /* Destructor: unlink function from nested function lists. */
94 nested_function_info::~nested_function_info ()
96 cgraph_node *next;
97 for (cgraph_node *n = nested; n; n = next)
99 nested_function_info *info = nested_function_info::get (n);
100 next = info->next_nested;
101 info->origin = NULL;
102 info->next_nested = NULL;
104 nested = NULL;
105 if (origin)
107 cgraph_node **node2
108 = &nested_function_info::get (origin)->nested;
110 nested_function_info *info;
111 while ((info = nested_function_info::get (*node2)) != this && info)
112 node2 = &info->next_nested;
113 *node2 = next_nested;
117 /* Free nested function info summaries. */
118 void
119 nested_function_info::release ()
121 if (nested_function_sum)
122 delete (nested_function_sum);
123 nested_function_sum = NULL;
126 /* If NODE is a nested function, record it. */
127 void
128 maybe_record_nested_function (cgraph_node *node)
130 /* All nested functions get lowered during the construction of symtab. */
131 if (symtab->state > CONSTRUCTION)
132 return;
133 if (DECL_CONTEXT (node->decl)
134 && TREE_CODE (DECL_CONTEXT (node->decl)) == FUNCTION_DECL)
136 cgraph_node *origin = cgraph_node::get_create (DECL_CONTEXT (node->decl));
137 nested_function_info *info = nested_function_info::get_create (node);
138 nested_function_info *origin_info
139 = nested_function_info::get_create (origin);
141 info->origin = origin;
142 info->next_nested = origin_info->nested;
143 origin_info->nested = node;
147 /* The object of this pass is to lower the representation of a set of nested
148 functions in order to expose all of the gory details of the various
149 nonlocal references. We want to do this sooner rather than later, in
150 order to give us more freedom in emitting all of the functions in question.
152 Back in olden times, when gcc was young, we developed an insanely
153 complicated scheme whereby variables which were referenced nonlocally
154 were forced to live in the stack of the declaring function, and then
155 the nested functions magically discovered where these variables were
156 placed. In order for this scheme to function properly, it required
157 that the outer function be partially expanded, then we switch to
158 compiling the inner function, and once done with those we switch back
159 to compiling the outer function. Such delicate ordering requirements
160 make it difficult to do whole translation unit optimizations
161 involving such functions.
163 The implementation here is much more direct. Everything that can be
164 referenced by an inner function is a member of an explicitly created
165 structure herein called the "nonlocal frame struct". The incoming
166 static chain for a nested function is a pointer to this struct in
167 the parent. In this way, we settle on known offsets from a known
168 base, and so are decoupled from the logic that places objects in the
169 function's stack frame. More importantly, we don't have to wait for
170 that to happen -- since the compilation of the inner function is no
171 longer tied to a real stack frame, the nonlocal frame struct can be
172 allocated anywhere. Which means that the outer function is now
173 inlinable.
175 Theory of operation here is very simple. Iterate over all the
176 statements in all the functions (depth first) several times,
177 allocating structures and fields on demand. In general we want to
178 examine inner functions first, so that we can avoid making changes
179 to outer functions which are unnecessary.
181 The order of the passes matters a bit, in that later passes will be
182 skipped if it is discovered that the functions don't actually interact
183 at all. That is, they're nested in the lexical sense but could have
184 been written as independent functions without change. */
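/* For illustration only (a hand-written sketch; the FRAME/CHAIN names
   match the conventions used by this pass, but the transformed code
   shown is made up): given the GNU C nested function

       int outer (int n)
       {
         int x = n;
         int inner (void) { return x + 1; }
         return inner ();
       }

   the lowering performed here is conceptually equivalent to

       struct FRAME_outer { int x; };

       static int inner (struct FRAME_outer *chain) { return chain->x + 1; }

       int outer (int n)
       {
         struct FRAME_outer FRAME;
         FRAME.x = n;
         return inner (&FRAME);
       }

   i.e. X moves into the non-local frame struct and INNER receives the
   address of that struct as its static chain.  */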
187 struct nesting_info
189 struct nesting_info *outer;
190 struct nesting_info *inner;
191 struct nesting_info *next;
193 hash_map<tree, tree> *field_map;
194 hash_map<tree, tree> *var_map;
195 hash_set<tree *> *mem_refs;
196 bitmap suppress_expansion;
198 tree context;
199 tree new_local_var_chain;
200 tree debug_var_chain;
201 tree frame_type;
202 tree frame_decl;
203 tree chain_field;
204 tree chain_decl;
205 tree nl_goto_field;
207 bool thunk_p;
208 bool any_parm_remapped;
209 bool any_tramp_created;
210 bool any_descr_created;
211 char static_chain_added;
215 /* Iterate over the nesting tree, starting with ROOT, depth first. */
217 static inline struct nesting_info *
218 iter_nestinfo_start (struct nesting_info *root)
220 while (root->inner)
221 root = root->inner;
222 return root;
225 static inline struct nesting_info *
226 iter_nestinfo_next (struct nesting_info *node)
228 if (node->next)
229 return iter_nestinfo_start (node->next);
230 return node->outer;
233 #define FOR_EACH_NEST_INFO(I, ROOT) \
234 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
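/* Typical use of the iterator above, as in walk_all_functions later in
   this file:

       struct nesting_info *n;
       FOR_EACH_NEST_INFO (n, root)
         walk_function (callback_stmt, callback_op, n);

   which visits every nesting_info in the tree rooted at ROOT, innermost
   functions first.  */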
236 /* Obstack used for the bitmaps in the struct above. */
237 static struct bitmap_obstack nesting_info_bitmap_obstack;
240 /* We're working in so many different function contexts simultaneously
241 that create_tmp_var is dangerous. Prevent mishap. */
242 #define create_tmp_var cant_use_create_tmp_var_here_dummy
244 /* Like create_tmp_var, except record the variable for registration at
245 the given nesting level. */
247 static tree
248 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
250 tree tmp_var;
252 /* If the type is of variable size or a type which must be created by the
253 frontend, something is wrong. Note that we explicitly allow
254 incomplete types here, since we create them ourselves. */
255 gcc_assert (!TREE_ADDRESSABLE (type));
256 gcc_assert (!TYPE_SIZE_UNIT (type)
257 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
259 tmp_var = create_tmp_var_raw (type, prefix);
260 DECL_CONTEXT (tmp_var) = info->context;
261 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
262 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
264 info->new_local_var_chain = tmp_var;
266 return tmp_var;
269 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
271 static tree
272 build_simple_mem_ref_notrap (tree ptr)
274 tree t = build_simple_mem_ref (ptr);
275 TREE_THIS_NOTRAP (t) = 1;
276 return t;
279 /* Take the address of EXP to be used within function CONTEXT.
280 Mark it for addressability as necessary. */
282 tree
283 build_addr (tree exp)
285 mark_addressable (exp);
286 return build_fold_addr_expr (exp);
289 /* Insert FIELD into TYPE, sorted by alignment requirements. */
291 void
292 insert_field_into_struct (tree type, tree field)
294 tree *p;
296 DECL_CONTEXT (field) = type;
298 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
299 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
300 break;
302 DECL_CHAIN (field) = *p;
303 *p = field;
305 /* Set correct alignment for frame struct type. */
306 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
307 SET_TYPE_ALIGN (type, DECL_ALIGN (field));
310 /* Build or return the RECORD_TYPE that describes the frame state that is
311 shared between INFO->CONTEXT and its nested functions. This record will
312 not be complete until finalize_nesting_tree; up until that point we'll
313 be adding fields as necessary.
315 We also build the DECL that represents this frame in the function. */
317 static tree
318 get_frame_type (struct nesting_info *info)
320 tree type = info->frame_type;
321 if (!type)
323 char *name;
325 type = make_node (RECORD_TYPE);
327 name = concat ("FRAME.",
328 IDENTIFIER_POINTER (DECL_NAME (info->context)),
329 NULL);
330 TYPE_NAME (type) = get_identifier (name);
331 free (name);
333 info->frame_type = type;
335 /* Do not put info->frame_decl on info->new_local_var_chain,
336 so that we can declare it in the lexical blocks, which
337 makes sure virtual regs that end up appearing in its RTL
338 expression get substituted in instantiate_virtual_regs. */
339 info->frame_decl = create_tmp_var_raw (type, "FRAME");
340 DECL_CONTEXT (info->frame_decl) = info->context;
341 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
342 DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;
344 /* ??? Always make it addressable for now, since it is meant to
345 be pointed to by the static chain pointer. This pessimizes
346 when it turns out that no static chains are needed because
347 the nested functions referencing non-local variables are not
348 reachable, but the true pessimization is to create the non-
349 local frame structure in the first place. */
350 TREE_ADDRESSABLE (info->frame_decl) = 1;
353 return type;
356 /* Return true if DECL should be referenced by pointer in the non-local frame
357 structure. */
359 static bool
360 use_pointer_in_frame (tree decl)
362 if (TREE_CODE (decl) == PARM_DECL)
364 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
365 sized DECLs, and inefficient to copy large aggregates. Don't bother
366 moving anything but scalar parameters. */
367 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
369 else
371 /* Variable-sized DECLs can only come from OMP clauses at this point
372 since the gimplifier has already turned the regular variables into
373 pointers. Do the same as the gimplifier. */
374 return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
378 /* Given DECL, a non-locally accessed variable, find or create a field
379 in the non-local frame structure for the given nesting context. */
381 static tree
382 lookup_field_for_decl (struct nesting_info *info, tree decl,
383 enum insert_option insert)
385 gcc_checking_assert (decl_function_context (decl) == info->context);
387 if (insert == NO_INSERT)
389 tree *slot = info->field_map->get (decl);
390 return slot ? *slot : NULL_TREE;
393 tree *slot = &info->field_map->get_or_insert (decl);
394 if (!*slot)
396 tree type = get_frame_type (info);
397 tree field = make_node (FIELD_DECL);
398 DECL_NAME (field) = DECL_NAME (decl);
400 if (use_pointer_in_frame (decl))
402 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
403 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
404 DECL_NONADDRESSABLE_P (field) = 1;
406 else
408 TREE_TYPE (field) = TREE_TYPE (decl);
409 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
410 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
411 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
412 DECL_IGNORED_P (field) = DECL_IGNORED_P (decl);
413 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
414 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
415 copy_warning (field, decl);
417 /* Declare the transformation and adjust the original DECL. For a
418 variable or for a parameter when not optimizing, we make it point
419 to the field in the frame directly. For a parameter, we don't do
420 it when optimizing because the variable tracking pass will already
421 do the job. */
422 if (VAR_P (decl) || !optimize)
424 tree x
425 = build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
426 field, NULL_TREE);
428 /* If the next declaration is a PARM_DECL pointing to the DECL,
429 we need to adjust its VALUE_EXPR directly, since chains of
430 VALUE_EXPRs run afoul of garbage collection. This occurs
431 in Ada for Out parameters that aren't copied in. */
432 tree next = DECL_CHAIN (decl);
433 if (next
434 && TREE_CODE (next) == PARM_DECL
435 && DECL_HAS_VALUE_EXPR_P (next)
436 && DECL_VALUE_EXPR (next) == decl)
437 SET_DECL_VALUE_EXPR (next, x);
439 SET_DECL_VALUE_EXPR (decl, x);
440 DECL_HAS_VALUE_EXPR_P (decl) = 1;
444 insert_field_into_struct (type, field);
445 *slot = field;
447 if (TREE_CODE (decl) == PARM_DECL)
448 info->any_parm_remapped = true;
451 return *slot;
454 /* Build or return the variable that holds the static chain within
455 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
457 static tree
458 get_chain_decl (struct nesting_info *info)
460 tree decl = info->chain_decl;
462 if (!decl)
464 tree type;
466 type = get_frame_type (info->outer);
467 type = build_pointer_type (type);
469 /* Note that this variable is *not* entered into any BIND_EXPR;
470 the construction of this variable is handled specially in
471 expand_function_start and initialize_inlined_parameters.
472 Note also that it's represented as a parameter. This is closer
473 to the truth, since the initial value does come from
474 the caller. */
475 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
476 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
477 DECL_ARTIFICIAL (decl) = 1;
478 DECL_IGNORED_P (decl) = 1;
479 TREE_USED (decl) = 1;
480 DECL_CONTEXT (decl) = info->context;
481 DECL_ARG_TYPE (decl) = type;
483 /* Tell tree-inline.cc that we never write to this variable, so
484 it can copy-prop the replacement value immediately. */
485 TREE_READONLY (decl) = 1;
487 info->chain_decl = decl;
489 if (dump_file
490 && (dump_flags & TDF_DETAILS)
491 && !DECL_STATIC_CHAIN (info->context))
492 fprintf (dump_file, "Setting static-chain for %s\n",
493 lang_hooks.decl_printable_name (info->context, 2));
495 DECL_STATIC_CHAIN (info->context) = 1;
497 return decl;
500 /* Build or return the field within the non-local frame state that holds
501 the static chain for INFO->CONTEXT. This is the way to walk back up
502 multiple nesting levels. */
504 static tree
505 get_chain_field (struct nesting_info *info)
507 tree field = info->chain_field;
509 if (!field)
511 tree type = build_pointer_type (get_frame_type (info->outer));
513 field = make_node (FIELD_DECL);
514 DECL_NAME (field) = get_identifier ("__chain");
515 TREE_TYPE (field) = type;
516 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
517 DECL_NONADDRESSABLE_P (field) = 1;
519 insert_field_into_struct (get_frame_type (info), field);
521 info->chain_field = field;
523 if (dump_file
524 && (dump_flags & TDF_DETAILS)
525 && !DECL_STATIC_CHAIN (info->context))
526 fprintf (dump_file, "Setting static-chain for %s\n",
527 lang_hooks.decl_printable_name (info->context, 2));
529 DECL_STATIC_CHAIN (info->context) = 1;
531 return field;
534 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
536 static tree
537 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
538 gcall *call)
540 tree t;
542 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
543 gimple_call_set_lhs (call, t);
544 if (! gsi_end_p (*gsi))
545 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
546 gsi_insert_before (gsi, call, GSI_SAME_STMT);
548 return t;
552 /* Copy EXP into a temporary. Allocate the temporary in the context of
553 INFO and insert the initialization statement before GSI. */
555 static tree
556 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
558 tree t;
559 gimple *stmt;
561 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
562 stmt = gimple_build_assign (t, exp);
563 if (! gsi_end_p (*gsi))
564 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
565 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
567 return t;
571 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
573 static tree
574 gsi_gimplify_val (struct nesting_info *info, tree exp,
575 gimple_stmt_iterator *gsi)
577 if (is_gimple_val (exp))
578 return exp;
579 else
580 return init_tmp_var (info, exp, gsi);
583 /* Similarly, but copy from the temporary and insert the statement
584 after the iterator. */
586 static tree
587 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
589 tree t;
590 gimple *stmt;
592 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
593 stmt = gimple_build_assign (exp, t);
594 if (! gsi_end_p (*gsi))
595 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
596 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
598 return t;
601 /* Build or return the type used to represent a nested function trampoline. */
603 static GTY(()) tree trampoline_type;
605 static tree
606 get_trampoline_type (struct nesting_info *info)
608 unsigned align, size;
609 tree t;
611 if (trampoline_type)
612 return trampoline_type;
614 align = TRAMPOLINE_ALIGNMENT;
615 size = TRAMPOLINE_SIZE;
617 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
618 then allocate extra space so that we can do dynamic alignment. */
619 if (align > STACK_BOUNDARY)
621 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
622 align = STACK_BOUNDARY;
625 t = build_index_type (size_int (size - 1));
626 t = build_array_type (char_type_node, t);
627 t = build_decl (DECL_SOURCE_LOCATION (info->context),
628 FIELD_DECL, get_identifier ("__data"), t);
629 SET_DECL_ALIGN (t, align);
630 DECL_USER_ALIGN (t) = 1;
632 trampoline_type = make_node (RECORD_TYPE);
633 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
634 TYPE_FIELDS (trampoline_type) = t;
635 layout_type (trampoline_type);
636 DECL_CONTEXT (t) = trampoline_type;
638 return trampoline_type;
641 /* Build or return the type used to represent a nested function descriptor. */
643 static GTY(()) tree descriptor_type;
645 static tree
646 get_descriptor_type (struct nesting_info *info)
648 /* The base alignment is that of a function. */
649 const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
650 tree t;
652 if (descriptor_type)
653 return descriptor_type;
655 t = build_index_type (integer_one_node);
656 t = build_array_type (ptr_type_node, t);
657 t = build_decl (DECL_SOURCE_LOCATION (info->context),
658 FIELD_DECL, get_identifier ("__data"), t);
659 SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
660 DECL_USER_ALIGN (t) = 1;
662 descriptor_type = make_node (RECORD_TYPE);
663 TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
664 TYPE_FIELDS (descriptor_type) = t;
665 layout_type (descriptor_type);
666 DECL_CONTEXT (t) = descriptor_type;
668 return descriptor_type;
671 /* Given DECL, a nested function, find or create an element in the
672 var map for this function. */
674 static tree
675 lookup_element_for_decl (struct nesting_info *info, tree decl,
676 enum insert_option insert)
678 if (insert == NO_INSERT)
680 tree *slot = info->var_map->get (decl);
681 return slot ? *slot : NULL_TREE;
684 tree *slot = &info->var_map->get_or_insert (decl);
685 if (!*slot)
686 *slot = build_tree_list (NULL_TREE, NULL_TREE);
688 return (tree) *slot;
691 /* Given DECL, a nested function, create a field in the non-local
692 frame structure for this function. */
694 static tree
695 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
697 tree field = make_node (FIELD_DECL);
698 DECL_NAME (field) = DECL_NAME (decl);
699 TREE_TYPE (field) = type;
700 TREE_ADDRESSABLE (field) = 1;
701 insert_field_into_struct (get_frame_type (info), field);
702 return field;
705 /* Given DECL, a nested function, find or create a field in the non-local
706 frame structure for a trampoline for this function. */
708 static tree
709 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
710 enum insert_option insert)
712 tree elt, field;
714 elt = lookup_element_for_decl (info, decl, insert);
715 if (!elt)
716 return NULL_TREE;
718 field = TREE_PURPOSE (elt);
720 if (!field && insert == INSERT)
722 field = create_field_for_decl (info, decl, get_trampoline_type (info));
723 TREE_PURPOSE (elt) = field;
724 info->any_tramp_created = true;
727 return field;
730 /* Given DECL, a nested function, find or create a field in the non-local
731 frame structure for a descriptor for this function. */
733 static tree
734 lookup_descr_for_decl (struct nesting_info *info, tree decl,
735 enum insert_option insert)
737 tree elt, field;
739 elt = lookup_element_for_decl (info, decl, insert);
740 if (!elt)
741 return NULL_TREE;
743 field = TREE_VALUE (elt);
745 if (!field && insert == INSERT)
747 field = create_field_for_decl (info, decl, get_descriptor_type (info));
748 TREE_VALUE (elt) = field;
749 info->any_descr_created = true;
752 return field;
755 /* Build or return the field within the non-local frame state that holds
756 the non-local goto "jmp_buf". The buffer itself is maintained by the
757 rtl middle-end as dynamic stack space is allocated. */
759 static tree
760 get_nl_goto_field (struct nesting_info *info)
762 tree field = info->nl_goto_field;
763 if (!field)
765 unsigned size;
766 tree type;
768 /* For __builtin_nonlocal_goto, we need N words. The first is the
769 frame pointer, the rest is for the target's stack pointer save
770 area. The number of words is controlled by STACK_SAVEAREA_MODE;
771 not the best interface, but it'll do for now. */
772 if (Pmode == ptr_mode)
773 type = ptr_type_node;
774 else
775 type = lang_hooks.types.type_for_mode (Pmode, 1);
777 scalar_int_mode mode
778 = as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
779 size = GET_MODE_SIZE (mode);
780 size = size / GET_MODE_SIZE (Pmode);
781 size = size + 1;
783 type = build_array_type
784 (type, build_index_type (size_int (size)));
786 field = make_node (FIELD_DECL);
787 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
788 TREE_TYPE (field) = type;
789 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
790 TREE_ADDRESSABLE (field) = 1;
792 insert_field_into_struct (get_frame_type (info), field);
794 info->nl_goto_field = field;
797 return field;
800 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
802 static void
803 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
804 struct nesting_info *info, gimple_seq *pseq)
806 struct walk_stmt_info wi;
808 memset (&wi, 0, sizeof (wi));
809 wi.info = info;
810 wi.val_only = true;
811 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
815 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
817 static inline void
818 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
819 struct nesting_info *info)
821 gimple_seq body = gimple_body (info->context);
822 walk_body (callback_stmt, callback_op, info, &body);
823 gimple_set_body (info->context, body);
826 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
828 static void
829 walk_gimple_omp_for (gomp_for *for_stmt,
830 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
831 struct nesting_info *info)
833 struct walk_stmt_info wi;
834 gimple_seq seq;
835 tree t;
836 size_t i;
838 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
840 seq = NULL;
841 memset (&wi, 0, sizeof (wi));
842 wi.info = info;
843 wi.gsi = gsi_last (seq);
845 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
847 wi.val_only = false;
848 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
849 &wi, NULL);
850 wi.val_only = true;
851 wi.is_lhs = false;
852 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
853 &wi, NULL);
855 wi.val_only = true;
856 wi.is_lhs = false;
857 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
858 &wi, NULL);
860 t = gimple_omp_for_incr (for_stmt, i);
861 gcc_assert (BINARY_CLASS_P (t));
862 wi.val_only = false;
863 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
864 wi.val_only = true;
865 wi.is_lhs = false;
866 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
869 seq = gsi_seq (wi.gsi);
870 if (!gimple_seq_empty_p (seq))
872 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
873 annotate_all_with_location (seq, gimple_location (for_stmt));
874 gimple_seq_add_seq (&pre_body, seq);
875 gimple_omp_for_set_pre_body (for_stmt, pre_body);
879 /* Similarly for ROOT and all functions nested underneath, depth first. */
881 static void
882 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
883 struct nesting_info *root)
885 struct nesting_info *n;
886 FOR_EACH_NEST_INFO (n, root)
887 walk_function (callback_stmt, callback_op, n);
891 /* We have to check for a fairly pathological case. The operands of a
892 nested function are to be interpreted in the context of the enclosing
893 function. So if any are variably-sized, they will get remapped when the
894 enclosing function is inlined. But that remapping would also have to be
895 done in the types of the PARM_DECLs of the nested function, meaning the
896 argument types of that function will disagree with the arguments in the
897 calls to that function. So we'd either have to make a copy of the nested
898 function corresponding to each time the enclosing function was inlined or
899 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
900 function. The former is not practical. The latter would still require
901 detecting this case to know when to add the conversions. So, for now at
902 least, we don't inline such an enclosing function.
904 We have to do that check recursively, so here return indicating whether
905 FNDECL has such a nested function. ORIG_FN is the function we were
906 trying to inline to use for checking whether any argument is variably
907 modified by anything in it.
909 It would be better to do this in tree-inline.cc so that we could give
910 the appropriate warning for why a function can't be inlined, but that's
911 too late since the nesting structure has already been flattened and
912 adding a flag just to record this fact seems a waste of a flag. */
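/* Hand-written illustration of the case described above (the names are
   made up, not taken from an actual testcase): in

       void f (int n)
       {
         typedef int row_t[n];
         void g (row_t *p) { (*p)[0] = 0; }
         ...
       }

   the type of G's parameter P is variably modified by F's parameter N.
   Inlining F would therefore require remapping the PARM_DECL types of
   G as well, so create_nesting_tree instead marks F as
   DECL_UNINLINABLE.  */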
914 static bool
915 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
917 struct cgraph_node *cgn = cgraph_node::get (fndecl);
918 tree arg;
920 for (cgn = first_nested_function (cgn); cgn;
921 cgn = next_nested_function (cgn))
923 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
924 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
925 return true;
927 if (check_for_nested_with_variably_modified (cgn->decl,
928 orig_fndecl))
929 return true;
932 return false;
935 /* Construct our local datastructure describing the function nesting
936 tree rooted by CGN. */
938 static struct nesting_info *
939 create_nesting_tree (struct cgraph_node *cgn)
941 struct nesting_info *info = XCNEW (struct nesting_info);
942 info->field_map = new hash_map<tree, tree>;
943 info->var_map = new hash_map<tree, tree>;
944 info->mem_refs = new hash_set<tree *>;
945 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
946 info->context = cgn->decl;
947 info->thunk_p = cgn->thunk;
949 for (cgn = first_nested_function (cgn); cgn;
950 cgn = next_nested_function (cgn))
952 struct nesting_info *sub = create_nesting_tree (cgn);
953 sub->outer = info;
954 sub->next = info->inner;
955 info->inner = sub;
958 /* See the comment at check_for_nested_with_variably_modified for a
959 discussion of why this has to be here. */
960 if (check_for_nested_with_variably_modified (info->context, info->context))
961 DECL_UNINLINABLE (info->context) = true;
963 return info;
966 /* Return an expression computing the static chain for TARGET_CONTEXT
967 from INFO->CONTEXT. Insert any necessary computations before GSI. */
969 static tree
970 get_static_chain (struct nesting_info *info, tree target_context,
971 gimple_stmt_iterator *gsi)
973 struct nesting_info *i;
974 tree x;
976 if (info->context == target_context)
978 x = build_addr (info->frame_decl);
979 info->static_chain_added |= 1;
981 else
983 x = get_chain_decl (info);
984 info->static_chain_added |= 2;
986 for (i = info->outer; i->context != target_context; i = i->outer)
988 tree field = get_chain_field (i);
990 x = build_simple_mem_ref_notrap (x);
991 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
992 x = init_tmp_var (info, x, gsi);
996 return x;
1000 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
1001 frame as seen from INFO->CONTEXT. Insert any necessary computations
1002 before GSI. */
1004 static tree
1005 get_frame_field (struct nesting_info *info, tree target_context,
1006 tree field, gimple_stmt_iterator *gsi)
1008 struct nesting_info *i;
1009 tree x;
1011 if (info->context == target_context)
1013 /* Make sure frame_decl gets created. */
1014 (void) get_frame_type (info);
1015 x = info->frame_decl;
1016 info->static_chain_added |= 1;
1018 else
1020 x = get_chain_decl (info);
1021 info->static_chain_added |= 2;
1023 for (i = info->outer; i->context != target_context; i = i->outer)
1025 tree field = get_chain_field (i);
1027 x = build_simple_mem_ref_notrap (x);
1028 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1029 x = init_tmp_var (info, x, gsi);
1032 x = build_simple_mem_ref_notrap (x);
1035 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1036 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (field);
1037 return x;
1040 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
1042 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
1043 in the nested function with DECL_VALUE_EXPR set to reference the true
1044 variable in the parent function. This is used both for debug info
1045 and in OMP lowering. */
1047 static tree
1048 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
1050 tree target_context;
1051 struct nesting_info *i;
1052 tree x, field, new_decl;
1054 tree *slot = &info->var_map->get_or_insert (decl);
1056 if (*slot)
1057 return *slot;
1059 target_context = decl_function_context (decl);
1061 /* A copy of the code in get_frame_field, but without the temporaries. */
1062 if (info->context == target_context)
1064 /* Make sure frame_decl gets created. */
1065 (void) get_frame_type (info);
1066 x = info->frame_decl;
1067 i = info;
1068 info->static_chain_added |= 1;
1070 else
1072 x = get_chain_decl (info);
1073 info->static_chain_added |= 2;
1074 for (i = info->outer; i->context != target_context; i = i->outer)
1076 field = get_chain_field (i);
1077 x = build_simple_mem_ref_notrap (x);
1078 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1080 x = build_simple_mem_ref_notrap (x);
1083 field = lookup_field_for_decl (i, decl, INSERT);
1084 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1085 if (use_pointer_in_frame (decl))
1086 x = build_simple_mem_ref_notrap (x);
1088 /* ??? We should be remapping types as well, surely. */
1089 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1090 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1091 DECL_CONTEXT (new_decl) = info->context;
1092 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1093 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1094 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1095 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1096 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1097 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1098 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1099 if ((TREE_CODE (decl) == PARM_DECL
1100 || TREE_CODE (decl) == RESULT_DECL
1101 || VAR_P (decl))
1102 && DECL_BY_REFERENCE (decl))
1103 DECL_BY_REFERENCE (new_decl) = 1;
1105 SET_DECL_VALUE_EXPR (new_decl, x);
1106 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1108 *slot = new_decl;
1109 DECL_CHAIN (new_decl) = info->debug_var_chain;
1110 info->debug_var_chain = new_decl;
1112 if (!optimize
1113 && info->context != target_context
1114 && variably_modified_type_p (TREE_TYPE (decl), NULL))
1115 note_nonlocal_vla_type (info, TREE_TYPE (decl));
1117 return new_decl;
1121 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1122 and PARM_DECLs that belong to outer functions.
1124 The rewrite will involve some number of structure accesses back up
1125 the static chain. E.g. for a variable FOO up one nesting level it'll
1126 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
1127 indirections apply to decls for which use_pointer_in_frame is true. */
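/* A hand-written sketch of the rewrite described above (illustrative
   only): for

       void a (void)
       {
         int foo;
         void b (void)
         {
           void c (void) { foo++; }
           ...
         }
         ...
       }

   the use of FOO inside C becomes CHAIN->__chain->foo: one
   COMPONENT_REF per nesting level walked back up the static chain,
   using the __chain field built by get_chain_field above.  */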
1129 static tree
1130 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
1132 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1133 struct nesting_info *const info = (struct nesting_info *) wi->info;
1134 tree t = *tp;
1136 *walk_subtrees = 0;
1137 switch (TREE_CODE (t))
1139 case VAR_DECL:
1140 /* Non-automatic variables are never processed. */
1141 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1142 break;
1143 /* FALLTHRU */
1145 case PARM_DECL:
1147 tree x, target_context = decl_function_context (t);
1149 if (info->context == target_context)
1150 break;
1152 wi->changed = true;
1154 if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1155 x = get_nonlocal_debug_decl (info, t);
1156 else
1158 struct nesting_info *i = info;
1159 while (i && i->context != target_context)
1160 i = i->outer;
1161 /* If none of the outer contexts is the target context, this means
1162 that the VAR or PARM_DECL is referenced in a wrong context. */
1163 if (!i)
1164 internal_error ("%s from %s referenced in %s",
1165 IDENTIFIER_POINTER (DECL_NAME (t)),
1166 IDENTIFIER_POINTER (DECL_NAME (target_context)),
1167 IDENTIFIER_POINTER (DECL_NAME (info->context)));
1169 x = lookup_field_for_decl (i, t, INSERT);
1170 x = get_frame_field (info, target_context, x, &wi->gsi);
1171 if (use_pointer_in_frame (t))
1173 x = init_tmp_var (info, x, &wi->gsi);
1174 x = build_simple_mem_ref_notrap (x);
1178 if (wi->val_only)
1180 if (wi->is_lhs)
1181 x = save_tmp_var (info, x, &wi->gsi);
1182 else
1183 x = init_tmp_var (info, x, &wi->gsi);
1186 *tp = x;
1188 break;
1190 case LABEL_DECL:
1191 /* We're taking the address of a label from a parent function, but
1192 this is not itself a non-local goto. Mark the label such that it
1193 will not be deleted, much as we would with a label address in
1194 static storage. */
1195 if (decl_function_context (t) != info->context)
1196 FORCED_LABEL (t) = 1;
1197 break;
1199 case ADDR_EXPR:
1201 bool save_val_only = wi->val_only;
1203 wi->val_only = false;
1204 wi->is_lhs = false;
1205 wi->changed = false;
1206 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
1207 wi->val_only = true;
1209 if (wi->changed)
1211 tree save_context;
1213 /* If we changed anything, we might no longer be directly
1214 referencing a decl. */
1215 save_context = current_function_decl;
1216 current_function_decl = info->context;
1217 recompute_tree_invariant_for_addr_expr (t);
1219 /* If the callback converted the address argument in a context
1220 where we only accept variables (and min_invariant, presumably),
1221 then compute the address into a temporary. */
1222 if (save_val_only)
1223 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1224 t, &wi->gsi);
1225 current_function_decl = save_context;
1228 break;
1230 case REALPART_EXPR:
1231 case IMAGPART_EXPR:
1232 case COMPONENT_REF:
1233 case ARRAY_REF:
1234 case ARRAY_RANGE_REF:
1235 case BIT_FIELD_REF:
1236 /* Go down this entire nest and just look at the final prefix and
1237 anything that describes the references. Otherwise, we lose track
1238 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1239 wi->val_only = true;
1240 wi->is_lhs = false;
1241 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1243 if (TREE_CODE (t) == COMPONENT_REF)
1244 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1245 NULL);
1246 else if (TREE_CODE (t) == ARRAY_REF
1247 || TREE_CODE (t) == ARRAY_RANGE_REF)
1249 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1250 wi, NULL);
1251 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1252 wi, NULL);
1253 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1254 wi, NULL);
1257 wi->val_only = false;
1258 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1259 break;
1261 case VIEW_CONVERT_EXPR:
1262 /* Just request to look at the subtrees, leaving val_only and lhs
1263 untouched. This might actually be for !val_only + lhs, in which
1264 case we don't want to force a replacement by a temporary. */
1265 *walk_subtrees = 1;
1266 break;
1268 default:
1269 if (!IS_TYPE_OR_DECL_P (t))
1271 *walk_subtrees = 1;
1272 wi->val_only = true;
1273 wi->is_lhs = false;
1275 break;
1278 return NULL_TREE;
1281 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1282 struct walk_stmt_info *);
1284 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1285 and PARM_DECLs that belong to outer functions. */
1287 static bool
1288 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1290 struct nesting_info *const info = (struct nesting_info *) wi->info;
1291 bool need_chain = false, need_stmts = false;
1292 tree clause, decl, *pdecl;
1293 int dummy;
1294 bitmap new_suppress;
1296 new_suppress = BITMAP_GGC_ALLOC ();
1297 bitmap_copy (new_suppress, info->suppress_expansion);
1299 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1301 pdecl = NULL;
1302 switch (OMP_CLAUSE_CODE (clause))
1304 case OMP_CLAUSE_REDUCTION:
1305 case OMP_CLAUSE_IN_REDUCTION:
1306 case OMP_CLAUSE_TASK_REDUCTION:
1307 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1308 need_stmts = true;
1309 if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
1311 pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
1312 if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
1313 pdecl = &TREE_OPERAND (*pdecl, 0);
1314 if (TREE_CODE (*pdecl) == INDIRECT_REF
1315 || TREE_CODE (*pdecl) == ADDR_EXPR)
1316 pdecl = &TREE_OPERAND (*pdecl, 0);
1318 goto do_decl_clause;
1320 case OMP_CLAUSE_LASTPRIVATE:
1321 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1322 need_stmts = true;
1323 goto do_decl_clause;
1325 case OMP_CLAUSE_LINEAR:
1326 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1327 need_stmts = true;
1328 wi->val_only = true;
1329 wi->is_lhs = false;
1330 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1331 &dummy, wi);
1332 goto do_decl_clause;
1334 case OMP_CLAUSE_PRIVATE:
1335 case OMP_CLAUSE_FIRSTPRIVATE:
1336 case OMP_CLAUSE_COPYPRIVATE:
1337 case OMP_CLAUSE_SHARED:
1338 case OMP_CLAUSE_TO_DECLARE:
1339 case OMP_CLAUSE_LINK:
1340 case OMP_CLAUSE_USE_DEVICE_PTR:
1341 case OMP_CLAUSE_USE_DEVICE_ADDR:
1342 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1343 case OMP_CLAUSE_IS_DEVICE_PTR:
1344 case OMP_CLAUSE_DETACH:
1345 do_decl_clause:
1346 if (pdecl == NULL)
1347 pdecl = &OMP_CLAUSE_DECL (clause);
1348 decl = *pdecl;
1349 if (VAR_P (decl)
1350 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1351 break;
1352 if (decl_function_context (decl) != info->context)
1354 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1355 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1356 bitmap_set_bit (new_suppress, DECL_UID (decl));
1357 *pdecl = get_nonlocal_debug_decl (info, decl);
1358 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1359 need_chain = true;
1361 break;
1363 case OMP_CLAUSE_SCHEDULE:
1364 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1365 break;
1366 /* FALLTHRU */
1367 case OMP_CLAUSE_FINAL:
1368 case OMP_CLAUSE_IF:
1369 case OMP_CLAUSE_NUM_THREADS:
1370 case OMP_CLAUSE_DEPEND:
1371 case OMP_CLAUSE_DEVICE:
1372 case OMP_CLAUSE_NUM_TEAMS:
1373 case OMP_CLAUSE_THREAD_LIMIT:
1374 case OMP_CLAUSE_SAFELEN:
1375 case OMP_CLAUSE_SIMDLEN:
1376 case OMP_CLAUSE_PRIORITY:
1377 case OMP_CLAUSE_GRAINSIZE:
1378 case OMP_CLAUSE_NUM_TASKS:
1379 case OMP_CLAUSE_HINT:
1380 case OMP_CLAUSE_FILTER:
1381 case OMP_CLAUSE_NUM_GANGS:
1382 case OMP_CLAUSE_NUM_WORKERS:
1383 case OMP_CLAUSE_VECTOR_LENGTH:
1384 case OMP_CLAUSE_GANG:
1385 case OMP_CLAUSE_WORKER:
1386 case OMP_CLAUSE_VECTOR:
1387 case OMP_CLAUSE_ASYNC:
1388 case OMP_CLAUSE_WAIT:
1389 /* Several OpenACC clauses have optional arguments. Check if they
1390 are present. */
1391 if (OMP_CLAUSE_OPERAND (clause, 0))
1393 wi->val_only = true;
1394 wi->is_lhs = false;
1395 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1396 &dummy, wi);
1399 /* The gang clause accepts two arguments. */
1400 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1401 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1403 wi->val_only = true;
1404 wi->is_lhs = false;
1405 convert_nonlocal_reference_op
1406 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1408 break;
1410 case OMP_CLAUSE_DIST_SCHEDULE:
1411 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1413 wi->val_only = true;
1414 wi->is_lhs = false;
1415 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1416 &dummy, wi);
1418 break;
1420 case OMP_CLAUSE_MAP:
1421 case OMP_CLAUSE_TO:
1422 case OMP_CLAUSE_FROM:
1423 if (OMP_CLAUSE_SIZE (clause))
1425 wi->val_only = true;
1426 wi->is_lhs = false;
1427 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1428 &dummy, wi);
1430 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1431 goto do_decl_clause;
1432 wi->val_only = true;
1433 wi->is_lhs = false;
1434 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1435 wi, NULL);
1436 break;
1438 case OMP_CLAUSE_ALIGNED:
1439 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1441 wi->val_only = true;
1442 wi->is_lhs = false;
1443 convert_nonlocal_reference_op
1444 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1446 /* FALLTHRU */
1447 case OMP_CLAUSE_NONTEMPORAL:
1448 do_decl_clause_no_supp:
1449 /* Like do_decl_clause, but don't add any suppression. */
1450 decl = OMP_CLAUSE_DECL (clause);
1451 if (VAR_P (decl)
1452 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1453 break;
1454 if (decl_function_context (decl) != info->context)
1456 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1457 need_chain = true;
1459 break;
1461 case OMP_CLAUSE_ALLOCATE:
1462 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause))
1464 wi->val_only = true;
1465 wi->is_lhs = false;
1466 convert_nonlocal_reference_op
1467 (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), &dummy, wi);
1469 goto do_decl_clause_no_supp;
1471 case OMP_CLAUSE_NOWAIT:
1472 case OMP_CLAUSE_ORDERED:
1473 case OMP_CLAUSE_DEFAULT:
1474 case OMP_CLAUSE_COPYIN:
1475 case OMP_CLAUSE_COLLAPSE:
1476 case OMP_CLAUSE_TILE:
1477 case OMP_CLAUSE_UNTIED:
1478 case OMP_CLAUSE_MERGEABLE:
1479 case OMP_CLAUSE_PROC_BIND:
1480 case OMP_CLAUSE_NOGROUP:
1481 case OMP_CLAUSE_THREADS:
1482 case OMP_CLAUSE_SIMD:
1483 case OMP_CLAUSE_DEFAULTMAP:
1484 case OMP_CLAUSE_ORDER:
1485 case OMP_CLAUSE_SEQ:
1486 case OMP_CLAUSE_INDEPENDENT:
1487 case OMP_CLAUSE_AUTO:
1488 case OMP_CLAUSE_IF_PRESENT:
1489 case OMP_CLAUSE_FINALIZE:
1490 case OMP_CLAUSE_BIND:
1491 case OMP_CLAUSE__CONDTEMP_:
1492 case OMP_CLAUSE__SCANTEMP_:
1493 break;
1495 /* The following clause belongs to the OpenACC cache directive, which
1496 is discarded during gimplification. */
1497 case OMP_CLAUSE__CACHE_:
1498 /* The following clauses are only allowed in the OpenMP declare simd
1499 directive, so not seen here. */
1500 case OMP_CLAUSE_UNIFORM:
1501 case OMP_CLAUSE_INBRANCH:
1502 case OMP_CLAUSE_NOTINBRANCH:
1503 /* The following clauses are only allowed on OpenMP cancel and
1504 cancellation point directives, which at this point have already
1505 been lowered into a function call. */
1506 case OMP_CLAUSE_FOR:
1507 case OMP_CLAUSE_PARALLEL:
1508 case OMP_CLAUSE_SECTIONS:
1509 case OMP_CLAUSE_TASKGROUP:
1510 /* The following clauses are only added during OMP lowering; nested
1511 function decomposition happens before that. */
1512 case OMP_CLAUSE__LOOPTEMP_:
1513 case OMP_CLAUSE__REDUCTEMP_:
1514 case OMP_CLAUSE__SIMDUID_:
1515 case OMP_CLAUSE__SIMT_:
1516 /* The following clauses are only allowed on OpenACC 'routine'
1517 directives, not seen here. */
1518 case OMP_CLAUSE_NOHOST:
1519 /* Anything else. */
1520 default:
1521 gcc_unreachable ();
1525 info->suppress_expansion = new_suppress;
1527 if (need_stmts)
1528 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1529 switch (OMP_CLAUSE_CODE (clause))
1531 case OMP_CLAUSE_REDUCTION:
1532 case OMP_CLAUSE_IN_REDUCTION:
1533 case OMP_CLAUSE_TASK_REDUCTION:
1534 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1536 tree old_context
1537 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1538 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1539 = info->context;
1540 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1541 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1542 = info->context;
1543 tree save_local_var_chain = info->new_local_var_chain;
1544 info->new_local_var_chain = NULL;
1545 gimple_seq *seq = &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause);
1546 walk_body (convert_nonlocal_reference_stmt,
1547 convert_nonlocal_reference_op, info, seq);
1548 if (info->new_local_var_chain)
1549 declare_vars (info->new_local_var_chain,
1550 gimple_seq_first_stmt (*seq), false);
1551 info->new_local_var_chain = NULL;
1552 seq = &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause);
1553 walk_body (convert_nonlocal_reference_stmt,
1554 convert_nonlocal_reference_op, info, seq);
1555 if (info->new_local_var_chain)
1556 declare_vars (info->new_local_var_chain,
1557 gimple_seq_first_stmt (*seq), false);
1558 info->new_local_var_chain = save_local_var_chain;
1559 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1560 = old_context;
1561 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1562 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1563 = old_context;
1565 break;
1567 case OMP_CLAUSE_LASTPRIVATE:
1569 tree save_local_var_chain = info->new_local_var_chain;
1570 info->new_local_var_chain = NULL;
1571 gimple_seq *seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
1572 walk_body (convert_nonlocal_reference_stmt,
1573 convert_nonlocal_reference_op, info, seq);
1574 if (info->new_local_var_chain)
1575 declare_vars (info->new_local_var_chain,
1576 gimple_seq_first_stmt (*seq), false);
1577 info->new_local_var_chain = save_local_var_chain;
1579 break;
1581 case OMP_CLAUSE_LINEAR:
1583 tree save_local_var_chain = info->new_local_var_chain;
1584 info->new_local_var_chain = NULL;
1585 gimple_seq *seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause);
1586 walk_body (convert_nonlocal_reference_stmt,
1587 convert_nonlocal_reference_op, info, seq);
1588 if (info->new_local_var_chain)
1589 declare_vars (info->new_local_var_chain,
1590 gimple_seq_first_stmt (*seq), false);
1591 info->new_local_var_chain = save_local_var_chain;
1593 break;
1595 default:
1596 break;
1599 return need_chain;
1602 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1604 static void
1605 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1607 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1608 type = TREE_TYPE (type);
1610 if (TYPE_NAME (type)
1611 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1612 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1613 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1615 while (POINTER_TYPE_P (type)
1616 || TREE_CODE (type) == VECTOR_TYPE
1617 || TREE_CODE (type) == FUNCTION_TYPE
1618 || TREE_CODE (type) == METHOD_TYPE)
1619 type = TREE_TYPE (type);
1621 if (TREE_CODE (type) == ARRAY_TYPE)
1623 tree domain, t;
1625 note_nonlocal_vla_type (info, TREE_TYPE (type));
1626 domain = TYPE_DOMAIN (type);
1627 if (domain)
1629 t = TYPE_MIN_VALUE (domain);
1630 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1631 && decl_function_context (t) != info->context)
1632 get_nonlocal_debug_decl (info, t);
1633 t = TYPE_MAX_VALUE (domain);
1634 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1635 && decl_function_context (t) != info->context)
1636 get_nonlocal_debug_decl (info, t);
1641 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1642 PARM_DECLs that belong to outer functions. This handles statements
1643 that are not handled via the standard recursion done in
1644 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1645 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1646 operands of STMT have been handled by this function. */
1648 static tree
1649 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1650 struct walk_stmt_info *wi)
1652 struct nesting_info *info = (struct nesting_info *) wi->info;
1653 tree save_local_var_chain;
1654 bitmap save_suppress;
1655 gimple *stmt = gsi_stmt (*gsi);
1657 switch (gimple_code (stmt))
1659 case GIMPLE_GOTO:
1660 /* Don't walk non-local gotos for now. */
1661 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1663 wi->val_only = true;
1664 wi->is_lhs = false;
1665 *handled_ops_p = false;
1666 return NULL_TREE;
1668 break;
1670 case GIMPLE_OMP_TEAMS:
1671 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
1673 save_suppress = info->suppress_expansion;
1674 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
1675 wi);
1676 walk_body (convert_nonlocal_reference_stmt,
1677 convert_nonlocal_reference_op, info,
1678 gimple_omp_body_ptr (stmt));
1679 info->suppress_expansion = save_suppress;
1680 break;
1682 /* FALLTHRU */
1684 case GIMPLE_OMP_PARALLEL:
1685 case GIMPLE_OMP_TASK:
1686 save_suppress = info->suppress_expansion;
1687 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1688 wi))
1690 tree c, decl;
1691 decl = get_chain_decl (info);
1692 c = build_omp_clause (gimple_location (stmt),
1693 OMP_CLAUSE_FIRSTPRIVATE);
1694 OMP_CLAUSE_DECL (c) = decl;
1695 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1696 gimple_omp_taskreg_set_clauses (stmt, c);
1699 save_local_var_chain = info->new_local_var_chain;
1700 info->new_local_var_chain = NULL;
1702 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1703 info, gimple_omp_body_ptr (stmt));
1705 if (info->new_local_var_chain)
1706 declare_vars (info->new_local_var_chain,
1707 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1708 false);
1709 info->new_local_var_chain = save_local_var_chain;
1710 info->suppress_expansion = save_suppress;
1711 break;
1713 case GIMPLE_OMP_FOR:
1714 save_suppress = info->suppress_expansion;
1715 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1716 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1717 convert_nonlocal_reference_stmt,
1718 convert_nonlocal_reference_op, info);
1719 walk_body (convert_nonlocal_reference_stmt,
1720 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1721 info->suppress_expansion = save_suppress;
1722 break;
1724 case GIMPLE_OMP_SECTIONS:
1725 save_suppress = info->suppress_expansion;
1726 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1727 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1728 info, gimple_omp_body_ptr (stmt));
1729 info->suppress_expansion = save_suppress;
1730 break;
1732 case GIMPLE_OMP_SINGLE:
1733 save_suppress = info->suppress_expansion;
1734 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1735 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1736 info, gimple_omp_body_ptr (stmt));
1737 info->suppress_expansion = save_suppress;
1738 break;
1740 case GIMPLE_OMP_SCOPE:
1741 save_suppress = info->suppress_expansion;
1742 convert_nonlocal_omp_clauses (gimple_omp_scope_clauses_ptr (stmt), wi);
1743 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1744 info, gimple_omp_body_ptr (stmt));
1745 info->suppress_expansion = save_suppress;
1746 break;
1748 case GIMPLE_OMP_TASKGROUP:
1749 save_suppress = info->suppress_expansion;
1750 convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
1751 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1752 info, gimple_omp_body_ptr (stmt));
1753 info->suppress_expansion = save_suppress;
1754 break;
1756 case GIMPLE_OMP_TARGET:
1757 if (!is_gimple_omp_offloaded (stmt))
1759 save_suppress = info->suppress_expansion;
1760 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1761 wi);
1762 info->suppress_expansion = save_suppress;
1763 walk_body (convert_nonlocal_reference_stmt,
1764 convert_nonlocal_reference_op, info,
1765 gimple_omp_body_ptr (stmt));
1766 break;
1768 save_suppress = info->suppress_expansion;
1769 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1770 wi))
1772 tree c, decl;
1773 decl = get_chain_decl (info);
1774 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1775 OMP_CLAUSE_DECL (c) = decl;
1776 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1777 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1778 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1779 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1782 save_local_var_chain = info->new_local_var_chain;
1783 info->new_local_var_chain = NULL;
1785 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1786 info, gimple_omp_body_ptr (stmt));
1788 if (info->new_local_var_chain)
1789 declare_vars (info->new_local_var_chain,
1790 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1791 false);
1792 info->new_local_var_chain = save_local_var_chain;
1793 info->suppress_expansion = save_suppress;
1794 break;
1796 case GIMPLE_OMP_SECTION:
1797 case GIMPLE_OMP_MASTER:
1798 case GIMPLE_OMP_MASKED:
1799 case GIMPLE_OMP_ORDERED:
1800 case GIMPLE_OMP_SCAN:
1801 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1802 info, gimple_omp_body_ptr (stmt));
1803 break;
1805 case GIMPLE_BIND:
1807 gbind *bind_stmt = as_a <gbind *> (stmt);
1809 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1810 if (TREE_CODE (var) == NAMELIST_DECL)
1812 /* Adjust decls mentioned in NAMELIST_DECL. */
1813 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1814 tree decl;
1815 unsigned int i;
1817 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1819 if (VAR_P (decl)
1820 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1821 continue;
1822 if (decl_function_context (decl) != info->context)
1823 CONSTRUCTOR_ELT (decls, i)->value
1824 = get_nonlocal_debug_decl (info, decl);
1828 *handled_ops_p = false;
1829 return NULL_TREE;
1831 case GIMPLE_COND:
1832 wi->val_only = true;
1833 wi->is_lhs = false;
1834 *handled_ops_p = false;
1835 return NULL_TREE;
1837 case GIMPLE_ASSIGN:
1838 if (gimple_clobber_p (stmt))
1840 tree lhs = gimple_assign_lhs (stmt);
1841 if (DECL_P (lhs)
1842 && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
1843 && decl_function_context (lhs) != info->context)
1845 gsi_replace (gsi, gimple_build_nop (), true);
1846 break;
1849 *handled_ops_p = false;
1850 return NULL_TREE;
1852 default:
1853 /* For every other statement that we are not interested in
1854 handling here, let the walker traverse the operands. */
1855 *handled_ops_p = false;
1856 return NULL_TREE;
1859 /* We have handled all of STMT's operands; no need to traverse them again. */
1860 *handled_ops_p = true;
1861 return NULL_TREE;
1865 /* A subroutine of convert_local_reference. Create a local variable
1866 in the parent function with DECL_VALUE_EXPR set to reference the
1867 field in FRAME. This is used both for debug info and in OMP
1868 lowering. */
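/* For illustration only (a sketch, not part of the pass): if the parent's
   local 'a' has been moved into the frame record, the replacement decl
   built below shows up in dumps roughly as

     int a [value-expr: FRAME.1.a];

   i.e. a fresh VAR_DECL whose DECL_VALUE_EXPR is the COMPONENT_REF of the
   frame field, so debug info and OMP lowering still see a decl named 'a'.
   "FRAME.1" is only an assumed example name for the frame variable.  */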
1870 static tree
1871 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1873 tree x, new_decl;
1875 tree *slot = &info->var_map->get_or_insert (decl);
1876 if (*slot)
1877 return *slot;
1879 /* Make sure frame_decl gets created. */
1880 (void) get_frame_type (info);
1881 x = info->frame_decl;
1882 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1884 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1885 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1886 DECL_CONTEXT (new_decl) = info->context;
1887 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1888 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1889 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1890 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1891 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1892 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1893 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1894 if ((TREE_CODE (decl) == PARM_DECL
1895 || TREE_CODE (decl) == RESULT_DECL
1896 || VAR_P (decl))
1897 && DECL_BY_REFERENCE (decl))
1898 DECL_BY_REFERENCE (new_decl) = 1;
1900 SET_DECL_VALUE_EXPR (new_decl, x);
1901 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1902 *slot = new_decl;
1904 DECL_CHAIN (new_decl) = info->debug_var_chain;
1905 info->debug_var_chain = new_decl;
1907 /* Do not emit debug info twice. */
1908 DECL_IGNORED_P (decl) = 1;
1910 return new_decl;
1914 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1915 and PARM_DECLs that were referenced by inner nested functions.
1916 The rewrite will be a structure reference to the local frame variable. */
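/* A concrete example of the rewrite performed here (illustration only,
   assuming GNU C nested functions):

     int parent (void)
     {
       int a = 1;
       int child (void) { return a; }   // 'a' is referenced up-level
       return a + child ();
     }

   The use of 'a' in 'parent' itself becomes a reference to FRAME.1.a (or
   to the debug decl from get_local_debug_decl when expansion of the decl
   is suppressed); "FRAME.1" is only an assumed name for the local frame
   variable.  */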
1918 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1920 static tree
1921 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1923 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1924 struct nesting_info *const info = (struct nesting_info *) wi->info;
1925 tree t = *tp, field, x;
1926 bool save_val_only;
1928 *walk_subtrees = 0;
1929 switch (TREE_CODE (t))
1931 case VAR_DECL:
1932 /* Non-automatic variables are never processed. */
1933 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1934 break;
1935 /* FALLTHRU */
1937 case PARM_DECL:
1938 if (t != info->frame_decl && decl_function_context (t) == info->context)
1940 /* If we copied a pointer to the frame, then the original decl
1941 is used unchanged in the parent function. */
1942 if (use_pointer_in_frame (t))
1943 break;
1945 /* No need to transform anything if no child references the
1946 variable. */
1947 field = lookup_field_for_decl (info, t, NO_INSERT);
1948 if (!field)
1949 break;
1950 wi->changed = true;
1952 if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1953 x = get_local_debug_decl (info, t, field);
1954 else
1955 x = get_frame_field (info, info->context, field, &wi->gsi);
1957 if (wi->val_only)
1959 if (wi->is_lhs)
1960 x = save_tmp_var (info, x, &wi->gsi);
1961 else
1962 x = init_tmp_var (info, x, &wi->gsi);
1965 *tp = x;
1967 break;
1969 case ADDR_EXPR:
1970 save_val_only = wi->val_only;
1971 wi->val_only = false;
1972 wi->is_lhs = false;
1973 wi->changed = false;
1974 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1975 wi->val_only = save_val_only;
1977 /* If we converted anything ... */
1978 if (wi->changed)
1980 tree save_context;
1982 /* Then the frame decl is now addressable. */
1983 TREE_ADDRESSABLE (info->frame_decl) = 1;
1985 save_context = current_function_decl;
1986 current_function_decl = info->context;
1987 recompute_tree_invariant_for_addr_expr (t);
1989 /* If we are in a context where we only accept values, then
1990 compute the address into a temporary. */
1991 if (save_val_only)
1992 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1993 t, &wi->gsi);
1994 current_function_decl = save_context;
1996 break;
1998 case REALPART_EXPR:
1999 case IMAGPART_EXPR:
2000 case COMPONENT_REF:
2001 case ARRAY_REF:
2002 case ARRAY_RANGE_REF:
2003 case BIT_FIELD_REF:
2004 /* Go down this entire nest and just look at the final prefix and
2005 anything that describes the references. Otherwise, we lose track
2006 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
2007 save_val_only = wi->val_only;
2008 wi->val_only = true;
2009 wi->is_lhs = false;
2010 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
2012 if (TREE_CODE (t) == COMPONENT_REF)
2013 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
2014 NULL);
2015 else if (TREE_CODE (t) == ARRAY_REF
2016 || TREE_CODE (t) == ARRAY_RANGE_REF)
2018 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
2019 NULL);
2020 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
2021 NULL);
2022 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
2023 NULL);
2026 wi->val_only = false;
2027 walk_tree (tp, convert_local_reference_op, wi, NULL);
2028 wi->val_only = save_val_only;
2029 break;
2031 case MEM_REF:
2032 save_val_only = wi->val_only;
2033 wi->val_only = true;
2034 wi->is_lhs = false;
2035 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
2036 wi, NULL);
2037 /* We need to re-fold the MEM_REF, as component references as
2038 part of an ADDR_EXPR address are not allowed. But we cannot
2039 fold here, as the chain record type is not yet finalized. */
2040 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
2041 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2042 info->mem_refs->add (tp);
2043 wi->val_only = save_val_only;
2044 break;
2046 case VIEW_CONVERT_EXPR:
2047 /* Just request to look at the subtrees, leaving val_only and lhs
2048 untouched. This might actually be for !val_only + lhs, in which
2049 case we don't want to force a replacement by a temporary. */
2050 *walk_subtrees = 1;
2051 break;
2053 default:
2054 if (!IS_TYPE_OR_DECL_P (t))
2056 *walk_subtrees = 1;
2057 wi->val_only = true;
2058 wi->is_lhs = false;
2060 break;
2063 return NULL_TREE;
2066 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
2067 struct walk_stmt_info *);
2069 /* Helper for convert_local_reference. Convert all the references in
2070 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
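/* Illustration only (assumed input mixing a nested function with OpenMP):

     void parent (void)
     {
       int n = 0;
       void child (void) { n++; }      // forces 'n' into parent's FRAME
     #pragma omp parallel shared(n)
       n = 1;
     }

   Because OMP clauses must name a DECL, 'shared(n)' cannot simply point at
   FRAME.1.n; do_decl_clause below substitutes the debug decl built by
   get_local_debug_decl and records the decl in the suppression bitmap.  */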
2072 static bool
2073 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
2075 struct nesting_info *const info = (struct nesting_info *) wi->info;
2076 bool need_frame = false, need_stmts = false;
2077 tree clause, decl, *pdecl;
2078 int dummy;
2079 bitmap new_suppress;
2081 new_suppress = BITMAP_GGC_ALLOC ();
2082 bitmap_copy (new_suppress, info->suppress_expansion);
2084 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2086 pdecl = NULL;
2087 switch (OMP_CLAUSE_CODE (clause))
2089 case OMP_CLAUSE_REDUCTION:
2090 case OMP_CLAUSE_IN_REDUCTION:
2091 case OMP_CLAUSE_TASK_REDUCTION:
2092 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2093 need_stmts = true;
2094 if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
2096 pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
2097 if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
2098 pdecl = &TREE_OPERAND (*pdecl, 0);
2099 if (TREE_CODE (*pdecl) == INDIRECT_REF
2100 || TREE_CODE (*pdecl) == ADDR_EXPR)
2101 pdecl = &TREE_OPERAND (*pdecl, 0);
2103 goto do_decl_clause;
2105 case OMP_CLAUSE_LASTPRIVATE:
2106 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
2107 need_stmts = true;
2108 goto do_decl_clause;
2110 case OMP_CLAUSE_LINEAR:
2111 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
2112 need_stmts = true;
2113 wi->val_only = true;
2114 wi->is_lhs = false;
2115 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
2116 wi);
2117 goto do_decl_clause;
2119 case OMP_CLAUSE_PRIVATE:
2120 case OMP_CLAUSE_FIRSTPRIVATE:
2121 case OMP_CLAUSE_COPYPRIVATE:
2122 case OMP_CLAUSE_SHARED:
2123 case OMP_CLAUSE_TO_DECLARE:
2124 case OMP_CLAUSE_LINK:
2125 case OMP_CLAUSE_USE_DEVICE_PTR:
2126 case OMP_CLAUSE_USE_DEVICE_ADDR:
2127 case OMP_CLAUSE_HAS_DEVICE_ADDR:
2128 case OMP_CLAUSE_IS_DEVICE_PTR:
2129 case OMP_CLAUSE_DETACH:
2130 do_decl_clause:
2131 if (pdecl == NULL)
2132 pdecl = &OMP_CLAUSE_DECL (clause);
2133 decl = *pdecl;
2134 if (VAR_P (decl)
2135 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2136 break;
2137 if (decl_function_context (decl) == info->context
2138 && !use_pointer_in_frame (decl))
2140 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2141 if (field)
2143 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
2144 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
2145 bitmap_set_bit (new_suppress, DECL_UID (decl));
2146 *pdecl = get_local_debug_decl (info, decl, field);
2147 need_frame = true;
2150 break;
2152 case OMP_CLAUSE_SCHEDULE:
2153 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
2154 break;
2155 /* FALLTHRU */
2156 case OMP_CLAUSE_FINAL:
2157 case OMP_CLAUSE_IF:
2158 case OMP_CLAUSE_NUM_THREADS:
2159 case OMP_CLAUSE_DEPEND:
2160 case OMP_CLAUSE_DEVICE:
2161 case OMP_CLAUSE_NUM_TEAMS:
2162 case OMP_CLAUSE_THREAD_LIMIT:
2163 case OMP_CLAUSE_SAFELEN:
2164 case OMP_CLAUSE_SIMDLEN:
2165 case OMP_CLAUSE_PRIORITY:
2166 case OMP_CLAUSE_GRAINSIZE:
2167 case OMP_CLAUSE_NUM_TASKS:
2168 case OMP_CLAUSE_HINT:
2169 case OMP_CLAUSE_FILTER:
2170 case OMP_CLAUSE_NUM_GANGS:
2171 case OMP_CLAUSE_NUM_WORKERS:
2172 case OMP_CLAUSE_VECTOR_LENGTH:
2173 case OMP_CLAUSE_GANG:
2174 case OMP_CLAUSE_WORKER:
2175 case OMP_CLAUSE_VECTOR:
2176 case OMP_CLAUSE_ASYNC:
2177 case OMP_CLAUSE_WAIT:
2178 /* Several OpenACC clauses have optional arguments. Check if they
2179 are present. */
2180 if (OMP_CLAUSE_OPERAND (clause, 0))
2182 wi->val_only = true;
2183 wi->is_lhs = false;
2184 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2185 &dummy, wi);
2188 /* The gang clause accepts two arguments. */
2189 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
2190 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
2192 wi->val_only = true;
2193 wi->is_lhs = false;
2194 convert_nonlocal_reference_op
2195 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2197 break;
2199 case OMP_CLAUSE_DIST_SCHEDULE:
2200 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2202 wi->val_only = true;
2203 wi->is_lhs = false;
2204 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2205 &dummy, wi);
2207 break;
2209 case OMP_CLAUSE_MAP:
2210 case OMP_CLAUSE_TO:
2211 case OMP_CLAUSE_FROM:
2212 if (OMP_CLAUSE_SIZE (clause))
2214 wi->val_only = true;
2215 wi->is_lhs = false;
2216 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2217 &dummy, wi);
2219 if (DECL_P (OMP_CLAUSE_DECL (clause)))
2220 goto do_decl_clause;
2221 wi->val_only = true;
2222 wi->is_lhs = false;
2223 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2224 wi, NULL);
2225 break;
2227 case OMP_CLAUSE_ALIGNED:
2228 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2230 wi->val_only = true;
2231 wi->is_lhs = false;
2232 convert_local_reference_op
2233 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2235 /* FALLTHRU */
2236 case OMP_CLAUSE_NONTEMPORAL:
2237 do_decl_clause_no_supp:
2238 /* Like do_decl_clause, but don't add any suppression. */
2239 decl = OMP_CLAUSE_DECL (clause);
2240 if (VAR_P (decl)
2241 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2242 break;
2243 if (decl_function_context (decl) == info->context
2244 && !use_pointer_in_frame (decl))
2246 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2247 if (field)
2249 OMP_CLAUSE_DECL (clause)
2250 = get_local_debug_decl (info, decl, field);
2251 need_frame = true;
2254 break;
2256 case OMP_CLAUSE_ALLOCATE:
2257 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause))
2259 wi->val_only = true;
2260 wi->is_lhs = false;
2261 convert_local_reference_op
2262 (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), &dummy, wi);
2264 goto do_decl_clause_no_supp;
2266 case OMP_CLAUSE_NOWAIT:
2267 case OMP_CLAUSE_ORDERED:
2268 case OMP_CLAUSE_DEFAULT:
2269 case OMP_CLAUSE_COPYIN:
2270 case OMP_CLAUSE_COLLAPSE:
2271 case OMP_CLAUSE_TILE:
2272 case OMP_CLAUSE_UNTIED:
2273 case OMP_CLAUSE_MERGEABLE:
2274 case OMP_CLAUSE_PROC_BIND:
2275 case OMP_CLAUSE_NOGROUP:
2276 case OMP_CLAUSE_THREADS:
2277 case OMP_CLAUSE_SIMD:
2278 case OMP_CLAUSE_DEFAULTMAP:
2279 case OMP_CLAUSE_ORDER:
2280 case OMP_CLAUSE_SEQ:
2281 case OMP_CLAUSE_INDEPENDENT:
2282 case OMP_CLAUSE_AUTO:
2283 case OMP_CLAUSE_IF_PRESENT:
2284 case OMP_CLAUSE_FINALIZE:
2285 case OMP_CLAUSE_BIND:
2286 case OMP_CLAUSE__CONDTEMP_:
2287 case OMP_CLAUSE__SCANTEMP_:
2288 break;
2290 /* The following clause belongs to the OpenACC cache directive, which
2291 is discarded during gimplification. */
2292 case OMP_CLAUSE__CACHE_:
2293 /* The following clauses are only allowed in the OpenMP declare simd
2294 directive, so not seen here. */
2295 case OMP_CLAUSE_UNIFORM:
2296 case OMP_CLAUSE_INBRANCH:
2297 case OMP_CLAUSE_NOTINBRANCH:
2298 /* The following clauses are only allowed on OpenMP cancel and
2299 cancellation point directives, which at this point have already
2300 been lowered into a function call. */
2301 case OMP_CLAUSE_FOR:
2302 case OMP_CLAUSE_PARALLEL:
2303 case OMP_CLAUSE_SECTIONS:
2304 case OMP_CLAUSE_TASKGROUP:
2305 /* The following clauses are only added during OMP lowering; nested
2306 function decomposition happens before that. */
2307 case OMP_CLAUSE__LOOPTEMP_:
2308 case OMP_CLAUSE__REDUCTEMP_:
2309 case OMP_CLAUSE__SIMDUID_:
2310 case OMP_CLAUSE__SIMT_:
2311 /* The following clauses are only allowed on OpenACC 'routine'
2312 directives, not seen here. */
2313 case OMP_CLAUSE_NOHOST:
2314 /* Anything else. */
2315 default:
2316 gcc_unreachable ();
2320 info->suppress_expansion = new_suppress;
2322 if (need_stmts)
2323 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2324 switch (OMP_CLAUSE_CODE (clause))
2326 case OMP_CLAUSE_REDUCTION:
2327 case OMP_CLAUSE_IN_REDUCTION:
2328 case OMP_CLAUSE_TASK_REDUCTION:
2329 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2331 tree old_context
2332 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2333 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2334 = info->context;
2335 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2336 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2337 = info->context;
2338 walk_body (convert_local_reference_stmt,
2339 convert_local_reference_op, info,
2340 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2341 walk_body (convert_local_reference_stmt,
2342 convert_local_reference_op, info,
2343 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2344 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2345 = old_context;
2346 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2347 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2348 = old_context;
2350 break;
2352 case OMP_CLAUSE_LASTPRIVATE:
2353 walk_body (convert_local_reference_stmt,
2354 convert_local_reference_op, info,
2355 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2356 break;
2358 case OMP_CLAUSE_LINEAR:
2359 walk_body (convert_local_reference_stmt,
2360 convert_local_reference_op, info,
2361 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2362 break;
2364 default:
2365 break;
2368 return need_frame;
2372 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2373 and PARM_DECLs that were referenced by inner nested functions.
2374 The rewrite will be a structure reference to the local frame variable. */
2376 static tree
2377 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2378 struct walk_stmt_info *wi)
2380 struct nesting_info *info = (struct nesting_info *) wi->info;
2381 tree save_local_var_chain;
2382 bitmap save_suppress;
2383 char save_static_chain_added;
2384 bool frame_decl_added;
2385 gimple *stmt = gsi_stmt (*gsi);
2387 switch (gimple_code (stmt))
2389 case GIMPLE_OMP_TEAMS:
2390 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2392 save_suppress = info->suppress_expansion;
2393 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2394 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2395 info, gimple_omp_body_ptr (stmt));
2396 info->suppress_expansion = save_suppress;
2397 break;
2399 /* FALLTHRU */
2401 case GIMPLE_OMP_PARALLEL:
2402 case GIMPLE_OMP_TASK:
2403 save_suppress = info->suppress_expansion;
2404 frame_decl_added = false;
2405 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
2406 wi))
2408 tree c = build_omp_clause (gimple_location (stmt),
2409 OMP_CLAUSE_SHARED);
2410 (void) get_frame_type (info);
2411 OMP_CLAUSE_DECL (c) = info->frame_decl;
2412 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2413 gimple_omp_taskreg_set_clauses (stmt, c);
2414 info->static_chain_added |= 4;
2415 frame_decl_added = true;
2418 save_local_var_chain = info->new_local_var_chain;
2419 save_static_chain_added = info->static_chain_added;
2420 info->new_local_var_chain = NULL;
2421 info->static_chain_added = 0;
2423 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2424 gimple_omp_body_ptr (stmt));
2426 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2428 tree c = build_omp_clause (gimple_location (stmt),
2429 OMP_CLAUSE_SHARED);
2430 (void) get_frame_type (info);
2431 OMP_CLAUSE_DECL (c) = info->frame_decl;
2432 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2433 info->static_chain_added |= 4;
2434 gimple_omp_taskreg_set_clauses (stmt, c);
2436 if (info->new_local_var_chain)
2437 declare_vars (info->new_local_var_chain,
2438 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2439 info->new_local_var_chain = save_local_var_chain;
2440 info->suppress_expansion = save_suppress;
2441 info->static_chain_added |= save_static_chain_added;
2442 break;
2444 case GIMPLE_OMP_FOR:
2445 save_suppress = info->suppress_expansion;
2446 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
2447 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
2448 convert_local_reference_stmt,
2449 convert_local_reference_op, info);
2450 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2451 info, gimple_omp_body_ptr (stmt));
2452 info->suppress_expansion = save_suppress;
2453 break;
2455 case GIMPLE_OMP_SECTIONS:
2456 save_suppress = info->suppress_expansion;
2457 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
2458 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2459 info, gimple_omp_body_ptr (stmt));
2460 info->suppress_expansion = save_suppress;
2461 break;
2463 case GIMPLE_OMP_SINGLE:
2464 save_suppress = info->suppress_expansion;
2465 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
2466 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2467 info, gimple_omp_body_ptr (stmt));
2468 info->suppress_expansion = save_suppress;
2469 break;
2471 case GIMPLE_OMP_SCOPE:
2472 save_suppress = info->suppress_expansion;
2473 convert_local_omp_clauses (gimple_omp_scope_clauses_ptr (stmt), wi);
2474 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2475 info, gimple_omp_body_ptr (stmt));
2476 info->suppress_expansion = save_suppress;
2477 break;
2479 case GIMPLE_OMP_TASKGROUP:
2480 save_suppress = info->suppress_expansion;
2481 convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
2482 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2483 info, gimple_omp_body_ptr (stmt));
2484 info->suppress_expansion = save_suppress;
2485 break;
2487 case GIMPLE_OMP_TARGET:
2488 if (!is_gimple_omp_offloaded (stmt))
2490 save_suppress = info->suppress_expansion;
2491 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
2492 info->suppress_expansion = save_suppress;
2493 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2494 info, gimple_omp_body_ptr (stmt));
2495 break;
2497 save_suppress = info->suppress_expansion;
2498 frame_decl_added = false;
2499 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
2501 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2502 (void) get_frame_type (info);
2503 OMP_CLAUSE_DECL (c) = info->frame_decl;
2504 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2505 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2506 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2507 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2508 info->static_chain_added |= 4;
2509 frame_decl_added = true;
2512 save_local_var_chain = info->new_local_var_chain;
2513 save_static_chain_added = info->static_chain_added;
2514 info->new_local_var_chain = NULL;
2515 info->static_chain_added = 0;
2517 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2518 gimple_omp_body_ptr (stmt));
2520 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2522 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2523 (void) get_frame_type (info);
2524 OMP_CLAUSE_DECL (c) = info->frame_decl;
2525 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2526 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2527 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2528 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2529 info->static_chain_added |= 4;
2532 if (info->new_local_var_chain)
2533 declare_vars (info->new_local_var_chain,
2534 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2535 info->new_local_var_chain = save_local_var_chain;
2536 info->suppress_expansion = save_suppress;
2537 info->static_chain_added |= save_static_chain_added;
2538 break;
2540 case GIMPLE_OMP_SECTION:
2541 case GIMPLE_OMP_MASTER:
2542 case GIMPLE_OMP_MASKED:
2543 case GIMPLE_OMP_ORDERED:
2544 case GIMPLE_OMP_SCAN:
2545 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2546 info, gimple_omp_body_ptr (stmt));
2547 break;
2549 case GIMPLE_COND:
2550 wi->val_only = true;
2551 wi->is_lhs = false;
2552 *handled_ops_p = false;
2553 return NULL_TREE;
2555 case GIMPLE_ASSIGN:
2556 if (gimple_clobber_p (stmt))
2558 tree lhs = gimple_assign_lhs (stmt);
2559 if (DECL_P (lhs)
2560 && decl_function_context (lhs) == info->context
2561 && !use_pointer_in_frame (lhs)
2562 && lookup_field_for_decl (info, lhs, NO_INSERT))
2564 gsi_replace (gsi, gimple_build_nop (), true);
2565 break;
2568 *handled_ops_p = false;
2569 return NULL_TREE;
2571 case GIMPLE_BIND:
2572 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2573 var;
2574 var = DECL_CHAIN (var))
2575 if (TREE_CODE (var) == NAMELIST_DECL)
2577 /* Adjust decls mentioned in NAMELIST_DECL. */
2578 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2579 tree decl;
2580 unsigned int i;
2582 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2584 if (VAR_P (decl)
2585 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2586 continue;
2587 if (decl_function_context (decl) == info->context
2588 && !use_pointer_in_frame (decl))
2590 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2591 if (field)
2593 CONSTRUCTOR_ELT (decls, i)->value
2594 = get_local_debug_decl (info, decl, field);
2600 *handled_ops_p = false;
2601 return NULL_TREE;
2603 default:
2604 /* For every other statement that we are not interested in
2605 handling here, let the walker traverse the operands. */
2606 *handled_ops_p = false;
2607 return NULL_TREE;
2610 /* Indicate that we have handled all the operands ourselves. */
2611 *handled_ops_p = true;
2612 return NULL_TREE;
2616 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2617 that reference labels from outer functions. The rewrite will be a
2618 call to __builtin_nonlocal_goto. */
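/* Illustration only (assumed GNU C input using a nonlocal goto):

     void parent (void)
     {
       __label__ out;
       void child (void) { goto out; }  // jumps to a label of 'parent'
       child ();
     out:;
     }

   The 'goto out' inside 'child' is what gets rewritten below into a call
   of the form __builtin_nonlocal_goto (&<nonlocal label>, &CHAIN->field),
   targeting a new DECL_NONLOCAL label rather than the user label 'out'.  */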
2620 static tree
2621 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2622 struct walk_stmt_info *wi)
2624 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2625 tree label, new_label, target_context, x, field;
2626 gcall *call;
2627 gimple *stmt = gsi_stmt (*gsi);
2629 if (gimple_code (stmt) != GIMPLE_GOTO)
2631 *handled_ops_p = false;
2632 return NULL_TREE;
2635 label = gimple_goto_dest (stmt);
2636 if (TREE_CODE (label) != LABEL_DECL)
2638 *handled_ops_p = false;
2639 return NULL_TREE;
2642 target_context = decl_function_context (label);
2643 if (target_context == info->context)
2645 *handled_ops_p = false;
2646 return NULL_TREE;
2649 for (i = info->outer; target_context != i->context; i = i->outer)
2650 continue;
2652 /* The original user label may also be used for a normal goto, so
2653 we must create a new label that will actually receive the abnormal
2654 control transfer. This new label will be marked LABEL_NONLOCAL; this
2655 mark will trigger proper behavior in the cfg, as well as cause the
2656 (hairy target-specific) non-local goto receiver code to be generated
2657 when we expand rtl. Enter this association into var_map so that we
2658 can insert the new label into the IL during a second pass. */
2659 tree *slot = &i->var_map->get_or_insert (label);
2660 if (*slot == NULL)
2662 new_label = create_artificial_label (UNKNOWN_LOCATION);
2663 DECL_NONLOCAL (new_label) = 1;
2664 *slot = new_label;
2666 else
2667 new_label = *slot;
2669 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2670 field = get_nl_goto_field (i);
2671 x = get_frame_field (info, target_context, field, gsi);
2672 x = build_addr (x);
2673 x = gsi_gimplify_val (info, x, gsi);
2674 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2675 2, build_addr (new_label), x);
2676 gsi_replace (gsi, call, false);
2678 /* We have handled all of STMT's operands, no need to keep going. */
2679 *handled_ops_p = true;
2680 return NULL_TREE;
2684 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2685 are referenced via nonlocal goto from a nested function. The rewrite
2686 will involve installing a newly generated DECL_NONLOCAL label, and
2687 (potentially) a branch around the rtl gunk that is assumed to be
2688 attached to such a label. */
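/* Schematically (illustration only), the receiving side in the parent

     out:;

   becomes

     goto out;            // inserted when execution may fall into the label
     <nonlocal out>:;     // new DECL_NONLOCAL label targeted by the builtin
     out:;

   so that ordinary fall-through control flow skips the nonlocal receiver.  */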
2690 static tree
2691 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2692 struct walk_stmt_info *wi)
2694 struct nesting_info *const info = (struct nesting_info *) wi->info;
2695 tree label, new_label;
2696 gimple_stmt_iterator tmp_gsi;
2697 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2699 if (!stmt)
2701 *handled_ops_p = false;
2702 return NULL_TREE;
2705 label = gimple_label_label (stmt);
2707 tree *slot = info->var_map->get (label);
2708 if (!slot)
2710 *handled_ops_p = false;
2711 return NULL_TREE;
2714 /* If there's any possibility that the previous statement falls through,
2715 then we must branch around the new non-local label. */
2716 tmp_gsi = wi->gsi;
2717 gsi_prev (&tmp_gsi);
2718 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2720 gimple *stmt = gimple_build_goto (label);
2721 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2724 new_label = (tree) *slot;
2725 stmt = gimple_build_label (new_label);
2726 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2728 *handled_ops_p = true;
2729 return NULL_TREE;
2733 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2734 of nested functions that require the use of trampolines. The rewrite
2735 will involve a reference to a trampoline generated for the occasion. */
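/* Illustration only: the classic source pattern that reaches the ADDR_EXPR
   case below is taking the address of a nested function that needs its
   static chain, e.g.

     int parent (int x)
     {
       int nested (int y) { return x + y; }   // uses the static chain
       int (*fp) (int) = nested;              // address taken here
       return fp (1);
     }

   The '&nested' is replaced by the adjusted address of a trampoline (or of
   a function descriptor, when the front end requests one and trampolines
   are disabled) stored in the frame of 'parent'.  */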
2737 static tree
2738 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2740 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2741 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2742 tree t = *tp, decl, target_context, x, builtin;
2743 bool descr;
2744 gcall *call;
2746 *walk_subtrees = 0;
2747 switch (TREE_CODE (t))
2749 case ADDR_EXPR:
2750 /* Build
2751 T.1 = &CHAIN->tramp;
2752 T.2 = __builtin_adjust_trampoline (T.1);
2753 T.3 = (func_type)T.2;  */
2756 decl = TREE_OPERAND (t, 0);
2757 if (TREE_CODE (decl) != FUNCTION_DECL)
2758 break;
2760 /* Only need to process nested functions. */
2761 target_context = decl_function_context (decl);
2762 if (!target_context)
2763 break;
2765 /* If the nested function doesn't use a static chain, then
2766 it doesn't need a trampoline. */
2767 if (!DECL_STATIC_CHAIN (decl))
2768 break;
2770 /* If we don't want a trampoline, then don't build one. */
2771 if (TREE_NO_TRAMPOLINE (t))
2772 break;
2774 /* Look up the immediate parent of the callee, as that's where
2775 we need to insert the trampoline. */
2776 for (i = info; i->context != target_context; i = i->outer)
2777 continue;
2779 /* Decide whether to generate a descriptor or a trampoline. */
2780 descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;
2782 if (descr)
2783 x = lookup_descr_for_decl (i, decl, INSERT);
2784 else
2785 x = lookup_tramp_for_decl (i, decl, INSERT);
2787 /* Compute the address of the field holding the trampoline. */
2788 x = get_frame_field (info, target_context, x, &wi->gsi);
2789 x = build_addr (x);
2790 x = gsi_gimplify_val (info, x, &wi->gsi);
2792 /* Do machine-specific ugliness. Normally this will involve
2793 computing extra alignment, but it can really be anything. */
2794 if (descr)
2795 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
2796 else
2797 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2798 call = gimple_build_call (builtin, 1, x);
2799 x = init_tmp_var_with_call (info, &wi->gsi, call);
2801 /* Cast back to the proper function type. */
2802 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2803 x = init_tmp_var (info, x, &wi->gsi);
2805 *tp = x;
2806 break;
2808 default:
2809 if (!IS_TYPE_OR_DECL_P (t))
2810 *walk_subtrees = 1;
2811 break;
2814 return NULL_TREE;
2818 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2819 to addresses of nested functions that require the use of
2820 trampolines. The rewrite will involve a reference to a trampoline
2821 generated for the occasion. */
2823 static tree
2824 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2825 struct walk_stmt_info *wi)
2827 struct nesting_info *info = (struct nesting_info *) wi->info;
2828 gimple *stmt = gsi_stmt (*gsi);
2830 switch (gimple_code (stmt))
2832 case GIMPLE_CALL:
2834 /* Only walk call arguments, lest we generate trampolines for
2835 direct calls. */
2836 unsigned long i, nargs = gimple_call_num_args (stmt);
2837 for (i = 0; i < nargs; i++)
2838 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2839 wi, NULL);
2840 break;
2843 case GIMPLE_OMP_TEAMS:
2844 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2846 *handled_ops_p = false;
2847 return NULL_TREE;
2849 goto do_parallel;
2851 case GIMPLE_OMP_TARGET:
2852 if (!is_gimple_omp_offloaded (stmt))
2854 *handled_ops_p = false;
2855 return NULL_TREE;
2857 /* FALLTHRU */
2858 case GIMPLE_OMP_PARALLEL:
2859 case GIMPLE_OMP_TASK:
2860 do_parallel:
2862 tree save_local_var_chain = info->new_local_var_chain;
2863 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2864 info->new_local_var_chain = NULL;
2865 char save_static_chain_added = info->static_chain_added;
2866 info->static_chain_added = 0;
2867 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2868 info, gimple_omp_body_ptr (stmt));
2869 if (info->new_local_var_chain)
2870 declare_vars (info->new_local_var_chain,
2871 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2872 false);
2873 for (int i = 0; i < 2; i++)
2875 tree c, decl;
2876 if ((info->static_chain_added & (1 << i)) == 0)
2877 continue;
2878 decl = i ? get_chain_decl (info) : info->frame_decl;
2879 /* Don't add CHAIN.* or FRAME.* twice. */
2880 for (c = gimple_omp_taskreg_clauses (stmt); c;
2882 c = OMP_CLAUSE_CHAIN (c))
2883 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2884 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2885 && OMP_CLAUSE_DECL (c) == decl)
2886 break;
2887 if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2889 c = build_omp_clause (gimple_location (stmt),
2890 i ? OMP_CLAUSE_FIRSTPRIVATE
2891 : OMP_CLAUSE_SHARED);
2892 OMP_CLAUSE_DECL (c) = decl;
2893 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2894 gimple_omp_taskreg_set_clauses (stmt, c);
2896 else if (c == NULL)
2898 c = build_omp_clause (gimple_location (stmt),
2899 OMP_CLAUSE_MAP);
2900 OMP_CLAUSE_DECL (c) = decl;
2901 OMP_CLAUSE_SET_MAP_KIND (c,
2902 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2903 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2904 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2905 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2909 info->new_local_var_chain = save_local_var_chain;
2910 info->static_chain_added |= save_static_chain_added;
2912 break;
2914 default:
2915 *handled_ops_p = false;
2916 return NULL_TREE;
2919 *handled_ops_p = true;
2920 return NULL_TREE;
2925 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2926 that reference nested functions to make sure that the static chain
2927 is set up properly for the call. */
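/* Sketch (illustration only): for a direct call in the parent such as

     child ();

   where 'child' uses up-level state, the GIMPLE call is given a static
   chain operand, shown in dumps roughly as

     child (); [static-chain: &FRAME.1]

   which is what the gimple_call_set_chain call below arranges; "FRAME.1"
   is only an assumed name.  */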
2929 static tree
2930 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2931 struct walk_stmt_info *wi)
2933 struct nesting_info *const info = (struct nesting_info *) wi->info;
2934 tree decl, target_context;
2935 char save_static_chain_added;
2936 int i;
2937 gimple *stmt = gsi_stmt (*gsi);
2939 switch (gimple_code (stmt))
2941 case GIMPLE_CALL:
2942 if (gimple_call_chain (stmt))
2943 break;
2944 decl = gimple_call_fndecl (stmt);
2945 if (!decl)
2946 break;
2947 target_context = decl_function_context (decl);
2948 if (target_context && DECL_STATIC_CHAIN (decl))
2950 struct nesting_info *i = info;
2951 while (i && i->context != target_context)
2952 i = i->outer;
2953 /* If none of the outer contexts is the target context, this means
2954 that the function is called in the wrong context. */
2955 if (!i)
2956 internal_error ("%s from %s called in %s",
2957 IDENTIFIER_POINTER (DECL_NAME (decl)),
2958 IDENTIFIER_POINTER (DECL_NAME (target_context)),
2959 IDENTIFIER_POINTER (DECL_NAME (info->context)));
2961 gimple_call_set_chain (as_a <gcall *> (stmt),
2962 get_static_chain (info, target_context,
2963 &wi->gsi));
2964 info->static_chain_added |= (1 << (info->context != target_context));
2966 break;
2968 case GIMPLE_OMP_TEAMS:
2969 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2971 walk_body (convert_gimple_call, NULL, info,
2972 gimple_omp_body_ptr (stmt));
2973 break;
2975 /* FALLTHRU */
2977 case GIMPLE_OMP_PARALLEL:
2978 case GIMPLE_OMP_TASK:
2979 save_static_chain_added = info->static_chain_added;
2980 info->static_chain_added = 0;
2981 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2982 for (i = 0; i < 2; i++)
2984 tree c, decl;
2985 if ((info->static_chain_added & (1 << i)) == 0)
2986 continue;
2987 decl = i ? get_chain_decl (info) : info->frame_decl;
2988 /* Don't add CHAIN.* or FRAME.* twice. */
2989 for (c = gimple_omp_taskreg_clauses (stmt); c;
2991 c = OMP_CLAUSE_CHAIN (c))
2992 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2993 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2994 && OMP_CLAUSE_DECL (c) == decl)
2995 break;
2996 if (c == NULL)
2998 c = build_omp_clause (gimple_location (stmt),
2999 i ? OMP_CLAUSE_FIRSTPRIVATE
3000 : OMP_CLAUSE_SHARED);
3001 OMP_CLAUSE_DECL (c) = decl;
3002 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
3003 gimple_omp_taskreg_set_clauses (stmt, c);
3006 info->static_chain_added |= save_static_chain_added;
3007 break;
3009 case GIMPLE_OMP_TARGET:
3010 if (!is_gimple_omp_offloaded (stmt))
3012 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
3013 break;
3015 save_static_chain_added = info->static_chain_added;
3016 info->static_chain_added = 0;
3017 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
3018 for (i = 0; i < 2; i++)
3020 tree c, decl;
3021 if ((info->static_chain_added & (1 << i)) == 0)
3022 continue;
3023 decl = i ? get_chain_decl (info) : info->frame_decl;
3024 /* Don't add CHAIN.* or FRAME.* twice. */
3025 for (c = gimple_omp_target_clauses (stmt); c;
3027 c = OMP_CLAUSE_CHAIN (c))
3028 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
3029 && OMP_CLAUSE_DECL (c) == decl)
3030 break;
3031 if (c == NULL)
3033 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
3034 OMP_CLAUSE_DECL (c) = decl;
3035 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
3036 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
3037 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
3038 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
3042 info->static_chain_added |= save_static_chain_added;
3043 break;
3045 case GIMPLE_OMP_FOR:
3046 walk_body (convert_gimple_call, NULL, info,
3047 gimple_omp_for_pre_body_ptr (stmt));
3048 /* FALLTHRU */
3049 case GIMPLE_OMP_SECTIONS:
3050 case GIMPLE_OMP_SECTION:
3051 case GIMPLE_OMP_SINGLE:
3052 case GIMPLE_OMP_SCOPE:
3053 case GIMPLE_OMP_MASTER:
3054 case GIMPLE_OMP_MASKED:
3055 case GIMPLE_OMP_TASKGROUP:
3056 case GIMPLE_OMP_ORDERED:
3057 case GIMPLE_OMP_SCAN:
3058 case GIMPLE_OMP_CRITICAL:
3059 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
3060 break;
3062 default:
3063 /* Keep looking for other operands. */
3064 *handled_ops_p = false;
3065 return NULL_TREE;
3068 *handled_ops_p = true;
3069 return NULL_TREE;
3072 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
3073 call expressions. At the same time, determine if a nested function
3074 actually uses its static chain; if not, remember that. */
3076 static void
3077 convert_all_function_calls (struct nesting_info *root)
3079 unsigned int chain_count = 0, old_chain_count, iter_count;
3080 struct nesting_info *n;
3082 /* First, optimistically clear static_chain for all decls that haven't
3083 used the static chain already for variable access. But always create
3084 it if not optimizing. This makes it possible to reconstruct the static
3085 nesting tree at run time and thus to resolve up-level references from
3086 within the debugger. */
3087 FOR_EACH_NEST_INFO (n, root)
3089 if (n->thunk_p)
3090 continue;
3091 tree decl = n->context;
3092 if (!optimize)
3094 if (n->inner)
3095 (void) get_frame_type (n);
3096 if (n->outer)
3097 (void) get_chain_decl (n);
3099 else if (!n->outer || (!n->chain_decl && !n->chain_field))
3101 DECL_STATIC_CHAIN (decl) = 0;
3102 if (dump_file && (dump_flags & TDF_DETAILS))
3103 fprintf (dump_file, "Guessing no static-chain for %s\n",
3104 lang_hooks.decl_printable_name (decl, 2));
3106 else
3107 DECL_STATIC_CHAIN (decl) = 1;
3108 chain_count += DECL_STATIC_CHAIN (decl);
3111 FOR_EACH_NEST_INFO (n, root)
3112 if (n->thunk_p)
3114 tree decl = n->context;
3115 tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
3116 DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
3119 /* Walk the functions and perform transformations. Note that these
3120 transformations can induce new uses of the static chain, which in turn
3121 require re-examining all users of the decl. */
3122 /* ??? It would make sense to try to use the call graph to speed this up,
3123 but the call graph hasn't really been built yet. Even if it had been, we
3124 would still need to iterate in this loop since address-of references
3125 wouldn't show up in the callgraph anyway. */
3126 iter_count = 0;
3129 old_chain_count = chain_count;
3130 chain_count = 0;
3131 iter_count++;
3133 if (dump_file && (dump_flags & TDF_DETAILS))
3134 fputc ('\n', dump_file);
3136 FOR_EACH_NEST_INFO (n, root)
3138 if (n->thunk_p)
3139 continue;
3140 tree decl = n->context;
3141 walk_function (convert_tramp_reference_stmt,
3142 convert_tramp_reference_op, n);
3143 walk_function (convert_gimple_call, NULL, n);
3144 chain_count += DECL_STATIC_CHAIN (decl);
3147 FOR_EACH_NEST_INFO (n, root)
3148 if (n->thunk_p)
3150 tree decl = n->context;
3151 tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
3152 DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
3155 while (chain_count != old_chain_count);
3157 if (dump_file && (dump_flags & TDF_DETAILS))
3158 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
3159 iter_count);
3162 struct nesting_copy_body_data
3164 copy_body_data cb;
3165 struct nesting_info *root;
3168 /* A helper subroutine for debug_var_chain type remapping. */
3170 static tree
3171 nesting_copy_decl (tree decl, copy_body_data *id)
3173 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
3174 tree *slot = nid->root->var_map->get (decl);
3176 if (slot)
3177 return (tree) *slot;
3179 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
3181 tree new_decl = copy_decl_no_change (decl, id);
3182 DECL_ORIGINAL_TYPE (new_decl)
3183 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
3184 return new_decl;
3187 if (VAR_P (decl)
3188 || TREE_CODE (decl) == PARM_DECL
3189 || TREE_CODE (decl) == RESULT_DECL)
3190 return decl;
3192 return copy_decl_no_change (decl, id);
3195 /* A helper function for remap_vla_decls. See if *TP contains
3196 some remapped variables. */
3198 static tree
3199 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
3201 struct nesting_info *root = (struct nesting_info *) data;
3202 tree t = *tp;
3204 if (DECL_P (t))
3206 *walk_subtrees = 0;
3207 tree *slot = root->var_map->get (t);
3209 if (slot)
3210 return *slot;
3212 return NULL;
3215 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
3216 involved. */
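/* Illustration only (assumed input): a variable-length array such as

     void parent (int n)
     {
       int vla[n];
       void child (void) { vla[0] = 1; }
     }

   is gimplified into a pointer plus a DECL_VALUE_EXPR of the form *vla.N.
   Once vla.N (or decls appearing in the array bounds) have been moved into
   FRAME, the value expression and the variably modified type have to be
   remapped to the new decls, which is what this function does.  */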
3218 static void
3219 remap_vla_decls (tree block, struct nesting_info *root)
3221 tree var, subblock, val, type;
3222 struct nesting_copy_body_data id;
3224 for (subblock = BLOCK_SUBBLOCKS (block);
3225 subblock;
3226 subblock = BLOCK_CHAIN (subblock))
3227 remap_vla_decls (subblock, root);
3229 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3230 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3232 val = DECL_VALUE_EXPR (var);
3233 type = TREE_TYPE (var);
3235 if (!(TREE_CODE (val) == INDIRECT_REF
3236 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
3237 && variably_modified_type_p (type, NULL)))
3238 continue;
3240 if (root->var_map->get (TREE_OPERAND (val, 0))
3241 || walk_tree (&type, contains_remapped_vars, root, NULL))
3242 break;
3245 if (var == NULL_TREE)
3246 return;
3248 memset (&id, 0, sizeof (id));
3249 id.cb.copy_decl = nesting_copy_decl;
3250 id.cb.decl_map = new hash_map<tree, tree>;
3251 id.root = root;
3253 for (; var; var = DECL_CHAIN (var))
3254 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3256 struct nesting_info *i;
3257 tree newt, context;
3259 val = DECL_VALUE_EXPR (var);
3260 type = TREE_TYPE (var);
3262 if (!(TREE_CODE (val) == INDIRECT_REF
3263 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
3264 && variably_modified_type_p (type, NULL)))
3265 continue;
3267 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
3268 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
3269 continue;
3271 context = decl_function_context (var);
3272 for (i = root; i; i = i->outer)
3273 if (i->context == context)
3274 break;
3276 if (i == NULL)
3277 continue;
3279 /* Fully expand value expressions. This avoids having debug variables
3280 that are referenced only from them and could thus be swept during GC. */
3281 if (slot)
3283 tree t = (tree) *slot;
3284 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
3285 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
3288 id.cb.src_fn = i->context;
3289 id.cb.dst_fn = i->context;
3290 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3292 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
3293 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3295 newt = TREE_TYPE (newt);
3296 type = TREE_TYPE (type);
3298 if (TYPE_NAME (newt)
3299 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3300 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3301 && newt != type
3302 && TYPE_NAME (newt) == TYPE_NAME (type))
3303 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3305 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
3306 if (val != DECL_VALUE_EXPR (var))
3307 SET_DECL_VALUE_EXPR (var, val);
3310 delete id.cb.decl_map;
3313 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3314 involved. */
3316 static void
3317 fixup_vla_decls (tree block)
3319 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3320 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3322 tree val = DECL_VALUE_EXPR (var);
3324 if (!(TREE_CODE (val) == INDIRECT_REF
3325 && VAR_P (TREE_OPERAND (val, 0))
3326 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3327 continue;
3329 /* Fully expand value expressions. This avoids having debug variables
3330 that are referenced only from them and could thus be swept during GC. */
3331 val = build1 (INDIRECT_REF, TREE_TYPE (val),
3332 DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3333 SET_DECL_VALUE_EXPR (var, val);
3336 for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3337 fixup_vla_decls (sub);
3340 /* Fold the MEM_REF *E. */
3341 bool
3342 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3344 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3345 *ref_p = fold (*ref_p);
3346 return true;
3349 /* Given DECL, a nested function, build an initialization call for FIELD,
3350 the trampoline or descriptor for DECL, using FUNC as the function. */
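/* The statement built below has, schematically (illustration only), the
   shape

     FUNC (&FRAME.1.FIELD, &DECL, &FRAME.1);

   e.g. __builtin_init_trampoline (&FRAME.1.TRAMP.child, &child, &FRAME.1)
   when FUNC is the init-trampoline builtin; the field and frame names are
   assumed for the example.  */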
3352 static gcall *
3353 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3354 tree func)
3356 tree arg1, arg2, arg3, x;
3358 gcc_assert (DECL_STATIC_CHAIN (decl));
3359 arg3 = build_addr (info->frame_decl);
3361 arg2 = build_addr (decl);
3363 x = build3 (COMPONENT_REF, TREE_TYPE (field),
3364 info->frame_decl, field, NULL_TREE);
3365 arg1 = build_addr (x);
3367 return gimple_build_call (func, 3, arg1, arg2, arg3);
3370 /* Do "everything else" to clean up or complete state collected by the various
3371 walking passes -- create a field to hold the frame base address, lay out the
3372 types and decls, generate code to initialize the frame decl, store critical
3373 expressions in the struct function for rtl to find. */
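/* The initialization sequence emitted here looks roughly like this
   (illustration only; all names are assumed examples):

     FRAME.1.FRAME_BASE.PARENT = __builtin_dwarf_cfa (0);  // if available
     FRAME.1.p = p;                 // or &p for by-reference parameters
     FRAME.1.<chain field> = CHAIN.2;
     __builtin_init_trampoline (&FRAME.1.TRAMP.fn, &fn, &FRAME.1);
     __builtin_init_descriptor (&FRAME.1.DESCR.fn, &fn, &FRAME.1);

   and is prepended to the body inside the outermost GIMPLE_BIND.  */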
3375 static void
3376 finalize_nesting_tree_1 (struct nesting_info *root)
3378 gimple_seq stmt_list = NULL;
3379 gimple *stmt;
3380 tree context = root->context;
3381 struct function *sf;
3383 if (root->thunk_p)
3384 return;
3386 /* If we created a non-local frame type or decl, we need to lay them
3387 out at this time. */
3388 if (root->frame_type)
3390 /* Debugging information needs to compute the frame base address of the
3391 parent frame out of the static chain from the nested frame.
3393 The static chain is the address of the FRAME record, so one could
3394 imagine it would be possible to compute the frame base address just
3395 adding a constant offset to this address. Unfortunately, this is not
3396 possible: if the FRAME object has alignment constraints that are
3397 stronger than the stack, then the offset between the frame base and
3398 the FRAME object will be dynamic.
3400 What we do instead is to append a field to the FRAME object that holds
3401 the frame base address: then debug info just has to fetch this
3402 field. */
3404 /* Debugging information will refer to the CFA as the frame base
3405 address: we will do the same here. */
3406 const tree frame_addr_fndecl
3407 = builtin_decl_explicit (BUILT_IN_DWARF_CFA);
3409 /* Create a field in the FRAME record to hold the frame base address for
3410 this stack frame. Since it will be used only by the debugger, put it
3411 at the end of the record in order not to shift all other offsets. */
3412 tree fb_decl = make_node (FIELD_DECL);
3414 DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
3415 TREE_TYPE (fb_decl) = ptr_type_node;
3416 TREE_ADDRESSABLE (fb_decl) = 1;
3417 DECL_CONTEXT (fb_decl) = root->frame_type;
3418 TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
3419 fb_decl);
3421 /* In some cases the frame type will trigger the -Wpadded warning.
3422 This is not helpful; suppress it. */
3423 int save_warn_padded = warn_padded;
3424 warn_padded = 0;
3425 layout_type (root->frame_type);
3426 warn_padded = save_warn_padded;
3427 layout_decl (root->frame_decl, 0);
3429 /* Initialize the frame base address field. If the builtin we need is
3430 not available, set it to NULL so that debugging information does not
3431 reference junk. */
3432 tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
3433 root->frame_decl, fb_decl, NULL_TREE);
3434 tree fb_tmp;
3436 if (frame_addr_fndecl != NULL_TREE)
3438 gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
3439 integer_zero_node);
3440 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3442 fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
3444 else
3445 fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
3446 gimple_seq_add_stmt (&stmt_list,
3447 gimple_build_assign (fb_ref, fb_tmp));
3449 declare_vars (root->frame_decl,
3450 gimple_seq_first_stmt (gimple_body (context)), true);
3453 /* If any parameters were referenced non-locally, then we need to insert
3454 a copy or a pointer. */
3455 if (root->any_parm_remapped)
3457 tree p;
3458 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
3460 tree field, x, y;
3462 field = lookup_field_for_decl (root, p, NO_INSERT);
3463 if (!field)
3464 continue;
3466 if (use_pointer_in_frame (p))
3467 x = build_addr (p);
3468 else
3469 x = p;
3471 /* If the assignment is from a non-register, the stmt is
3472 not valid gimple. Make it so by using a temporary instead. */
3473 if (!is_gimple_reg (x)
3474 && is_gimple_reg_type (TREE_TYPE (x)))
3476 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3477 x = init_tmp_var (root, x, &gsi);
3480 y = build3 (COMPONENT_REF, TREE_TYPE (field),
3481 root->frame_decl, field, NULL_TREE);
3482 stmt = gimple_build_assign (y, x);
3483 gimple_seq_add_stmt (&stmt_list, stmt);
3487 /* If a chain_field was created, then it needs to be initialized
3488 from chain_decl. */
3489 if (root->chain_field)
3491 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
3492 root->frame_decl, root->chain_field, NULL_TREE);
3493 stmt = gimple_build_assign (x, get_chain_decl (root));
3494 gimple_seq_add_stmt (&stmt_list, stmt);
3497 /* If trampolines were created, then we need to initialize them. */
3498 if (root->any_tramp_created)
3500 struct nesting_info *i;
3501 for (i = root->inner; i ; i = i->next)
3503 tree field, x;
3505 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
3506 if (!field)
3507 continue;
3509 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
3510 stmt = build_init_call_stmt (root, i->context, field, x);
3511 gimple_seq_add_stmt (&stmt_list, stmt);
3515 /* If descriptors were created, then we need to initialize them. */
3516 if (root->any_descr_created)
3518 struct nesting_info *i;
3519 for (i = root->inner; i ; i = i->next)
3521 tree field, x;
3523 field = lookup_descr_for_decl (root, i->context, NO_INSERT);
3524 if (!field)
3525 continue;
3527 x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
3528 stmt = build_init_call_stmt (root, i->context, field, x);
3529 gimple_seq_add_stmt (&stmt_list, stmt);
3533 /* If we created initialization statements, insert them. */
3534 if (stmt_list)
3536 gbind *bind;
3537 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
3538 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
3539 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
3540 gimple_bind_set_body (bind, stmt_list);
3543 /* If a chain_decl was created, then it needs to be registered with
3544 struct function so that it gets initialized from the static chain
3545 register at the beginning of the function. */
3546 sf = DECL_STRUCT_FUNCTION (root->context);
3547 sf->static_chain_decl = root->chain_decl;
3549 /* Similarly for the non-local goto save area. */
3550 if (root->nl_goto_field)
3552 sf->nonlocal_goto_save_area
3553 = get_frame_field (root, context, root->nl_goto_field, NULL);
3554 sf->has_nonlocal_label = 1;
3557 /* Make sure all new local variables get inserted into the
3558 proper BIND_EXPR. */
3559 if (root->new_local_var_chain)
3560 declare_vars (root->new_local_var_chain,
3561 gimple_seq_first_stmt (gimple_body (root->context)),
3562 false);
3564 if (root->debug_var_chain)
3566 tree debug_var;
3567 gbind *scope;
3569 remap_vla_decls (DECL_INITIAL (root->context), root);
3571 for (debug_var = root->debug_var_chain; debug_var;
3572 debug_var = DECL_CHAIN (debug_var))
3573 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3574 break;
3576 /* If there are any debug decls with variable length types,
3577 remap those types using other debug_var_chain variables. */
3578 if (debug_var)
3580 struct nesting_copy_body_data id;
3582 memset (&id, 0, sizeof (id));
3583 id.cb.copy_decl = nesting_copy_decl;
3584 id.cb.decl_map = new hash_map<tree, tree>;
3585 id.root = root;
3587 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
3588 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3590 tree type = TREE_TYPE (debug_var);
3591 tree newt, t = type;
3592 struct nesting_info *i;
3594 for (i = root; i; i = i->outer)
3595 if (variably_modified_type_p (type, i->context))
3596 break;
3598 if (i == NULL)
3599 continue;
3601 id.cb.src_fn = i->context;
3602 id.cb.dst_fn = i->context;
3603 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3605 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
3606 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3608 newt = TREE_TYPE (newt);
3609 t = TREE_TYPE (t);
3611 if (TYPE_NAME (newt)
3612 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3613 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3614 && newt != t
3615 && TYPE_NAME (newt) == TYPE_NAME (t))
3616 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3619 delete id.cb.decl_map;
3622 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
3623 if (gimple_bind_block (scope))
3624 declare_vars (root->debug_var_chain, scope, true);
3625 else
3626 BLOCK_VARS (DECL_INITIAL (root->context))
3627 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
3628 root->debug_var_chain);
3630 else
3631 fixup_vla_decls (DECL_INITIAL (root->context));
3633 /* Fold the rewritten MEM_REF trees. */
3634 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
3636 /* Dump the translated tree function. */
3637 if (dump_file)
3639 fputs ("\n\n", dump_file);
3640 dump_function_to_file (root->context, dump_file, dump_flags);
3644 static void
3645 finalize_nesting_tree (struct nesting_info *root)
3647 struct nesting_info *n;
3648 FOR_EACH_NEST_INFO (n, root)
3649 finalize_nesting_tree_1 (n);
3652 /* Unnest the nodes and pass them to cgraph. */
3654 static void
3655 unnest_nesting_tree_1 (struct nesting_info *root)
3657 struct cgraph_node *node = cgraph_node::get (root->context);
3659 /* For nested functions, update the cgraph to reflect unnesting.
3660 We also delay finalizing these functions up to this point. */
3661 if (nested_function_info::get (node)->origin)
3663 unnest_function (node);
3664 if (!root->thunk_p)
3665 cgraph_node::finalize_function (root->context, true);
3669 static void
3670 unnest_nesting_tree (struct nesting_info *root)
3672 struct nesting_info *n;
3673 FOR_EACH_NEST_INFO (n, root)
3674 unnest_nesting_tree_1 (n);
3677 /* Free the data structures allocated during this pass. */
3679 static void
3680 free_nesting_tree (struct nesting_info *root)
3682 struct nesting_info *node, *next;
3684 node = iter_nestinfo_start (root);
3687 next = iter_nestinfo_next (node);
3688 delete node->var_map;
3689 delete node->field_map;
3690 delete node->mem_refs;
3691 free (node);
3692 node = next;
3694 while (node);
3697 /* Gimplify a function and all its nested functions. */
3698 static void
3699 gimplify_all_functions (struct cgraph_node *root)
3701 struct cgraph_node *iter;
3702 if (!gimple_body (root->decl))
3703 gimplify_function_tree (root->decl);
3704 for (iter = first_nested_function (root); iter;
3705 iter = next_nested_function (iter))
3706 if (!iter->thunk)
3707 gimplify_all_functions (iter);
3710 /* Main entry point for this pass. Process FNDECL and all of its nested
3711 subroutines and turn them into something less tightly bound. */
3713 void
3714 lower_nested_functions (tree fndecl)
3716 struct cgraph_node *cgn;
3717 struct nesting_info *root;
3719 /* If there are no nested functions, there's nothing to do. */
3720 cgn = cgraph_node::get (fndecl);
3721 if (!first_nested_function (cgn))
3722 return;
3724 gimplify_all_functions (cgn);
3726 set_dump_file (dump_begin (TDI_nested, &dump_flags));
3727 if (dump_file)
3728 fprintf (dump_file, "\n;; Function %s\n\n",
3729 lang_hooks.decl_printable_name (fndecl, 2));
3731 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3732 root = create_nesting_tree (cgn);
3734 walk_all_functions (convert_nonlocal_reference_stmt,
3735 convert_nonlocal_reference_op,
3736 root);
3737 walk_all_functions (convert_local_reference_stmt,
3738 convert_local_reference_op,
3739 root);
3740 walk_all_functions (convert_nl_goto_reference, NULL, root);
3741 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3743 convert_all_function_calls (root);
3744 finalize_nesting_tree (root);
3745 unnest_nesting_tree (root);
3747 free_nesting_tree (root);
3748 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3750 if (dump_file)
3752 dump_end (TDI_nested, dump_file);
3753 set_dump_file (NULL);
3757 #include "gt-tree-nested.h"