hppa: Always enable PIE on 64-bit target
[official-gcc.git] / gcc / tree-nested.cc
blob96718a66d01e3826d5d50ae1512aed4e00ea8cc6
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45 #include "alloc-pool.h"
46 #include "tree-nested.h"
47 #include "symbol-summary.h"
48 #include "symtab-thunks.h"
50 /* Summary of nested functions. */
51 static function_summary <nested_function_info *>
52 *nested_function_sum = NULL;
54 /* Return nested_function_info, if available. */
55 nested_function_info *
56 nested_function_info::get (cgraph_node *node)
58 if (!nested_function_sum)
59 return NULL;
60 return nested_function_sum->get (node);
63 /* Return nested_function_info possibly creating new one. */
64 nested_function_info *
65 nested_function_info::get_create (cgraph_node *node)
67 if (!nested_function_sum)
69 nested_function_sum = new function_summary <nested_function_info *>
70 (symtab);
71 nested_function_sum->disable_insertion_hook ();
73 return nested_function_sum->get_create (node);
76 /* cgraph_node is no longer nested function; update cgraph accordingly. */
77 void
78 unnest_function (cgraph_node *node)
80 nested_function_info *info = nested_function_info::get (node);
81 cgraph_node **node2 = &nested_function_info::get
82 (nested_function_origin (node))->nested;
84 gcc_checking_assert (info->origin);
85 while (*node2 != node)
86 node2 = &nested_function_info::get (*node2)->next_nested;
87 *node2 = info->next_nested;
88 info->next_nested = NULL;
89 info->origin = NULL;
90 nested_function_sum->remove (node);
93 /* Destructor: unlink function from nested function lists. */
94 nested_function_info::~nested_function_info ()
96 cgraph_node *next;
97 for (cgraph_node *n = nested; n; n = next)
99 nested_function_info *info = nested_function_info::get (n);
100 next = info->next_nested;
101 info->origin = NULL;
102 info->next_nested = NULL;
104 nested = NULL;
105 if (origin)
107 cgraph_node **node2
108 = &nested_function_info::get (origin)->nested;
110 nested_function_info *info;
111 while ((info = nested_function_info::get (*node2)) != this && info)
112 node2 = &info->next_nested;
113 *node2 = next_nested;
117 /* Free nested function info summaries. */
118 void
119 nested_function_info::release ()
121 if (nested_function_sum)
122 delete (nested_function_sum);
123 nested_function_sum = NULL;
126 /* If NODE is nested function, record it. */
127 void
128 maybe_record_nested_function (cgraph_node *node)
130 /* All nested functions gets lowered during the construction of symtab. */
131 if (symtab->state > CONSTRUCTION)
132 return;
133 if (DECL_CONTEXT (node->decl)
134 && TREE_CODE (DECL_CONTEXT (node->decl)) == FUNCTION_DECL)
136 cgraph_node *origin = cgraph_node::get_create (DECL_CONTEXT (node->decl));
137 nested_function_info *info = nested_function_info::get_create (node);
138 nested_function_info *origin_info
139 = nested_function_info::get_create (origin);
141 info->origin = origin;
142 info->next_nested = origin_info->nested;
143 origin_info->nested = node;
147 /* The object of this pass is to lower the representation of a set of nested
148 functions in order to expose all of the gory details of the various
149 nonlocal references. We want to do this sooner rather than later, in
150 order to give us more freedom in emitting all of the functions in question.
152 Back in olden times, when gcc was young, we developed an insanely
153 complicated scheme whereby variables which were referenced nonlocally
154 were forced to live in the stack of the declaring function, and then
155 the nested functions magically discovered where these variables were
156 placed. In order for this scheme to function properly, it required
157 that the outer function be partially expanded, then we switch to
158 compiling the inner function, and once done with those we switch back
159 to compiling the outer function. Such delicate ordering requirements
160 makes it difficult to do whole translation unit optimizations
161 involving such functions.
163 The implementation here is much more direct. Everything that can be
164 referenced by an inner function is a member of an explicitly created
165 structure herein called the "nonlocal frame struct". The incoming
166 static chain for a nested function is a pointer to this struct in
167 the parent. In this way, we settle on known offsets from a known
168 base, and so are decoupled from the logic that places objects in the
169 function's stack frame. More importantly, we don't have to wait for
170 that to happen -- since the compilation of the inner function is no
171 longer tied to a real stack frame, the nonlocal frame struct can be
172 allocated anywhere. Which means that the outer function is now
173 inlinable.
175 Theory of operation here is very simple. Iterate over all the
176 statements in all the functions (depth first) several times,
177 allocating structures and fields on demand. In general we want to
178 examine inner functions first, so that we can avoid making changes
179 to outer functions which are unnecessary.
181 The order of the passes matters a bit, in that later passes will be
182 skipped if it is discovered that the functions don't actually interact
183 at all. That is, they're nested in the lexical sense but could have
184 been written as independent functions without change. */
/* Local datastructure describing one function in the nesting tree.  */
struct nesting_info
{
  /* Tree links: enclosing function, first contained function, next
     sibling at the same depth (see FOR_EACH_NEST_INFO).  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Map from DECL to its FIELD_DECL in the non-local frame struct
     (see lookup_field_for_decl).  */
  hash_map<tree, tree> *field_map;
  /* Map from a nested function DECL to a TREE_LIST of its trampoline
     (TREE_PURPOSE) and descriptor (TREE_VALUE) fields
     (see lookup_element_for_decl); also used for local debug decls.  */
  hash_map<tree, tree> *var_map;
  /* Set of rewritten memory-reference operand slots.  */
  hash_set<tree *> *mem_refs;
  /* Bitmap allocated on nesting_info_bitmap_obstack.  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this info describes.  */
  tree context;
  /* Temporaries created by create_tmp_var_for, pending registration.  */
  tree new_local_var_chain;
  tree debug_var_chain;
  /* The RECORD_TYPE and VAR_DECL of the non-local frame
     (see get_frame_type).  */
  tree frame_type;
  tree frame_decl;
  /* Static chain: field in the frame (get_chain_field) and incoming
     PARM_DECL (get_chain_decl).  */
  tree chain_field;
  tree chain_decl;
  /* Field holding the non-local goto save area
     (see get_nl_goto_field).  */
  tree nl_goto_field;

  /* True if CONTEXT is a thunk.  */
  bool thunk_p;
  bool any_parm_remapped;
  bool any_tramp_created;
  bool any_descr_created;
  /* Bit 1: own frame address taken; bit 2: chain decl used
     (see get_static_chain / get_frame_field).  */
  char static_chain_added;
};
215 /* Iterate over the nesting tree, starting with ROOT, depth first. */
217 static inline struct nesting_info *
218 iter_nestinfo_start (struct nesting_info *root)
220 while (root->inner)
221 root = root->inner;
222 return root;
225 static inline struct nesting_info *
226 iter_nestinfo_next (struct nesting_info *node)
228 if (node->next)
229 return iter_nestinfo_start (node->next);
230 return node->outer;
/* Iterate I over every nesting_info in the tree rooted at ROOT,
   depth first (innermost functions visited before their parents).  */
#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;


/* We're working in so many different function contexts simultaneously,
   that create_tmp_var is dangerous.  Prevent mishap.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy
244 /* Like create_tmp_var, except record the variable for registration at
245 the given nesting level. */
247 static tree
248 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
250 tree tmp_var;
252 /* If the type is of variable size or a type which must be created by the
253 frontend, something is wrong. Note that we explicitly allow
254 incomplete types here, since we create them ourselves here. */
255 gcc_assert (!TREE_ADDRESSABLE (type));
256 gcc_assert (!TYPE_SIZE_UNIT (type)
257 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
259 tmp_var = create_tmp_var_raw (type, prefix);
260 DECL_CONTEXT (tmp_var) = info->context;
261 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
262 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
264 info->new_local_var_chain = tmp_var;
266 return tmp_var;
269 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
271 static tree
272 build_simple_mem_ref_notrap (tree ptr)
274 tree t = build_simple_mem_ref (ptr);
275 TREE_THIS_NOTRAP (t) = 1;
276 return t;
/* Take the address of EXP to be used within function CONTEXT.
   Mark it for addressability as necessary.  */

tree
build_addr (tree exp)
{
  /* EXP must be flagged addressable before an ADDR_EXPR of it is valid.  */
  mark_addressable (exp);
  return build_fold_addr_expr (exp);
}
289 /* Insert FIELD into TYPE, sorted by alignment requirements. */
291 void
292 insert_field_into_struct (tree type, tree field)
294 tree *p;
296 DECL_CONTEXT (field) = type;
298 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
299 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
300 break;
302 DECL_CHAIN (field) = *p;
303 *p = field;
305 /* Set correct alignment for frame struct type. */
306 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
307 SET_TYPE_ALIGN (type, DECL_ALIGN (field));
310 /* Build or return the RECORD_TYPE that describes the frame state that is
311 shared between INFO->CONTEXT and its nested functions. This record will
312 not be complete until finalize_nesting_tree; up until that point we'll
313 be adding fields as necessary.
315 We also build the DECL that represents this frame in the function. */
317 static tree
318 get_frame_type (struct nesting_info *info)
320 tree type = info->frame_type;
321 if (!type)
323 char *name;
325 type = make_node (RECORD_TYPE);
327 name = concat ("FRAME.",
328 IDENTIFIER_POINTER (DECL_NAME (info->context)),
329 NULL);
330 TYPE_NAME (type) = get_identifier (name);
331 free (name);
333 info->frame_type = type;
335 /* Do not put info->frame_decl on info->new_local_var_chain,
336 so that we can declare it in the lexical blocks, which
337 makes sure virtual regs that end up appearing in its RTL
338 expression get substituted in instantiate_virtual_regs. */
339 info->frame_decl = create_tmp_var_raw (type, "FRAME");
340 DECL_CONTEXT (info->frame_decl) = info->context;
341 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
342 DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;
344 /* ??? Always make it addressable for now, since it is meant to
345 be pointed to by the static chain pointer. This pessimizes
346 when it turns out that no static chains are needed because
347 the nested functions referencing non-local variables are not
348 reachable, but the true pessimization is to create the non-
349 local frame structure in the first place. */
350 TREE_ADDRESSABLE (info->frame_decl) = 1;
353 return type;
356 /* Return true if DECL should be referenced by pointer in the non-local frame
357 structure. */
359 static bool
360 use_pointer_in_frame (tree decl)
362 if (TREE_CODE (decl) == PARM_DECL)
364 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
365 sized DECLs, and inefficient to copy large aggregates. Don't bother
366 moving anything but scalar parameters. */
367 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
369 else
371 /* Variable-sized DECLs can only come from OMP clauses at this point
372 since the gimplifier has already turned the regular variables into
373 pointers. Do the same as the gimplifier. */
374 return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
378 /* Given DECL, a non-locally accessed variable, find or create a field
379 in the non-local frame structure for the given nesting context. */
381 static tree
382 lookup_field_for_decl (struct nesting_info *info, tree decl,
383 enum insert_option insert)
385 gcc_checking_assert (decl_function_context (decl) == info->context);
387 if (insert == NO_INSERT)
389 tree *slot = info->field_map->get (decl);
390 return slot ? *slot : NULL_TREE;
393 tree *slot = &info->field_map->get_or_insert (decl);
394 if (!*slot)
396 tree type = get_frame_type (info);
397 tree field = make_node (FIELD_DECL);
398 DECL_NAME (field) = DECL_NAME (decl);
400 if (use_pointer_in_frame (decl))
402 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
403 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
404 DECL_NONADDRESSABLE_P (field) = 1;
406 else
408 TREE_TYPE (field) = TREE_TYPE (decl);
409 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
410 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
411 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
412 DECL_IGNORED_P (field) = DECL_IGNORED_P (decl);
413 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
414 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
415 copy_warning (field, decl);
417 /* Declare the transformation and adjust the original DECL. For a
418 variable or for a parameter when not optimizing, we make it point
419 to the field in the frame directly. For a parameter, we don't do
420 it when optimizing because the variable tracking pass will already
421 do the job, */
422 if (VAR_P (decl) || !optimize)
424 tree x
425 = build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
426 field, NULL_TREE);
428 /* If the next declaration is a PARM_DECL pointing to the DECL,
429 we need to adjust its VALUE_EXPR directly, since chains of
430 VALUE_EXPRs run afoul of garbage collection. This occurs
431 in Ada for Out parameters that aren't copied in. */
432 tree next = DECL_CHAIN (decl);
433 if (next
434 && TREE_CODE (next) == PARM_DECL
435 && DECL_HAS_VALUE_EXPR_P (next)
436 && DECL_VALUE_EXPR (next) == decl)
437 SET_DECL_VALUE_EXPR (next, x);
439 SET_DECL_VALUE_EXPR (decl, x);
440 DECL_HAS_VALUE_EXPR_P (decl) = 1;
444 insert_field_into_struct (type, field);
445 *slot = field;
447 if (TREE_CODE (decl) == PARM_DECL)
448 info->any_parm_remapped = true;
451 return *slot;
454 /* Build or return the variable that holds the static chain within
455 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
457 static tree
458 get_chain_decl (struct nesting_info *info)
460 tree decl = info->chain_decl;
462 if (!decl)
464 tree type;
466 type = get_frame_type (info->outer);
467 type = build_pointer_type (type);
469 /* Note that this variable is *not* entered into any BIND_EXPR;
470 the construction of this variable is handled specially in
471 expand_function_start and initialize_inlined_parameters.
472 Note also that it's represented as a parameter. This is more
473 close to the truth, since the initial value does come from
474 the caller. */
475 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
476 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
477 DECL_ARTIFICIAL (decl) = 1;
478 DECL_IGNORED_P (decl) = 1;
479 TREE_USED (decl) = 1;
480 DECL_CONTEXT (decl) = info->context;
481 DECL_ARG_TYPE (decl) = type;
483 /* Tell tree-inline.cc that we never write to this variable, so
484 it can copy-prop the replacement value immediately. */
485 TREE_READONLY (decl) = 1;
487 info->chain_decl = decl;
489 if (dump_file
490 && (dump_flags & TDF_DETAILS)
491 && !DECL_STATIC_CHAIN (info->context))
492 fprintf (dump_file, "Setting static-chain for %s\n",
493 lang_hooks.decl_printable_name (info->context, 2));
495 DECL_STATIC_CHAIN (info->context) = 1;
497 return decl;
500 /* Build or return the field within the non-local frame state that holds
501 the static chain for INFO->CONTEXT. This is the way to walk back up
502 multiple nesting levels. */
504 static tree
505 get_chain_field (struct nesting_info *info)
507 tree field = info->chain_field;
509 if (!field)
511 tree type = build_pointer_type (get_frame_type (info->outer));
513 field = make_node (FIELD_DECL);
514 DECL_NAME (field) = get_identifier ("__chain");
515 TREE_TYPE (field) = type;
516 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
517 DECL_NONADDRESSABLE_P (field) = 1;
519 insert_field_into_struct (get_frame_type (info), field);
521 info->chain_field = field;
523 if (dump_file
524 && (dump_flags & TDF_DETAILS)
525 && !DECL_STATIC_CHAIN (info->context))
526 fprintf (dump_file, "Setting static-chain for %s\n",
527 lang_hooks.decl_printable_name (info->context, 2));
529 DECL_STATIC_CHAIN (info->context) = 1;
531 return field;
534 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
536 static tree
537 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
538 gcall *call)
540 tree t;
542 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
543 gimple_call_set_lhs (call, t);
544 if (! gsi_end_p (*gsi))
545 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
546 gsi_insert_before (gsi, call, GSI_SAME_STMT);
548 return t;
552 /* Copy EXP into a temporary. Allocate the temporary in the context of
553 INFO and insert the initialization statement before GSI. */
555 static tree
556 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
558 tree t;
559 gimple *stmt;
561 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
562 stmt = gimple_build_assign (t, exp);
563 if (! gsi_end_p (*gsi))
564 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
565 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
567 return t;
571 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
573 static tree
574 gsi_gimplify_val (struct nesting_info *info, tree exp,
575 gimple_stmt_iterator *gsi)
577 if (is_gimple_val (exp))
578 return exp;
579 else
580 return init_tmp_var (info, exp, gsi);
583 /* Similarly, but copy from the temporary and insert the statement
584 after the iterator. */
586 static tree
587 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
589 tree t;
590 gimple *stmt;
592 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
593 stmt = gimple_build_assign (exp, t);
594 if (! gsi_end_p (*gsi))
595 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
596 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
598 return t;
601 /* Build or return the type used to represent a nested function trampoline. */
603 static GTY(()) tree trampoline_type;
605 static tree
606 get_trampoline_type (struct nesting_info *info)
608 unsigned align, size;
609 tree t;
611 if (trampoline_type)
612 return trampoline_type;
614 /* When trampolines are created off-stack then the only thing we need in the
615 local frame is a single pointer. */
616 if (flag_trampoline_impl == TRAMPOLINE_IMPL_HEAP)
618 trampoline_type = build_pointer_type (void_type_node);
619 return trampoline_type;
622 align = TRAMPOLINE_ALIGNMENT;
623 size = TRAMPOLINE_SIZE;
625 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
626 then allocate extra space so that we can do dynamic alignment. */
627 if (align > STACK_BOUNDARY)
629 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
630 align = STACK_BOUNDARY;
633 t = build_index_type (size_int (size - 1));
634 t = build_array_type (char_type_node, t);
635 t = build_decl (DECL_SOURCE_LOCATION (info->context),
636 FIELD_DECL, get_identifier ("__data"), t);
637 SET_DECL_ALIGN (t, align);
638 DECL_USER_ALIGN (t) = 1;
640 trampoline_type = make_node (RECORD_TYPE);
641 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
642 TYPE_FIELDS (trampoline_type) = t;
643 layout_type (trampoline_type);
644 DECL_CONTEXT (t) = trampoline_type;
646 return trampoline_type;
649 /* Build or return the type used to represent a nested function descriptor. */
651 static GTY(()) tree descriptor_type;
653 static tree
654 get_descriptor_type (struct nesting_info *info)
656 /* The base alignment is that of a function. */
657 const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
658 tree t;
660 if (descriptor_type)
661 return descriptor_type;
663 t = build_index_type (integer_one_node);
664 t = build_array_type (ptr_type_node, t);
665 t = build_decl (DECL_SOURCE_LOCATION (info->context),
666 FIELD_DECL, get_identifier ("__data"), t);
667 SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
668 DECL_USER_ALIGN (t) = 1;
670 descriptor_type = make_node (RECORD_TYPE);
671 TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
672 TYPE_FIELDS (descriptor_type) = t;
673 layout_type (descriptor_type);
674 DECL_CONTEXT (t) = descriptor_type;
676 return descriptor_type;
679 /* Given DECL, a nested function, find or create an element in the
680 var map for this function. */
682 static tree
683 lookup_element_for_decl (struct nesting_info *info, tree decl,
684 enum insert_option insert)
686 if (insert == NO_INSERT)
688 tree *slot = info->var_map->get (decl);
689 return slot ? *slot : NULL_TREE;
692 tree *slot = &info->var_map->get_or_insert (decl);
693 if (!*slot)
694 *slot = build_tree_list (NULL_TREE, NULL_TREE);
696 return (tree) *slot;
699 /* Given DECL, a nested function, create a field in the non-local
700 frame structure for this function. */
702 static tree
703 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
705 tree field = make_node (FIELD_DECL);
706 DECL_NAME (field) = DECL_NAME (decl);
707 TREE_TYPE (field) = type;
708 TREE_ADDRESSABLE (field) = 1;
709 insert_field_into_struct (get_frame_type (info), field);
710 return field;
713 /* Given DECL, a nested function, find or create a field in the non-local
714 frame structure for a trampoline for this function. */
716 static tree
717 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
718 enum insert_option insert)
720 tree elt, field;
722 elt = lookup_element_for_decl (info, decl, insert);
723 if (!elt)
724 return NULL_TREE;
726 field = TREE_PURPOSE (elt);
728 if (!field && insert == INSERT)
730 field = create_field_for_decl (info, decl, get_trampoline_type (info));
731 TREE_PURPOSE (elt) = field;
732 info->any_tramp_created = true;
735 return field;
738 /* Given DECL, a nested function, find or create a field in the non-local
739 frame structure for a descriptor for this function. */
741 static tree
742 lookup_descr_for_decl (struct nesting_info *info, tree decl,
743 enum insert_option insert)
745 tree elt, field;
747 elt = lookup_element_for_decl (info, decl, insert);
748 if (!elt)
749 return NULL_TREE;
751 field = TREE_VALUE (elt);
753 if (!field && insert == INSERT)
755 field = create_field_for_decl (info, decl, get_descriptor_type (info));
756 TREE_VALUE (elt) = field;
757 info->any_descr_created = true;
760 return field;
763 /* Build or return the field within the non-local frame state that holds
764 the non-local goto "jmp_buf". The buffer itself is maintained by the
765 rtl middle-end as dynamic stack space is allocated. */
767 static tree
768 get_nl_goto_field (struct nesting_info *info)
770 tree field = info->nl_goto_field;
771 if (!field)
773 unsigned size;
774 tree type;
776 /* For __builtin_nonlocal_goto, we need N words. The first is the
777 frame pointer, the rest is for the target's stack pointer save
778 area. The number of words is controlled by STACK_SAVEAREA_MODE;
779 not the best interface, but it'll do for now. */
780 if (Pmode == ptr_mode)
781 type = ptr_type_node;
782 else
783 type = lang_hooks.types.type_for_mode (Pmode, 1);
785 scalar_int_mode mode
786 = as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
787 size = GET_MODE_SIZE (mode);
788 size = size / GET_MODE_SIZE (Pmode);
789 size = size + 1;
791 type = build_array_type
792 (type, build_index_type (size_int (size)));
794 field = make_node (FIELD_DECL);
795 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
796 TREE_TYPE (field) = type;
797 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
798 TREE_ADDRESSABLE (field) = 1;
800 insert_field_into_struct (get_frame_type (info), field);
802 info->nl_goto_field = field;
805 return field;
808 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
810 static void
811 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
812 struct nesting_info *info, gimple_seq *pseq)
814 struct walk_stmt_info wi;
816 memset (&wi, 0, sizeof (wi));
817 wi.info = info;
818 wi.val_only = true;
819 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
823 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
825 static inline void
826 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
827 struct nesting_info *info)
829 gimple_seq body = gimple_body (info->context);
830 walk_body (callback_stmt, callback_op, info, &body);
831 gimple_set_body (info->context, body);
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Walk the header operands with an iterator on an initially empty
     sequence, so that any statements the callback inserts are collected
     and can be appended to the pre-body below.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is an lvalue: walk it with val_only clear.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      /* Initial and final bounds are rvalues.  */
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary expression; walk the index operand as
	 an lvalue and the step as an rvalue.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Append any statements generated during the walk to the pre-body.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
887 /* Similarly for ROOT and all functions nested underneath, depth first. */
889 static void
890 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
891 struct nesting_info *root)
893 struct nesting_info *n;
894 FOR_EACH_NEST_INFO (n, root)
895 walk_function (callback_stmt, callback_op, n);
899 /* We have to check for a fairly pathological case. The operands of function
900 nested function are to be interpreted in the context of the enclosing
901 function. So if any are variably-sized, they will get remapped when the
902 enclosing function is inlined. But that remapping would also have to be
903 done in the types of the PARM_DECLs of the nested function, meaning the
904 argument types of that function will disagree with the arguments in the
905 calls to that function. So we'd either have to make a copy of the nested
906 function corresponding to each time the enclosing function was inlined or
907 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
908 function. The former is not practical. The latter would still require
909 detecting this case to know when to add the conversions. So, for now at
910 least, we don't inline such an enclosing function.
912 We have to do that check recursively, so here return indicating whether
913 FNDECL has such a nested function. ORIG_FN is the function we were
914 trying to inline to use for checking whether any argument is variably
915 modified by anything in it.
917 It would be better to do this in tree-inline.cc so that we could give
918 the appropriate warning for why a function can't be inlined, but that's
919 too late since the nesting structure has already been flattened and
920 adding a flag just to record this fact seems a waste of a flag. */
922 static bool
923 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
925 struct cgraph_node *cgn = cgraph_node::get (fndecl);
926 tree arg;
928 for (cgn = first_nested_function (cgn); cgn;
929 cgn = next_nested_function (cgn))
931 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
932 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
933 return true;
935 if (check_for_nested_with_variably_modified (cgn->decl,
936 orig_fndecl))
937 return true;
940 return false;
943 /* Construct our local datastructure describing the function nesting
944 tree rooted by CGN. */
946 static struct nesting_info *
947 create_nesting_tree (struct cgraph_node *cgn)
949 struct nesting_info *info = XCNEW (struct nesting_info);
950 info->field_map = new hash_map<tree, tree>;
951 info->var_map = new hash_map<tree, tree>;
952 info->mem_refs = new hash_set<tree *>;
953 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
954 info->context = cgn->decl;
955 info->thunk_p = cgn->thunk;
957 for (cgn = first_nested_function (cgn); cgn;
958 cgn = next_nested_function (cgn))
960 struct nesting_info *sub = create_nesting_tree (cgn);
961 sub->outer = info;
962 sub->next = info->inner;
963 info->inner = sub;
966 /* See discussion at check_for_nested_with_variably_modified for a
967 discussion of why this has to be here. */
968 if (check_for_nested_with_variably_modified (info->context, info->context))
969 DECL_UNINLINABLE (info->context) = true;
971 return info;
974 /* Return an expression computing the static chain for TARGET_CONTEXT
975 from INFO->CONTEXT. Insert any necessary computations before TSI. */
977 static tree
978 get_static_chain (struct nesting_info *info, tree target_context,
979 gimple_stmt_iterator *gsi)
981 struct nesting_info *i;
982 tree x;
984 if (info->context == target_context)
986 x = build_addr (info->frame_decl);
987 info->static_chain_added |= 1;
989 else
991 x = get_chain_decl (info);
992 info->static_chain_added |= 2;
994 for (i = info->outer; i->context != target_context; i = i->outer)
996 tree field = get_chain_field (i);
998 x = build_simple_mem_ref_notrap (x);
999 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1000 x = init_tmp_var (info, x, gsi);
1004 return x;
1008 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
1009 frame as seen from INFO->CONTEXT. Insert any necessary computations
1010 before GSI. */
1012 static tree
1013 get_frame_field (struct nesting_info *info, tree target_context,
1014 tree field, gimple_stmt_iterator *gsi)
1016 struct nesting_info *i;
1017 tree x;
1019 if (info->context == target_context)
1021 /* Make sure frame_decl gets created. */
1022 (void) get_frame_type (info);
1023 x = info->frame_decl;
1024 info->static_chain_added |= 1;
1026 else
1028 x = get_chain_decl (info);
1029 info->static_chain_added |= 2;
1031 for (i = info->outer; i->context != target_context; i = i->outer)
1033 tree field = get_chain_field (i);
1035 x = build_simple_mem_ref_notrap (x);
1036 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1037 x = init_tmp_var (info, x, gsi);
1040 x = build_simple_mem_ref_notrap (x);
1043 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1044 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (field);
1045 return x;
1048 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
1050 /* A subroutine of convert_nonlocal_reference_op.  Create a local variable
1051    in the nested function with DECL_VALUE_EXPR set to reference the true
1052    variable in the parent function.  This is used both for debug info
1053    and in OMP lowering.  */
1055 static tree
1056 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
1058   tree target_context;
1059   struct nesting_info *i;
1060   tree x, field, new_decl;
      /* Each DECL gets one replacement per function; reuse a cached one.  */
1062   tree *slot = &info->var_map->get_or_insert (decl);
1064   if (*slot)
1065     return *slot;
1067   target_context = decl_function_context (decl);
1069   /* A copy of the code in get_frame_field, but without the temporaries.  */
1070   if (info->context == target_context)
1072       /* Make sure frame_decl gets created.  */
1073       (void) get_frame_type (info);
1074       x = info->frame_decl;
1075       i = info;
1076       info->static_chain_added |= 1;
1078   else
1080       x = get_chain_decl (info);
1081       info->static_chain_added |= 2;
1082       for (i = info->outer; i->context != target_context; i = i->outer)
1084 	  field = get_chain_field (i);
1085 	  x = build_simple_mem_ref_notrap (x);
1086 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1088       x = build_simple_mem_ref_notrap (x);
1091   field = lookup_field_for_decl (i, decl, INSERT);
1092   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
      /* If the frame holds only a pointer to DECL, dereference it.  */
1093   if (use_pointer_in_frame (decl))
1094     x = build_simple_mem_ref_notrap (x);
1096   /* ??? We should be remapping types as well, surely.  */
1097   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1098 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1099   DECL_CONTEXT (new_decl) = info->context;
1100   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1101   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1102   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1103   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1104   TREE_READONLY (new_decl) = TREE_READONLY (decl);
1105   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1106   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1107   if ((TREE_CODE (decl) == PARM_DECL
1108        || TREE_CODE (decl) == RESULT_DECL
1109        || VAR_P (decl))
1110       && DECL_BY_REFERENCE (decl))
1111     DECL_BY_REFERENCE (new_decl) = 1;
      /* The new decl is a pure alias: reads/writes go through X.  */
1113   SET_DECL_VALUE_EXPR (new_decl, x);
1114   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1116   *slot = new_decl;
      /* Chain onto debug_var_chain so it is later declared in the body.  */
1117   DECL_CHAIN (new_decl) = info->debug_var_chain;
1118   info->debug_var_chain = new_decl;
      /* At -O0, also create debug decls for any nonlocal VLA bounds that
	 appear in DECL's type.  */
1120   if (!optimize
1121       && info->context != target_context
1122       && variably_modified_type_p (TREE_TYPE (decl), NULL))
1123     note_nonlocal_vla_type (info, TREE_TYPE (decl));
1125   return new_decl;
1129 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1130    and PARM_DECLs that belong to outer functions.
1132    The rewrite will involve some number of structure accesses back up
1133    the static chain.  E.g. for a variable FOO up one nesting level it'll
1134    be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
1135    indirections apply to decls for which use_pointer_in_frame is true.  */
1137 static tree
1138 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
1140   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1141   struct nesting_info *const info = (struct nesting_info *) wi->info;
1142   tree t = *tp;
1144   *walk_subtrees = 0;
1145   switch (TREE_CODE (t))
1147     case VAR_DECL:
1148       /* Non-automatic variables are never processed.  */
1149       if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1150 	break;
1151       /* FALLTHRU */
1153     case PARM_DECL:
1155 	tree x, target_context = decl_function_context (t);
	/* A decl belonging to this very function needs no rewriting.  */
1157 	if (info->context == target_context)
1158 	  break;
1160 	wi->changed = true;
	/* Decls in suppress_expansion (set up by the OMP clause walkers)
	   get a DECL_VALUE_EXPR alias instead of explicit loads.  */
1162 	if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1163 	  x = get_nonlocal_debug_decl (info, t);
1164 	else
1166 	    struct nesting_info *i = info;
1167 	    while (i && i->context != target_context)
1168 	      i = i->outer;
1169 	    /* If none of the outer contexts is the target context, this means
1170 	       that the VAR or PARM_DECL is referenced in a wrong context.  */
1171 	    if (!i)
1172 	      internal_error ("%s from %s referenced in %s",
1173 			      IDENTIFIER_POINTER (DECL_NAME (t)),
1174 			      IDENTIFIER_POINTER (DECL_NAME (target_context)),
1175 			      IDENTIFIER_POINTER (DECL_NAME (info->context)));
1177 	    x = lookup_field_for_decl (i, t, INSERT);
1178 	    x = get_frame_field (info, target_context, x, &wi->gsi);
1180 	    if (use_pointer_in_frame (t))
1181 		x = init_tmp_var (info, x, &wi->gsi);
1182 		x = build_simple_mem_ref_notrap (x);
	/* In a value context, copy through a temporary; for an LHS use a
	   temporary whose value is stored back after the statement.  */
1186 	if (wi->val_only)
1188 	    if (wi->is_lhs)
1189 	      x = save_tmp_var (info, x, &wi->gsi);
1190 	    else
1191 	      x = init_tmp_var (info, x, &wi->gsi);
1194 	*tp = x;
1196       break;
1198     case LABEL_DECL:
1199       /* We're taking the address of a label from a parent function, but
1200 	 this is not itself a non-local goto.  Mark the label such that it
1201 	 will not be deleted, much as we would with a label address in
1202 	 static storage.  */
1203       if (decl_function_context (t) != info->context)
1204 	FORCED_LABEL (t) = 1;
1205       break;
1207     case ADDR_EXPR:
1209 	bool save_val_only = wi->val_only;
	/* Walk the operand in address context so no temporary copy is
	   forced for the addressed object itself.  */
1211 	wi->val_only = false;
1212 	wi->is_lhs = false;
1213 	wi->changed = false;
1214 	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
1215 	wi->val_only = true;
1217 	if (wi->changed)
1219 	    tree save_context;
1221 	    /* If we changed anything, we might no longer be directly
1222 	       referencing a decl.  */
1223 	    save_context = current_function_decl;
1224 	    current_function_decl = info->context;
1225 	    recompute_tree_invariant_for_addr_expr (t);
1227 	    /* If the callback converted the address argument in a context
1228 	       where we only accept variables (and min_invariant, presumably),
1229 	       then compute the address into a temporary.  */
1230 	    if (save_val_only)
1231 	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1232 				      t, &wi->gsi);
1233 	    current_function_decl = save_context;
1236       break;
1238     case REALPART_EXPR:
1239     case IMAGPART_EXPR:
1240     case COMPONENT_REF:
1241     case ARRAY_REF:
1242     case ARRAY_RANGE_REF:
1243     case BIT_FIELD_REF:
1244       /* Go down this entire nest and just look at the final prefix and
1245 	 anything that describes the references.  Otherwise, we lose track
1246 	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
1247       wi->val_only = true;
1248       wi->is_lhs = false;
1249       for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1251 	  if (TREE_CODE (t) == COMPONENT_REF)
1252 	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1253 		       NULL);
1254 	  else if (TREE_CODE (t) == ARRAY_REF
1255 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	      /* Walk index, lower bound and element size operands.  */
1257 	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1258 			 wi, NULL);
1259 	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1260 			 wi, NULL);
1261 	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1262 			 wi, NULL);
      /* TP now points at the base object; walk it in address context.  */
1265       wi->val_only = false;
1266       walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1267       break;
1269     case VIEW_CONVERT_EXPR:
1270       /* Just request to look at the subtrees, leaving val_only and lhs
1271 	 untouched.  This might actually be for !val_only + lhs, in which
1272 	 case we don't want to force a replacement by a temporary.  */
1273       *walk_subtrees = 1;
1274       break;
1276     default:
1277       if (!IS_TYPE_OR_DECL_P (t))
1279 	  *walk_subtrees = 1;
1280 	  wi->val_only = true;
1281 	  wi->is_lhs = false;
1283       break;
1286   return NULL_TREE;
1289 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1290 struct walk_stmt_info *);
1292 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1293    and PARM_DECLs that belong to outer functions.
   Returns true if any clause required access through the static chain,
   in which case the caller must arrange for the chain to be available.  */
1295 static bool
1296 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1298   struct nesting_info *const info = (struct nesting_info *) wi->info;
1299   bool need_chain = false, need_stmts = false;
1300   tree clause, decl, *pdecl;
1301   int dummy;
1302   bitmap new_suppress;
      /* Work on a copy of suppress_expansion; it is installed below and
	 restored by the caller after walking the construct body.  */
1304   new_suppress = BITMAP_GGC_ALLOC ();
1305   bitmap_copy (new_suppress, info->suppress_expansion);
1307   for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1309       pdecl = NULL;
1310       switch (OMP_CLAUSE_CODE (clause))
1312 	case OMP_CLAUSE_REDUCTION:
1313 	case OMP_CLAUSE_IN_REDUCTION:
1314 	case OMP_CLAUSE_TASK_REDUCTION:
1315 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1316 	    need_stmts = true;
	  /* For array-section reductions the decl is wrapped in a MEM_REF
	     (possibly via POINTER_PLUS_EXPR / ADDR_EXPR); dig it out.  */
1317 	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
1319 	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
1320 	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
1321 		pdecl = &TREE_OPERAND (*pdecl, 0);
1322 	      if (INDIRECT_REF_P (*pdecl)
1323 		  || TREE_CODE (*pdecl) == ADDR_EXPR)
1324 		pdecl = &TREE_OPERAND (*pdecl, 0);
1326 	  goto do_decl_clause;
1328 	case OMP_CLAUSE_LASTPRIVATE:
1329 	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1330 	    need_stmts = true;
1331 	  goto do_decl_clause;
1333 	case OMP_CLAUSE_LINEAR:
1334 	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1335 	    need_stmts = true;
1336 	  wi->val_only = true;
1337 	  wi->is_lhs = false;
1338 	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1339 					 &dummy, wi);
1340 	  goto do_decl_clause;
1342 	case OMP_CLAUSE_PRIVATE:
1343 	case OMP_CLAUSE_FIRSTPRIVATE:
1344 	case OMP_CLAUSE_COPYPRIVATE:
1345 	case OMP_CLAUSE_SHARED:
1346 	case OMP_CLAUSE_ENTER:
1347 	case OMP_CLAUSE_LINK:
1348 	case OMP_CLAUSE_USE_DEVICE_PTR:
1349 	case OMP_CLAUSE_USE_DEVICE_ADDR:
1350 	case OMP_CLAUSE_HAS_DEVICE_ADDR:
1351 	case OMP_CLAUSE_IS_DEVICE_PTR:
1352 	case OMP_CLAUSE_DETACH:
1353 	do_decl_clause:
1354 	  if (pdecl == NULL)
1355 	    pdecl = &OMP_CLAUSE_DECL (clause);
1356 	  decl = *pdecl;
1357 	  if (VAR_P (decl)
1358 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1359 	    break;
	  /* A nonlocal decl: replace with a debug decl, and suppress the
	     generic expansion for it inside this construct.  */
1360 	  if (decl_function_context (decl) != info->context)
1362 	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1363 		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1364 	      bitmap_set_bit (new_suppress, DECL_UID (decl));
1365 	      *pdecl = get_nonlocal_debug_decl (info, decl);
1366 	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1367 		need_chain = true;
1369 	  break;
1371 	case OMP_CLAUSE_SCHEDULE:
1372 	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1373 	    break;
1374 	  /* FALLTHRU */
1375 	case OMP_CLAUSE_FINAL:
1376 	case OMP_CLAUSE_IF:
1377 	case OMP_CLAUSE_SELF:
1378 	case OMP_CLAUSE_NUM_THREADS:
1379 	case OMP_CLAUSE_DEPEND:
1380 	case OMP_CLAUSE_DOACROSS:
1381 	case OMP_CLAUSE_DEVICE:
1382 	case OMP_CLAUSE_NUM_TEAMS:
1383 	case OMP_CLAUSE_THREAD_LIMIT:
1384 	case OMP_CLAUSE_SAFELEN:
1385 	case OMP_CLAUSE_SIMDLEN:
1386 	case OMP_CLAUSE_PRIORITY:
1387 	case OMP_CLAUSE_GRAINSIZE:
1388 	case OMP_CLAUSE_NUM_TASKS:
1389 	case OMP_CLAUSE_HINT:
1390 	case OMP_CLAUSE_FILTER:
1391 	case OMP_CLAUSE_NUM_GANGS:
1392 	case OMP_CLAUSE_NUM_WORKERS:
1393 	case OMP_CLAUSE_VECTOR_LENGTH:
1394 	case OMP_CLAUSE_GANG:
1395 	case OMP_CLAUSE_WORKER:
1396 	case OMP_CLAUSE_VECTOR:
1397 	case OMP_CLAUSE_ASYNC:
1398 	case OMP_CLAUSE_WAIT:
1399 	  /* Several OpenACC clauses have optional arguments.  Check if they
1400 	     are present.  */
1401 	  if (OMP_CLAUSE_OPERAND (clause, 0))
1403 	      wi->val_only = true;
1404 	      wi->is_lhs = false;
1405 	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1406 					     &dummy, wi);
1409 	  /* The gang clause accepts two arguments.  */
1410 	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1411 	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1413 	      wi->val_only = true;
1414 	      wi->is_lhs = false;
1415 	      convert_nonlocal_reference_op
1416 		(&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1418 	  break;
1420 	case OMP_CLAUSE_DIST_SCHEDULE:
1421 	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1423 	      wi->val_only = true;
1424 	      wi->is_lhs = false;
1425 	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1426 					     &dummy, wi);
1428 	  break;
1430 	case OMP_CLAUSE_MAP:
1431 	case OMP_CLAUSE_TO:
1432 	case OMP_CLAUSE_FROM:
1433 	  if (OMP_CLAUSE_SIZE (clause))
1435 	      wi->val_only = true;
1436 	      wi->is_lhs = false;
1437 	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1438 					     &dummy, wi);
	  /* A bare decl is handled like the decl clauses above; anything
	     more complex (array sections etc.) is walked as a tree.  */
1440 	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
1441 	    goto do_decl_clause;
1442 	  wi->val_only = true;
1443 	  wi->is_lhs = false;
1444 	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1445 		     wi, NULL);
1446 	  break;
1448 	case OMP_CLAUSE_ALIGNED:
1449 	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1451 	      wi->val_only = true;
1452 	      wi->is_lhs = false;
1453 	      convert_nonlocal_reference_op
1454 		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1456 	  /* FALLTHRU */
1457 	case OMP_CLAUSE_NONTEMPORAL:
1458 	do_decl_clause_no_supp:
1459 	  /* Like do_decl_clause, but don't add any suppression.  */
1460 	  decl = OMP_CLAUSE_DECL (clause);
1461 	  if (VAR_P (decl)
1462 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1463 	    break;
1464 	  if (decl_function_context (decl) != info->context)
1466 	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1467 	      need_chain = true;
1469 	  break;
1471 	case OMP_CLAUSE_ALLOCATE:
1472 	  if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause))
1474 	      wi->val_only = true;
1475 	      wi->is_lhs = false;
1476 	      convert_nonlocal_reference_op
1477 		(&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), &dummy, wi);
1479 	  goto do_decl_clause_no_supp;
1481 	case OMP_CLAUSE_NOWAIT:
1482 	case OMP_CLAUSE_ORDERED:
1483 	case OMP_CLAUSE_DEFAULT:
1484 	case OMP_CLAUSE_COPYIN:
1485 	case OMP_CLAUSE_COLLAPSE:
1486 	case OMP_CLAUSE_TILE:
1487 	case OMP_CLAUSE_UNTIED:
1488 	case OMP_CLAUSE_MERGEABLE:
1489 	case OMP_CLAUSE_PROC_BIND:
1490 	case OMP_CLAUSE_NOGROUP:
1491 	case OMP_CLAUSE_THREADS:
1492 	case OMP_CLAUSE_SIMD:
1493 	case OMP_CLAUSE_DEFAULTMAP:
1494 	case OMP_CLAUSE_ORDER:
1495 	case OMP_CLAUSE_SEQ:
1496 	case OMP_CLAUSE_INDEPENDENT:
1497 	case OMP_CLAUSE_AUTO:
1498 	case OMP_CLAUSE_IF_PRESENT:
1499 	case OMP_CLAUSE_FINALIZE:
1500 	case OMP_CLAUSE_BIND:
1501 	case OMP_CLAUSE__CONDTEMP_:
1502 	case OMP_CLAUSE__SCANTEMP_:
1503 	  break;
1505 	  /* The following clause belongs to the OpenACC cache directive, which
1506 	     is discarded during gimplification.  */
1507 	case OMP_CLAUSE__CACHE_:
1508 	  /* The following clauses are only allowed in the OpenMP declare simd
1509 	     directive, so not seen here.  */
1510 	case OMP_CLAUSE_UNIFORM:
1511 	case OMP_CLAUSE_INBRANCH:
1512 	case OMP_CLAUSE_NOTINBRANCH:
1513 	  /* The following clauses are only allowed on OpenMP cancel and
1514 	     cancellation point directives, which at this point have already
1515 	     been lowered into a function call.  */
1516 	case OMP_CLAUSE_FOR:
1517 	case OMP_CLAUSE_PARALLEL:
1518 	case OMP_CLAUSE_SECTIONS:
1519 	case OMP_CLAUSE_TASKGROUP:
1520 	  /* The following clauses are only added during OMP lowering; nested
1521 	     function decomposition happens before that.  */
1522 	case OMP_CLAUSE__LOOPTEMP_:
1523 	case OMP_CLAUSE__REDUCTEMP_:
1524 	case OMP_CLAUSE__SIMDUID_:
1525 	case OMP_CLAUSE__SIMT_:
1526 	  /* The following clauses are only allowed on OpenACC 'routine'
1527 	     directives, not seen here.  */
1528 	case OMP_CLAUSE_NOHOST:
1529 	  /* Anything else.  */
1530 	default:
1531 	  gcc_unreachable ();
1535   info->suppress_expansion = new_suppress;
      /* Second pass: walk the gimple sequences attached to reduction,
	 lastprivate and linear clauses, with the suppressions installed.  */
1537   if (need_stmts)
1538     for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1539       switch (OMP_CLAUSE_CODE (clause))
1541 	case OMP_CLAUSE_REDUCTION:
1542 	case OMP_CLAUSE_IN_REDUCTION:
1543 	case OMP_CLAUSE_TASK_REDUCTION:
1544 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	      /* Temporarily move the placeholder decls into this context
		 while walking the init/merge sequences.  */
1546 	      tree old_context
1547 		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1548 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1549 		= info->context;
1550 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1551 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1552 		  = info->context;
1553 	      tree save_local_var_chain = info->new_local_var_chain;
1554 	      info->new_local_var_chain = NULL;
1555 	      gimple_seq *seq = &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause);
1556 	      walk_body (convert_nonlocal_reference_stmt,
1557 			 convert_nonlocal_reference_op, info, seq);
1558 	      if (info->new_local_var_chain)
1559 		declare_vars (info->new_local_var_chain,
1560 			      gimple_seq_first_stmt (*seq), false);
1561 	      info->new_local_var_chain = NULL;
1562 	      seq = &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause);
1563 	      walk_body (convert_nonlocal_reference_stmt,
1564 			 convert_nonlocal_reference_op, info, seq);
1565 	      if (info->new_local_var_chain)
1566 		declare_vars (info->new_local_var_chain,
1567 			      gimple_seq_first_stmt (*seq), false);
1568 	      info->new_local_var_chain = save_local_var_chain;
1569 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1570 		= old_context;
1571 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1572 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1573 		  = old_context;
1575 	  break;
1577 	case OMP_CLAUSE_LASTPRIVATE:
1578 	case OMP_CLAUSE_LINEAR:
1580 	    tree save_local_var_chain = info->new_local_var_chain;
1581 	    info->new_local_var_chain = NULL;
1582 	    gimple_seq *seq;
1583 	    if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE)
1584 	      seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
1585 	    else
1586 	      seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause);
1587 	    walk_body (convert_nonlocal_reference_stmt,
1588 		       convert_nonlocal_reference_op, info, seq);
1589 	    if (info->new_local_var_chain)
		/* declare_vars needs a GIMPLE_BIND to attach vars to;
		   wrap the sequence in one if necessary.  */
1591 		gimple *g = gimple_seq_first_stmt (*seq);
1592 		if (gimple_code (g) != GIMPLE_BIND)
1594 		    g = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
1595 		    *seq = NULL;
1596 		    gimple_seq_add_stmt_without_update (seq, g);
1598 		declare_vars (info->new_local_var_chain,
1599 			      gimple_seq_first_stmt (*seq), false);
1601 	    info->new_local_var_chain = save_local_var_chain;
1603 	  break;
1605 	default:
1606 	  break;
1609   return need_chain;
1612 /* Create nonlocal debug decls for nonlocal VLA array bounds.  */
1614 static void
1615 note_nonlocal_vla_type (struct nesting_info *info, tree type)
      /* Strip unnamed pointer types to get at the underlying type.  */
1617   while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1618     type = TREE_TYPE (type);
      /* Look through typedefs to the original type.  */
1620   if (TYPE_NAME (type)
1621       && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1622       && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1623     type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1625   while (POINTER_TYPE_P (type)
1626 	 || VECTOR_TYPE_P (type)
1627 	 || TREE_CODE (type) == FUNCTION_TYPE
1628 	 || TREE_CODE (type) == METHOD_TYPE)
1629     type = TREE_TYPE (type);
1631   if (TREE_CODE (type) == ARRAY_TYPE)
1633       tree domain, t;
      /* Recurse on the element type, then create debug decls for any
	 domain bound that is a decl from an enclosing function.  */
1635       note_nonlocal_vla_type (info, TREE_TYPE (type));
1636       domain = TYPE_DOMAIN (type);
1637       if (domain)
1639 	  t = TYPE_MIN_VALUE (domain);
1640 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1641 	      && decl_function_context (t) != info->context)
1642 	    get_nonlocal_debug_decl (info, t);
1643 	  t = TYPE_MAX_VALUE (domain);
1644 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1645 	      && decl_function_context (t) != info->context)
1646 	    get_nonlocal_debug_decl (info, t);
1651 /* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
1652    PARM_DECLs that belong to outer functions.  This handles statements
1653    that are not handled via the standard recursion done in
1654    walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
1655    convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
1656    operands of STMT have been handled by this function.  */
1658 static tree
1659 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1660 				 struct walk_stmt_info *wi)
1662   struct nesting_info *info = (struct nesting_info *) wi->info;
1663   tree save_local_var_chain;
1664   bitmap save_suppress;
1665   gimple *stmt = gsi_stmt (*gsi);
1667   switch (gimple_code (stmt))
1669     case GIMPLE_GOTO:
1670       /* Don't walk non-local gotos for now.  */
1671       if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1673 	  wi->val_only = true;
1674 	  wi->is_lhs = false;
1675 	  *handled_ops_p = false;
1676 	  return NULL_TREE;
1678       break;
1680     case GIMPLE_OMP_TEAMS:
      /* A host teams construct is handled like other taskreg-style
	 constructs below; the non-host case only processes clauses.  */
1681       if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
1683 	  save_suppress = info->suppress_expansion;
1684 	  convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
1685 					wi);
1686 	  walk_body (convert_nonlocal_reference_stmt,
1687 		     convert_nonlocal_reference_op, info,
1688 		     gimple_omp_body_ptr (stmt));
1689 	  info->suppress_expansion = save_suppress;
1690 	  break;
1692       /* FALLTHRU */
1694     case GIMPLE_OMP_PARALLEL:
1695     case GIMPLE_OMP_TASK:
1696       save_suppress = info->suppress_expansion;
      /* If any clause needs the static chain, pass it to the construct
	 body as a firstprivate chain decl.  */
1697       if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1698 					wi))
1700 	  tree c, decl;
1701 	  decl = get_chain_decl (info);
1702 	  c = build_omp_clause (gimple_location (stmt),
1703 				OMP_CLAUSE_FIRSTPRIVATE);
1704 	  OMP_CLAUSE_DECL (c) = decl;
1705 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1706 	  gimple_omp_taskreg_set_clauses (stmt, c);
1709       save_local_var_chain = info->new_local_var_chain;
1710       info->new_local_var_chain = NULL;
1712       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1713 	         info, gimple_omp_body_ptr (stmt));
      /* Vars created while walking the body belong inside the construct,
	 not in the enclosing function.  */
1715       if (info->new_local_var_chain)
1716 	declare_vars (info->new_local_var_chain,
1717 		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
1718 		      false);
1719       info->new_local_var_chain = save_local_var_chain;
1720       info->suppress_expansion = save_suppress;
1721       break;
1723     case GIMPLE_OMP_FOR:
1724       save_suppress = info->suppress_expansion;
1725       convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1726       walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1727 			   convert_nonlocal_reference_stmt,
1728 	  		   convert_nonlocal_reference_op, info);
1729       walk_body (convert_nonlocal_reference_stmt,
1730 	  	 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1731       info->suppress_expansion = save_suppress;
1732       break;
1734     case GIMPLE_OMP_SECTIONS:
1735       save_suppress = info->suppress_expansion;
1736       convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1737       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1738 	  	 info, gimple_omp_body_ptr (stmt));
1739       info->suppress_expansion = save_suppress;
1740       break;
1742     case GIMPLE_OMP_SINGLE:
1743       save_suppress = info->suppress_expansion;
1744       convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1745       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1746 	  	 info, gimple_omp_body_ptr (stmt));
1747       info->suppress_expansion = save_suppress;
1748       break;
1750     case GIMPLE_OMP_SCOPE:
1751       save_suppress = info->suppress_expansion;
1752       convert_nonlocal_omp_clauses (gimple_omp_scope_clauses_ptr (stmt), wi);
1753       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1754 	  	 info, gimple_omp_body_ptr (stmt));
1755       info->suppress_expansion = save_suppress;
1756       break;
1758     case GIMPLE_OMP_TASKGROUP:
1759       save_suppress = info->suppress_expansion;
1760       convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
1761       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1762 		 info, gimple_omp_body_ptr (stmt));
1763       info->suppress_expansion = save_suppress;
1764       break;
1766     case GIMPLE_OMP_TARGET:
      /* Only an offloaded target region needs the chain mapped across
	 the device boundary; otherwise just process clauses and body.  */
1767       if (!is_gimple_omp_offloaded (stmt))
1769 	  save_suppress = info->suppress_expansion;
1770 	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1771 					wi);
1772 	  info->suppress_expansion = save_suppress;
1773 	  walk_body (convert_nonlocal_reference_stmt,
1774 		     convert_nonlocal_reference_op, info,
1775 		     gimple_omp_body_ptr (stmt));
1776 	  break;
1778       save_suppress = info->suppress_expansion;
1779       if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1780 					wi))
	  /* Map the chain record to the device by value (GOMP_MAP_TO).  */
1782 	  tree c, decl;
1783 	  decl = get_chain_decl (info);
1784 	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1785 	  OMP_CLAUSE_DECL (c) = decl;
1786 	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1787 	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1788 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1789 	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1792       save_local_var_chain = info->new_local_var_chain;
1793       info->new_local_var_chain = NULL;
1795       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1796 	         info, gimple_omp_body_ptr (stmt));
1798       if (info->new_local_var_chain)
1799 	declare_vars (info->new_local_var_chain,
1800 		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
1801 		      false);
1802       info->new_local_var_chain = save_local_var_chain;
1803       info->suppress_expansion = save_suppress;
1804       break;
1806     case GIMPLE_OMP_SECTION:
1807     case GIMPLE_OMP_STRUCTURED_BLOCK:
1808     case GIMPLE_OMP_MASTER:
1809     case GIMPLE_OMP_MASKED:
1810     case GIMPLE_OMP_ORDERED:
1811     case GIMPLE_OMP_SCAN:
1812       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1813 	  	 info, gimple_omp_body_ptr (stmt));
1814       break;
1816     case GIMPLE_BIND:
1818 	gbind *bind_stmt = as_a <gbind *> (stmt);
1820 	for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1821 	  if (TREE_CODE (var) == NAMELIST_DECL)
1823 	      /* Adjust decls mentioned in NAMELIST_DECL.  */
1824 	      tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1825 	      tree decl;
1826 	      unsigned int i;
1828 	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1830 		  if (VAR_P (decl)
1831 		      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1832 		    continue;
1833 		  if (decl_function_context (decl) != info->context)
1834 		    CONSTRUCTOR_ELT (decls, i)->value
1835 		      = get_nonlocal_debug_decl (info, decl);
1839 	*handled_ops_p = false;
1840 	return NULL_TREE;
1842     case GIMPLE_COND:
1843       wi->val_only = true;
1844       wi->is_lhs = false;
1845       *handled_ops_p = false;
1846       return NULL_TREE;
1848     case GIMPLE_ASSIGN:
      /* A clobber of a nonlocal decl is meaningless here; drop it rather
	 than rewriting it into a frame store.  */
1849       if (gimple_clobber_p (stmt))
1851 	  tree lhs = gimple_assign_lhs (stmt);
1852 	  if (DECL_P (lhs)
1853 	      && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
1854 	      && decl_function_context (lhs) != info->context)
1856 	      gsi_replace (gsi, gimple_build_nop (), true);
1857 	      break;
1860       *handled_ops_p = false;
1861       return NULL_TREE;
1863     default:
1864       /* For every other statement that we are not interested in
1865 	 handling here, let the walker traverse the operands.  */
1866       *handled_ops_p = false;
1867       return NULL_TREE;
1870   /* We have handled all of STMT operands, no need to traverse the operands.  */
1871   *handled_ops_p = true;
1872   return NULL_TREE;
1876 /* A subroutine of convert_local_reference.  Create a local variable
1877    in the parent function with DECL_VALUE_EXPR set to reference the
1878    field in FRAME.  This is used both for debug info and in OMP
1879    lowering.  */
1881 static tree
1882 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1884   tree x, new_decl;
      /* One replacement per DECL; reuse a cached one if present.  */
1886   tree *slot = &info->var_map->get_or_insert (decl);
1887   if (*slot)
1888     return *slot;
1890   /* Make sure frame_decl gets created.  */
1891   (void) get_frame_type (info);
1892   x = info->frame_decl;
1893   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1895   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1896 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1897   DECL_CONTEXT (new_decl) = info->context;
1898   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1899   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1900   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1901   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1902   TREE_READONLY (new_decl) = TREE_READONLY (decl);
1903   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1904   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1905   if ((TREE_CODE (decl) == PARM_DECL
1906        || TREE_CODE (decl) == RESULT_DECL
1907        || VAR_P (decl))
1908       && DECL_BY_REFERENCE (decl))
1909     DECL_BY_REFERENCE (new_decl) = 1;
      /* The new decl aliases the frame field via DECL_VALUE_EXPR.  */
1911   SET_DECL_VALUE_EXPR (new_decl, x);
1912   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1913   *slot = new_decl;
1915   DECL_CHAIN (new_decl) = info->debug_var_chain;
1916   info->debug_var_chain = new_decl;
1918   /* Do not emit debug info twice.  */
1919   DECL_IGNORED_P (decl) = 1;
1921   return new_decl;
1925 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1926    and PARM_DECLs that were referenced by inner nested functions.
1927    The rewrite will be a structure reference to the local frame variable.  */
1929 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1931 static tree
1932 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1934   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1935   struct nesting_info *const info = (struct nesting_info *) wi->info;
1936   tree t = *tp, field, x;
1937   bool save_val_only;
1939   *walk_subtrees = 0;
1940   switch (TREE_CODE (t))
1942     case VAR_DECL:
1943       /* Non-automatic variables are never processed.  */
1944       if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1945 	break;
1946       /* FALLTHRU */
1948     case PARM_DECL:
1949       if (t != info->frame_decl && decl_function_context (t) == info->context)
1951 	  /* If we copied a pointer to the frame, then the original decl
1952 	     is used unchanged in the parent function.  */
1953 	  if (use_pointer_in_frame (t))
1954 	    break;
1956 	  /* No need to transform anything if no child references the
1957 	     variable.  */
1958 	  field = lookup_field_for_decl (info, t, NO_INSERT);
1959 	  if (!field)
1960 	    break;
1961 	  wi->changed = true;
	  /* Suppressed decls (from OMP clause handling) get an aliasing
	     debug decl; others get an explicit frame-field reference.  */
1963 	  if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1964 	    x = get_local_debug_decl (info, t, field);
1965 	  else
1966 	    x = get_frame_field (info, info->context, field, &wi->gsi);
1968 	  if (wi->val_only)
1970 	      if (wi->is_lhs)
1971 		x = save_tmp_var (info, x, &wi->gsi);
1972 	      else
1973 		x = init_tmp_var (info, x, &wi->gsi);
1976 	  *tp = x;
1978       break;
1980     case ADDR_EXPR:
1981       save_val_only = wi->val_only;
1982       wi->val_only = false;
1983       wi->is_lhs = false;
1984       wi->changed = false;
1985       walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1986       wi->val_only = save_val_only;
1988       /* If we converted anything ... */
1989       if (wi->changed)
1991 	  tree save_context;
1993 	  /* Then the frame decl is now addressable.  */
1994 	  TREE_ADDRESSABLE (info->frame_decl) = 1;
1996 	  save_context = current_function_decl;
1997 	  current_function_decl = info->context;
1998 	  recompute_tree_invariant_for_addr_expr (t);
2000 	  /* If we are in a context where we only accept values, then
2001 	     compute the address into a temporary.  */
2002 	  if (save_val_only)
2003 	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
2004 				    t, &wi->gsi);
2005 	  current_function_decl = save_context;
2007       break;
2009     case REALPART_EXPR:
2010     case IMAGPART_EXPR:
2011     case COMPONENT_REF:
2012     case ARRAY_REF:
2013     case ARRAY_RANGE_REF:
2014     case BIT_FIELD_REF:
2015       /* Go down this entire nest and just look at the final prefix and
2016 	 anything that describes the references.  Otherwise, we lose track
2017 	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
2018       save_val_only = wi->val_only;
2019       wi->val_only = true;
2020       wi->is_lhs = false;
2021       for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
2023 	  if (TREE_CODE (t) == COMPONENT_REF)
2024 	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
2025 		       NULL);
2026 	  else if (TREE_CODE (t) == ARRAY_REF
2027 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	      /* Walk index, lower bound and element size operands.  */
2029 	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
2030 			 NULL);
2031 	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
2032 			 NULL);
2033 	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
2034 			 NULL);
2037       wi->val_only = false;
2038       walk_tree (tp, convert_local_reference_op, wi, NULL);
2039       wi->val_only = save_val_only;
2040       break;
2042     case MEM_REF:
2043       save_val_only = wi->val_only;
2044       wi->val_only = true;
2045       wi->is_lhs = false;
2046       walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
2047 		 wi, NULL);
2048       /* We need to re-fold the MEM_REF as component references as
2049 	 part of a ADDR_EXPR address are not allowed.  But we cannot
2050 	 fold here, as the chain record type is not yet finalized.  */
2051       if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
2052 	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2053 	info->mem_refs->add (tp);
2054       wi->val_only = save_val_only;
2055       break;
2057     case VIEW_CONVERT_EXPR:
2058       /* Just request to look at the subtrees, leaving val_only and lhs
2059 	 untouched.  This might actually be for !val_only + lhs, in which
2060 	 case we don't want to force a replacement by a temporary.  */
2061       *walk_subtrees = 1;
2062       break;
2064     default:
2065       if (!IS_TYPE_OR_DECL_P (t))
2067 	  *walk_subtrees = 1;
2068 	  wi->val_only = true;
2069 	  wi->is_lhs = false;
2071       break;
2074   return NULL_TREE;
2077 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
2078 struct walk_stmt_info *);
2080 /* Helper for convert_local_reference. Convert all the references in
2081 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
2083 static bool
2084 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
2086 struct nesting_info *const info = (struct nesting_info *) wi->info;
2087 bool need_frame = false, need_stmts = false;
2088 tree clause, decl, *pdecl;
2089 int dummy;
2090 bitmap new_suppress;
2092 new_suppress = BITMAP_GGC_ALLOC ();
2093 bitmap_copy (new_suppress, info->suppress_expansion);
2095 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2097 pdecl = NULL;
2098 switch (OMP_CLAUSE_CODE (clause))
2100 case OMP_CLAUSE_REDUCTION:
2101 case OMP_CLAUSE_IN_REDUCTION:
2102 case OMP_CLAUSE_TASK_REDUCTION:
2103 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2104 need_stmts = true;
2105 if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
2107 pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
2108 if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
2109 pdecl = &TREE_OPERAND (*pdecl, 0);
2110 if (INDIRECT_REF_P (*pdecl)
2111 || TREE_CODE (*pdecl) == ADDR_EXPR)
2112 pdecl = &TREE_OPERAND (*pdecl, 0);
2114 goto do_decl_clause;
2116 case OMP_CLAUSE_LASTPRIVATE:
2117 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
2118 need_stmts = true;
2119 goto do_decl_clause;
2121 case OMP_CLAUSE_LINEAR:
2122 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
2123 need_stmts = true;
2124 wi->val_only = true;
2125 wi->is_lhs = false;
2126 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
2127 wi);
2128 goto do_decl_clause;
2130 case OMP_CLAUSE_PRIVATE:
2131 case OMP_CLAUSE_FIRSTPRIVATE:
2132 case OMP_CLAUSE_COPYPRIVATE:
2133 case OMP_CLAUSE_SHARED:
2134 case OMP_CLAUSE_ENTER:
2135 case OMP_CLAUSE_LINK:
2136 case OMP_CLAUSE_USE_DEVICE_PTR:
2137 case OMP_CLAUSE_USE_DEVICE_ADDR:
2138 case OMP_CLAUSE_HAS_DEVICE_ADDR:
2139 case OMP_CLAUSE_IS_DEVICE_PTR:
2140 case OMP_CLAUSE_DETACH:
2141 do_decl_clause:
2142 if (pdecl == NULL)
2143 pdecl = &OMP_CLAUSE_DECL (clause);
2144 decl = *pdecl;
2145 if (VAR_P (decl)
2146 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2147 break;
2148 if (decl_function_context (decl) == info->context
2149 && !use_pointer_in_frame (decl))
2151 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2152 if (field)
2154 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
2155 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
2156 bitmap_set_bit (new_suppress, DECL_UID (decl));
2157 *pdecl = get_local_debug_decl (info, decl, field);
2158 need_frame = true;
2161 break;
2163 case OMP_CLAUSE_SCHEDULE:
2164 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
2165 break;
2166 /* FALLTHRU */
2167 case OMP_CLAUSE_FINAL:
2168 case OMP_CLAUSE_IF:
2169 case OMP_CLAUSE_SELF:
2170 case OMP_CLAUSE_NUM_THREADS:
2171 case OMP_CLAUSE_DEPEND:
2172 case OMP_CLAUSE_DOACROSS:
2173 case OMP_CLAUSE_DEVICE:
2174 case OMP_CLAUSE_NUM_TEAMS:
2175 case OMP_CLAUSE_THREAD_LIMIT:
2176 case OMP_CLAUSE_SAFELEN:
2177 case OMP_CLAUSE_SIMDLEN:
2178 case OMP_CLAUSE_PRIORITY:
2179 case OMP_CLAUSE_GRAINSIZE:
2180 case OMP_CLAUSE_NUM_TASKS:
2181 case OMP_CLAUSE_HINT:
2182 case OMP_CLAUSE_FILTER:
2183 case OMP_CLAUSE_NUM_GANGS:
2184 case OMP_CLAUSE_NUM_WORKERS:
2185 case OMP_CLAUSE_VECTOR_LENGTH:
2186 case OMP_CLAUSE_GANG:
2187 case OMP_CLAUSE_WORKER:
2188 case OMP_CLAUSE_VECTOR:
2189 case OMP_CLAUSE_ASYNC:
2190 case OMP_CLAUSE_WAIT:
2191 /* Several OpenACC clauses have optional arguments. Check if they
2192 are present. */
2193 if (OMP_CLAUSE_OPERAND (clause, 0))
2195 wi->val_only = true;
2196 wi->is_lhs = false;
2197 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2198 &dummy, wi);
2201 /* The gang clause accepts two arguments. */
2202 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
2203 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
2205 wi->val_only = true;
2206 wi->is_lhs = false;
2207 convert_nonlocal_reference_op
2208 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2210 break;
2212 case OMP_CLAUSE_DIST_SCHEDULE:
2213 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2215 wi->val_only = true;
2216 wi->is_lhs = false;
2217 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2218 &dummy, wi);
2220 break;
2222 case OMP_CLAUSE_MAP:
2223 case OMP_CLAUSE_TO:
2224 case OMP_CLAUSE_FROM:
2225 if (OMP_CLAUSE_SIZE (clause))
2227 wi->val_only = true;
2228 wi->is_lhs = false;
2229 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2230 &dummy, wi);
2232 if (DECL_P (OMP_CLAUSE_DECL (clause)))
2233 goto do_decl_clause;
2234 wi->val_only = true;
2235 wi->is_lhs = false;
2236 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2237 wi, NULL);
2238 break;
2240 case OMP_CLAUSE_ALIGNED:
2241 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2243 wi->val_only = true;
2244 wi->is_lhs = false;
2245 convert_local_reference_op
2246 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2248 /* FALLTHRU */
2249 case OMP_CLAUSE_NONTEMPORAL:
2250 do_decl_clause_no_supp:
2251 /* Like do_decl_clause, but don't add any suppression. */
2252 decl = OMP_CLAUSE_DECL (clause);
2253 if (VAR_P (decl)
2254 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2255 break;
2256 if (decl_function_context (decl) == info->context
2257 && !use_pointer_in_frame (decl))
2259 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2260 if (field)
2262 OMP_CLAUSE_DECL (clause)
2263 = get_local_debug_decl (info, decl, field);
2264 need_frame = true;
2267 break;
2269 case OMP_CLAUSE_ALLOCATE:
2270 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause))
2272 wi->val_only = true;
2273 wi->is_lhs = false;
2274 convert_local_reference_op
2275 (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), &dummy, wi);
2277 goto do_decl_clause_no_supp;
2279 case OMP_CLAUSE_NOWAIT:
2280 case OMP_CLAUSE_ORDERED:
2281 case OMP_CLAUSE_DEFAULT:
2282 case OMP_CLAUSE_COPYIN:
2283 case OMP_CLAUSE_COLLAPSE:
2284 case OMP_CLAUSE_TILE:
2285 case OMP_CLAUSE_UNTIED:
2286 case OMP_CLAUSE_MERGEABLE:
2287 case OMP_CLAUSE_PROC_BIND:
2288 case OMP_CLAUSE_NOGROUP:
2289 case OMP_CLAUSE_THREADS:
2290 case OMP_CLAUSE_SIMD:
2291 case OMP_CLAUSE_DEFAULTMAP:
2292 case OMP_CLAUSE_ORDER:
2293 case OMP_CLAUSE_SEQ:
2294 case OMP_CLAUSE_INDEPENDENT:
2295 case OMP_CLAUSE_AUTO:
2296 case OMP_CLAUSE_IF_PRESENT:
2297 case OMP_CLAUSE_FINALIZE:
2298 case OMP_CLAUSE_BIND:
2299 case OMP_CLAUSE__CONDTEMP_:
2300 case OMP_CLAUSE__SCANTEMP_:
2301 break;
2303 /* The following clause belongs to the OpenACC cache directive, which
2304 is discarded during gimplification. */
2305 case OMP_CLAUSE__CACHE_:
2306 /* The following clauses are only allowed in the OpenMP declare simd
2307 directive, so not seen here. */
2308 case OMP_CLAUSE_UNIFORM:
2309 case OMP_CLAUSE_INBRANCH:
2310 case OMP_CLAUSE_NOTINBRANCH:
2311 /* The following clauses are only allowed on OpenMP cancel and
2312 cancellation point directives, which at this point have already
2313 been lowered into a function call. */
2314 case OMP_CLAUSE_FOR:
2315 case OMP_CLAUSE_PARALLEL:
2316 case OMP_CLAUSE_SECTIONS:
2317 case OMP_CLAUSE_TASKGROUP:
2318 /* The following clauses are only added during OMP lowering; nested
2319 function decomposition happens before that. */
2320 case OMP_CLAUSE__LOOPTEMP_:
2321 case OMP_CLAUSE__REDUCTEMP_:
2322 case OMP_CLAUSE__SIMDUID_:
2323 case OMP_CLAUSE__SIMT_:
2324 /* The following clauses are only allowed on OpenACC 'routine'
2325 directives, not seen here. */
2326 case OMP_CLAUSE_NOHOST:
2327 /* Anything else. */
2328 default:
2329 gcc_unreachable ();
2333 info->suppress_expansion = new_suppress;
2335 if (need_stmts)
2336 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2337 switch (OMP_CLAUSE_CODE (clause))
2339 case OMP_CLAUSE_REDUCTION:
2340 case OMP_CLAUSE_IN_REDUCTION:
2341 case OMP_CLAUSE_TASK_REDUCTION:
2342 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2344 tree old_context
2345 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2346 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2347 = info->context;
2348 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2349 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2350 = info->context;
2351 walk_body (convert_local_reference_stmt,
2352 convert_local_reference_op, info,
2353 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2354 walk_body (convert_local_reference_stmt,
2355 convert_local_reference_op, info,
2356 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2357 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2358 = old_context;
2359 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2360 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2361 = old_context;
2363 break;
2365 case OMP_CLAUSE_LASTPRIVATE:
2366 walk_body (convert_local_reference_stmt,
2367 convert_local_reference_op, info,
2368 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2369 break;
2371 case OMP_CLAUSE_LINEAR:
2372 walk_body (convert_local_reference_stmt,
2373 convert_local_reference_op, info,
2374 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2375 break;
2377 default:
2378 break;
2381 return need_frame;
2385 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2386 and PARM_DECLs that were referenced by inner nested functions.
2387 The rewrite will be a structure reference to the local frame variable. */
2389 static tree
2390 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2391 struct walk_stmt_info *wi)
/* GSI is the statement being visited; *HANDLED_OPS_P tells the generic
   walker whether the operands were already processed here; WI->info
   carries the nesting_info of the function being rewritten.  */
2393 struct nesting_info *info = (struct nesting_info *) wi->info;
2394 tree save_local_var_chain;
2395 bitmap save_suppress;
2396 char save_static_chain_added;
2397 bool frame_decl_added;
2398 gimple *stmt = gsi_stmt (*gsi);
2400 switch (gimple_code (stmt))
2402 case GIMPLE_OMP_TEAMS:
/* A non-host TEAMS only needs its clauses and body converted; a host
   TEAMS falls through and is handled like parallel/task below.  */
2403 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2405 save_suppress = info->suppress_expansion;
2406 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2407 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2408 info, gimple_omp_body_ptr (stmt));
2409 info->suppress_expansion = save_suppress;
2410 break;
2412 /* FALLTHRU */
2414 case GIMPLE_OMP_PARALLEL:
2415 case GIMPLE_OMP_TASK:
2416 save_suppress = info->suppress_expansion;
2417 frame_decl_added = false;
/* If clause conversion redirected some decl into the frame, the
   construct must be given access to the frame object via a
   shared(FRAME) clause; bit 4 records that this was done.  */
2418 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
2419 wi))
2421 tree c = build_omp_clause (gimple_location (stmt),
2422 OMP_CLAUSE_SHARED);
2423 (void) get_frame_type (info);
2424 OMP_CLAUSE_DECL (c) = info->frame_decl;
2425 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2426 gimple_omp_taskreg_set_clauses (stmt, c);
2427 info->static_chain_added |= 4;
2428 frame_decl_added = true;
/* Walk the body with fresh new_local_var_chain/static_chain_added so we
   can tell what the body itself required; state is restored below.  */
2431 save_local_var_chain = info->new_local_var_chain;
2432 save_static_chain_added = info->static_chain_added;
2433 info->new_local_var_chain = NULL;
2434 info->static_chain_added = 0;
2436 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2437 gimple_omp_body_ptr (stmt));
/* The body walk may also have required the frame (bit 4); add the
   shared(FRAME) clause now if it was not added above.  */
2439 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2441 tree c = build_omp_clause (gimple_location (stmt),
2442 OMP_CLAUSE_SHARED);
2443 (void) get_frame_type (info);
2444 OMP_CLAUSE_DECL (c) = info->frame_decl;
2445 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2446 info->static_chain_added |= 4;
2447 gimple_omp_taskreg_set_clauses (stmt, c);
/* Temporaries created while walking the body belong in the body's
   outermost bind.  */
2449 if (info->new_local_var_chain)
2450 declare_vars (info->new_local_var_chain,
2451 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2452 info->new_local_var_chain = save_local_var_chain;
2453 info->suppress_expansion = save_suppress;
2454 info->static_chain_added |= save_static_chain_added;
2455 break;
2457 case GIMPLE_OMP_FOR:
2458 save_suppress = info->suppress_expansion;
2459 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
/* The loop bounds/steps are operands of the GOMP_FOR itself, not of
   its body, so they need a dedicated walk.  */
2460 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
2461 convert_local_reference_stmt,
2462 convert_local_reference_op, info);
2463 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2464 info, gimple_omp_body_ptr (stmt))
2465 info->suppress_expansion = save_suppress;
2466 break;
2468 case GIMPLE_OMP_SECTIONS:
2469 save_suppress = info->suppress_expansion;
2470 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
2471 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2472 info, gimple_omp_body_ptr (stmt));
2473 info->suppress_expansion = save_suppress;
2474 break;
2476 case GIMPLE_OMP_SINGLE:
2477 save_suppress = info->suppress_expansion;
2478 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
2479 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2480 info, gimple_omp_body_ptr (stmt));
2481 info->suppress_expansion = save_suppress;
2482 break;
2484 case GIMPLE_OMP_SCOPE:
2485 save_suppress = info->suppress_expansion;
2486 convert_local_omp_clauses (gimple_omp_scope_clauses_ptr (stmt), wi);
2487 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2488 info, gimple_omp_body_ptr (stmt));
2489 info->suppress_expansion = save_suppress;
2490 break;
2492 case GIMPLE_OMP_TASKGROUP:
2493 save_suppress = info->suppress_expansion;
2494 convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
2495 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2496 info, gimple_omp_body_ptr (stmt));
2497 info->suppress_expansion = save_suppress;
2498 break;
2500 case GIMPLE_OMP_TARGET:
/* A non-offloaded target region behaves like the simple constructs
   above; an offloaded one needs explicit map clauses for the frame.  */
2501 if (!is_gimple_omp_offloaded (stmt))
2503 save_suppress = info->suppress_expansion;
2504 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
2505 info->suppress_expansion = save_suppress;
2506 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2507 info, gimple_omp_body_ptr (stmt));
2508 break;
2510 save_suppress = info->suppress_expansion;
2511 frame_decl_added = false;
/* For an offloaded region the frame object must be mapped to the
   device rather than shared; mirror the parallel/task logic but with
   a map(tofrom:FRAME) clause.  */
2512 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
2514 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2515 (void) get_frame_type (info);
2516 OMP_CLAUSE_DECL (c) = info->frame_decl;
2517 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2518 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2519 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2520 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2521 info->static_chain_added |= 4;
2522 frame_decl_added = true;
2525 save_local_var_chain = info->new_local_var_chain;
2526 save_static_chain_added = info->static_chain_added;
2527 info->new_local_var_chain = NULL;
2528 info->static_chain_added = 0;
2530 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2531 gimple_omp_body_ptr (stmt));
2533 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2535 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2536 (void) get_frame_type (info);
2537 OMP_CLAUSE_DECL (c) = info->frame_decl;
2538 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2539 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2540 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2541 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2542 info->static_chain_added |= 4;
2545 if (info->new_local_var_chain)
2546 declare_vars (info->new_local_var_chain,
2547 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2548 info->new_local_var_chain = save_local_var_chain;
2549 info->suppress_expansion = save_suppress;
2550 info->static_chain_added |= save_static_chain_added;
2551 break;
2553 case GIMPLE_OMP_SECTION:
2554 case GIMPLE_OMP_STRUCTURED_BLOCK:
2555 case GIMPLE_OMP_MASTER:
2556 case GIMPLE_OMP_MASKED:
2557 case GIMPLE_OMP_ORDERED:
2558 case GIMPLE_OMP_SCAN:
/* Constructs without clauses of interest: just convert the body.  */
2559 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2560 info, gimple_omp_body_ptr (stmt));
2561 break;
2563 case GIMPLE_COND:
/* Let the generic walker convert the condition operands as rvalues.  */
2564 wi->val_only = true;
2565 wi->is_lhs = false;
2566 *handled_ops_p = false;
2567 return NULL_TREE;
2569 case GIMPLE_ASSIGN:
2570 if (gimple_clobber_p (stmt))
/* A local that has been replaced by a frame field must not be
   clobbered any more; turn the clobber into a no-op.  */
2572 tree lhs = gimple_assign_lhs (stmt);
2573 if (DECL_P (lhs)
2574 && decl_function_context (lhs) == info->context
2575 && !use_pointer_in_frame (lhs)
2576 && lookup_field_for_decl (info, lhs, NO_INSERT))
2578 gsi_replace (gsi, gimple_build_nop (), true);
2579 break;
2582 *handled_ops_p = false;
2583 return NULL_TREE;
2585 case GIMPLE_BIND:
/* NAMELIST_DECLs (Fortran namelists) list their member decls in a
   CONSTRUCTOR; redirect members now living in the frame to their
   debug decls.  */
2586 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2587 var;
2588 var = DECL_CHAIN (var))
2589 if (TREE_CODE (var) == NAMELIST_DECL)
2591 /* Adjust decls mentioned in NAMELIST_DECL. */
2592 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2593 tree decl;
2594 unsigned int i;
2596 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2598 if (VAR_P (decl)
2599 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2600 continue;
2601 if (decl_function_context (decl) == info->context
2602 && !use_pointer_in_frame (decl))
2604 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2605 if (field)
2607 CONSTRUCTOR_ELT (decls, i)->value
2608 = get_local_debug_decl (info, decl, field);
2614 *handled_ops_p = false;
2615 return NULL_TREE;
2617 default:
2618 /* For every other statement that we are not interested in
2619 handling here, let the walker traverse the operands. */
2620 *handled_ops_p = false;
2621 return NULL_TREE;
2624 /* Indicate that we have handled all the operands ourselves. */
2625 *handled_ops_p = true;
2626 return NULL_TREE;
2630 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2631 that reference labels from outer functions. The rewrite will be a
2632 call to __builtin_nonlocal_goto. */
2634 static tree
2635 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2636 struct walk_stmt_info *wi)
2638 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2639 tree label, new_label, target_context, x, field;
2640 gcall *call;
2641 gimple *stmt = gsi_stmt (*gsi);
/* Only plain GOTOs are of interest.  */
2643 if (gimple_code (stmt) != GIMPLE_GOTO)
2645 *handled_ops_p = false;
2646 return NULL_TREE;
/* A destination that is not a LABEL_DECL (computed goto) is left
   alone.  */
2649 label = gimple_goto_dest (stmt);
2650 if (TREE_CODE (label) != LABEL_DECL)
2652 *handled_ops_p = false;
2653 return NULL_TREE;
/* A goto to a label of the current function needs no rewriting.  */
2656 target_context = decl_function_context (label);
2657 if (target_context == info->context)
2659 *handled_ops_p = false;
2660 return NULL_TREE;
/* Find the nesting_info of the (outer) function that owns LABEL; the
   nonlocal-goto save area lives in that function's frame.  */
2663 for (i = info->outer; target_context != i->context; i = i->outer)
2664 continue;
2666 /* The original user label may also be use for a normal goto, therefore
2667 we must create a new label that will actually receive the abnormal
2668 control transfer. This new label will be marked LABEL_NONLOCAL; this
2669 mark will trigger proper behavior in the cfg, as well as cause the
2670 (hairy target-specific) non-local goto receiver code to be generated
2671 when we expand rtl. Enter this association into var_map so that we
2672 can insert the new label into the IL during a second pass. */
2673 tree *slot = &i->var_map->get_or_insert (label);
2674 if (*slot == NULL)
2676 new_label = create_artificial_label (UNKNOWN_LOCATION);
2677 DECL_NONLOCAL (new_label) = 1;
2678 *slot = new_label;
2680 else
2681 new_label = *slot;
2683 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2684 field = get_nl_goto_field (i);
2685 x = get_frame_field (info, target_context, field, gsi)
2686 x = build_addr (x);
2687 x = gsi_gimplify_val (info, x, gsi);
2688 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2689 2, build_addr (new_label), x);
2690 gsi_replace (gsi, call, false);
2692 /* We have handled all of STMT's operands, no need to keep going. */
2693 *handled_ops_p = true;
2694 return NULL_TREE;
2698 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2699 are referenced via nonlocal goto from a nested function. The rewrite
2700 will involve installing a newly generated DECL_NONLOCAL label, and
2701 (potentially) a branch around the rtl gunk that is assumed to be
2702 attached to such a label. */
2704 static tree
2705 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2706 struct walk_stmt_info *wi)
2708 struct nesting_info *const info = (struct nesting_info *) wi->info;
2709 tree label, new_label;
2710 gimple_stmt_iterator tmp_gsi;
2711 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
/* Only label statements are of interest.  */
2713 if (!stmt)
2715 *handled_ops_p = false;
2716 return NULL_TREE;
/* Only labels recorded in var_map by convert_nl_goto_reference need a
   nonlocal receiver inserted.  */
2719 label = gimple_label_label (stmt);
2721 tree *slot = info->var_map->get (label);
2722 if (!slot)
2724 *handled_ops_p = false;
2725 return NULL_TREE;
2728 /* If there's any possibility that the previous statement falls through,
2729 then we must branch around the new non-local label. */
2730 tmp_gsi = wi->gsi;
2731 gsi_prev (&tmp_gsi);
2732 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2734 gimple *stmt = gimple_build_goto (label);
2735 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
/* Install the DECL_NONLOCAL receiver label immediately before the
   original user label.  */
2738 new_label = (tree) *slot;
2739 stmt = gimple_build_label (new_label);
2740 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2742 *handled_ops_p = true;
2743 return NULL_TREE;
2747 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2748 of nested functions that require the use of trampolines. The rewrite
2749 will involve a reference a trampoline generated for the occasion. */
2751 static tree
2752 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2754 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2755 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2756 tree t = *tp, decl, target_context, x, builtin;
2757 bool descr;
2758 gcall *call;
2760 *walk_subtrees = 0;
2761 switch (TREE_CODE (t))
2763 case ADDR_EXPR:
2764 /* Build
2765 T.1 = &CHAIN->tramp;
2766 T.2 = __builtin_adjust_trampoline (T.1);
2767 T.3 = (func_type)T.2;
2770 decl = TREE_OPERAND (t, 0);
2771 if (TREE_CODE (decl) != FUNCTION_DECL)
2772 break;
2774 /* Only need to process nested functions. */
2775 target_context = decl_function_context (decl);
2776 if (!target_context)
2777 break;
2779 /* If the nested function doesn't use a static chain, then
2780 it doesn't need a trampoline. */
2781 if (!DECL_STATIC_CHAIN (decl))
2782 break;
2784 /* If we don't want a trampoline, then don't build one. */
2785 if (TREE_NO_TRAMPOLINE (t))
2786 break;
2788 /* Lookup the immediate parent of the callee, as that's where
2789 we need to insert the trampoline. */
2790 for (i = info; i->context != target_context; i = i->outer)
2791 continue;
2793 /* Decide whether to generate a descriptor or a trampoline. */
2794 descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;
/* Both lookups create the field in the parent's frame on first use.  */
2796 if (descr)
2797 x = lookup_descr_for_decl (i, decl, INSERT)
2798 else
2799 x = lookup_tramp_for_decl (i, decl, INSERT);
2801 /* Compute the address of the field holding the trampoline. */
2802 x = get_frame_field (info, target_context, x, &wi->gsi);
2804 /* APB: We don't need to do the adjustment calls when using off-stack
2805 trampolines, any such adjustment will be done when the off-stack
2806 trampoline is created. */
2807 if (!descr && flag_trampoline_impl == TRAMPOLINE_IMPL_HEAP)
2808 x = gsi_gimplify_val (info, x, &wi->gsi);
2809 else
2811 x = build_addr (x);
2813 x = gsi_gimplify_val (info, x, &wi->gsi);
2815 /* Do machine-specific ugliness. Normally this will involve
2816 computing extra alignment, but it can really be anything. */
2817 if (descr)
2818 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
2819 else
2820 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2821 call = gimple_build_call (builtin, 1, x);
2822 x = init_tmp_var_with_call (info, &wi->gsi, call);
2825 /* Cast back to the proper function type. */
2826 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2827 x = init_tmp_var (info, x, &wi->gsi);
2829 *tp = x;
2830 break;
2832 default:
/* Recurse into anything that could contain an ADDR_EXPR, but don't
   walk into types or decls themselves.  */
2833 if (!IS_TYPE_OR_DECL_P (t))
2834 *walk_subtrees = 1;
2835 break;
2838 return NULL_TREE;
2842 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2843 to addresses of nested functions that require the use of
2844 trampolines. The rewrite will involve a reference a trampoline
2845 generated for the occasion. */
2847 static tree
2848 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2849 struct walk_stmt_info *wi)
2851 struct nesting_info *info = (struct nesting_info *) wi->info;
2852 gimple *stmt = gsi_stmt (*gsi);
2854 switch (gimple_code (stmt))
2856 case GIMPLE_CALL:
2858 /* Only walk call arguments, lest we generate trampolines for
2859 direct calls. */
2860 unsigned long i, nargs = gimple_call_num_args (stmt);
2861 for (i = 0; i < nargs; i++)
2862 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2863 wi, NULL);
2864 break;
2867 case GIMPLE_OMP_TEAMS:
/* Only a host TEAMS needs the parallel-style handling below; a
   non-host TEAMS is walked generically.  */
2868 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2870 *handled_ops_p = false;
2871 return NULL_TREE;
2873 goto do_parallel;
2875 case GIMPLE_OMP_TARGET:
2876 if (!is_gimple_omp_offloaded (stmt))
2878 *handled_ops_p = false;
2879 return NULL_TREE;
2881 /* FALLTHRU */
2882 case GIMPLE_OMP_PARALLEL:
2883 case GIMPLE_OMP_TASK:
2884 do_parallel:
/* Walking the construct's operands/body may create trampoline
   references that require the frame (bit 0 of static_chain_added) or
   the static chain (bit 1); propagate those needs as data-sharing or
   map clauses on the construct.  */
2886 tree save_local_var_chain = info->new_local_var_chain;
2887 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2888 info->new_local_var_chain = NULL;
2889 char save_static_chain_added = info->static_chain_added;
2890 info->static_chain_added = 0;
2891 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2892 info, gimple_omp_body_ptr (stmt));
2893 if (info->new_local_var_chain)
2894 declare_vars (info->new_local_var_chain,
2895 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2896 false);
2897 for (int i = 0; i < 2; i++)
2899 tree c, decl;
2900 if ((info->static_chain_added & (1 << i)) == 0)
2901 continue;
/* Bit 0 selects the frame object, bit 1 the chain decl.  */
2902 decl = i ? get_chain_decl (info) : info->frame_decl;
2903 /* Don't add CHAIN.* or FRAME.* twice. */
2904 for (c = gimple_omp_taskreg_clauses (stmt);
2906 c = OMP_CLAUSE_CHAIN (c))
2907 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2908 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2909 && OMP_CLAUSE_DECL (c) == decl)
2910 break;
/* parallel/task/host-teams get data-sharing clauses; an offloaded
   target instead gets a map clause below.  */
2911 if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2913 c = build_omp_clause (gimple_location (stmt),
2914 i ? OMP_CLAUSE_FIRSTPRIVATE
2915 : OMP_CLAUSE_SHARED);
2916 OMP_CLAUSE_DECL (c) = decl;
2917 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2918 gimple_omp_taskreg_set_clauses (stmt, c);
2920 else if (c == NULL)
2922 c = build_omp_clause (gimple_location (stmt),
2923 OMP_CLAUSE_MAP);
2924 OMP_CLAUSE_DECL (c) = decl;
2925 OMP_CLAUSE_SET_MAP_KIND (c,
2926 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2927 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2928 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2929 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2933 info->new_local_var_chain = save_local_var_chain;
2934 info->static_chain_added |= save_static_chain_added;
2936 break;
2938 default:
2939 *handled_ops_p = false;
2940 return NULL_TREE;
2943 *handled_ops_p = true;
2944 return NULL_TREE;
2949 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2950 that reference nested functions to make sure that the static chain
2951 is set up properly for the call. */
2953 static tree
2954 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2955 struct walk_stmt_info *wi)
2957 struct nesting_info *const info = (struct nesting_info *) wi->info;
2958 tree decl, target_context;
2959 char save_static_chain_added;
2960 int i;
2961 gimple *stmt = gsi_stmt (*gsi);
2963 switch (gimple_code (stmt))
2965 case GIMPLE_CALL:
/* If a chain is already set (e.g. by an earlier pass over this
   statement), leave it alone.  */
2966 if (gimple_call_chain (stmt))
2967 break;
2968 decl = gimple_call_fndecl (stmt);
/* Indirect calls get their chain via the trampoline machinery, not
   here.  */
2969 if (!decl)
2970 break;
2971 target_context = decl_function_context (decl);
2972 if (target_context && DECL_STATIC_CHAIN (decl))
2974 struct nesting_info *i = info;
2975 while (i && i->context != target_context)
2976 i = i->outer;
2977 /* If none of the outer contexts is the target context, this means
2978 that the function is called in a wrong context. */
2979 if (!i)
2980 internal_error ("%s from %s called in %s",
2981 IDENTIFIER_POINTER (DECL_NAME (decl)),
2982 IDENTIFIER_POINTER (DECL_NAME (target_context)),
2983 IDENTIFIER_POINTER (DECL_NAME (info->context)));
/* Bit 0 records that the current frame was needed, bit 1 that an
   outer chain pointer was needed.  */
2985 gimple_call_set_chain (as_a <gcall *> (stmt),
2986 get_static_chain (info, target_context,
2987 &wi->gsi));
2988 info->static_chain_added |= (1 << (info->context != target_context));
2990 break;
2992 case GIMPLE_OMP_TEAMS:
/* Non-host TEAMS only needs its body walked; host TEAMS is handled
   like parallel/task below.  */
2993 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2995 walk_body (convert_gimple_call, NULL, info,
2996 gimple_omp_body_ptr (stmt));
2997 break;
2999 /* FALLTHRU */
3001 case GIMPLE_OMP_PARALLEL:
3002 case GIMPLE_OMP_TASK:
/* Calls inside the region may have added chain/frame requirements;
   reflect them as data-sharing clauses on the construct.  */
3003 save_static_chain_added = info->static_chain_added;
3004 info->static_chain_added = 0;
3005 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
3006 for (i = 0; i < 2; i++)
3008 tree c, decl;
3009 if ((info->static_chain_added & (1 << i)) == 0)
3010 continue;
3011 decl = i ? get_chain_decl (info) : info->frame_decl;
3012 /* Don't add CHAIN.* or FRAME.* twice. */
3013 for (c = gimple_omp_taskreg_clauses (stmt);
3015 c = OMP_CLAUSE_CHAIN (c))
3016 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
3017 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
3018 && OMP_CLAUSE_DECL (c) == decl)
3019 break;
3020 if (c == NULL)
3022 c = build_omp_clause (gimple_location (stmt),
3023 i ? OMP_CLAUSE_FIRSTPRIVATE
3024 : OMP_CLAUSE_SHARED);
3025 OMP_CLAUSE_DECL (c) = decl;
3026 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
3027 gimple_omp_taskreg_set_clauses (stmt, c);
3030 info->static_chain_added |= save_static_chain_added;
3031 break;
3033 case GIMPLE_OMP_TARGET:
3034 if (!is_gimple_omp_offloaded (stmt))
3036 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
3037 break;
/* For an offloaded region, chain/frame requirements become map
   clauses rather than data-sharing clauses.  */
3039 save_static_chain_added = info->static_chain_added;
3040 info->static_chain_added = 0;
3041 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
3042 for (i = 0; i < 2; i++)
3044 tree c, decl;
3045 if ((info->static_chain_added & (1 << i)) == 0)
3046 continue;
3047 decl = i ? get_chain_decl (info) : info->frame_decl;
3048 /* Don't add CHAIN.* or FRAME.* twice. */
3049 for (c = gimple_omp_target_clauses (stmt);
3051 c = OMP_CLAUSE_CHAIN (c))
3052 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
3053 && OMP_CLAUSE_DECL (c) == decl)
3054 break;
3055 if (c == NULL)
3057 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
3058 OMP_CLAUSE_DECL (c) = decl;
3059 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
3060 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
3061 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
3062 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
3066 info->static_chain_added |= save_static_chain_added;
3067 break;
3069 case GIMPLE_OMP_FOR:
/* The pre-body (loop setup) sequence can also contain calls.  */
3070 walk_body (convert_gimple_call, NULL, info,
3071 gimple_omp_for_pre_body_ptr (stmt));
3072 /* FALLTHRU */
3073 case GIMPLE_OMP_SECTIONS:
3074 case GIMPLE_OMP_SECTION:
3075 case GIMPLE_OMP_STRUCTURED_BLOCK:
3076 case GIMPLE_OMP_SINGLE:
3077 case GIMPLE_OMP_SCOPE:
3078 case GIMPLE_OMP_MASTER:
3079 case GIMPLE_OMP_MASKED:
3080 case GIMPLE_OMP_TASKGROUP:
3081 case GIMPLE_OMP_ORDERED:
3082 case GIMPLE_OMP_SCAN:
3083 case GIMPLE_OMP_CRITICAL:
3084 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
3085 break;
3087 default:
3088 /* Keep looking for other operands. */
3089 *handled_ops_p = false;
3090 return NULL_TREE;
3093 *handled_ops_p = true;
3094 return NULL_TREE;
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      /* Thunks inherit their static-chain flag from their alias; handled
	 in the separate loop below.  */
      if (n->thunk_p)
	continue;
      tree decl = n->context;
      if (!optimize)
	{
	  /* Force creation of the frame and chain so the debugger can walk
	     the static nesting tree at run time.  */
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  /* No up-level references seen so far: guess that no static chain
	     is needed.  Later walks may prove this wrong, which is why the
	     do/while loop below iterates to a fixed point.  */
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* A thunk needs a static chain exactly when the function it forwards
     to (its alias) does.  */
  FOR_EACH_NEST_INFO (n, root)
    if (n->thunk_p)
      {
	tree decl = n->context;
	tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
	DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
      }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  if (n->thunk_p)
	    continue;
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}

      /* Re-propagate to thunks after each sweep, since their aliases'
	 static-chain flags may just have changed.  */
      FOR_EACH_NEST_INFO (n, root)
	if (n->thunk_p)
	  {
	    tree decl = n->context;
	    tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
	    DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
	  }
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
/* Context for the tree-copying callbacks used while remapping
   variably-modified types of debug variables: the generic copy_body_data
   (which must come first so the struct can be cast to it) plus the root
   of the nesting tree whose var_map drives the remapping.  */

struct nesting_copy_body_data
{
  copy_body_data cb;
  struct nesting_info *root;
};
3193 /* A helper subroutine for debug_var_chain type remapping. */
3195 static tree
3196 nesting_copy_decl (tree decl, copy_body_data *id)
3198 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
3199 tree *slot = nid->root->var_map->get (decl);
3201 if (slot)
3202 return (tree) *slot;
3204 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
3206 tree new_decl = copy_decl_no_change (decl, id);
3207 DECL_ORIGINAL_TYPE (new_decl)
3208 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
3209 return new_decl;
3212 if (VAR_P (decl)
3213 || TREE_CODE (decl) == PARM_DECL
3214 || TREE_CODE (decl) == RESULT_DECL)
3215 return decl;
3217 return copy_decl_no_change (decl, id);
3220 /* A helper function for remap_vla_decls. See if *TP contains
3221 some remapped variables. */
3223 static tree
3224 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
3226 struct nesting_info *root = (struct nesting_info *) data;
3227 tree t = *tp;
3229 if (DECL_P (t))
3231 *walk_subtrees = 0;
3232 tree *slot = root->var_map->get (t);
3234 if (slot)
3235 return *slot;
3237 return NULL;
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Process sub-scopes first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: scan for a VLA-typed variable whose value expression
     (*VAR form) involves a remapped variable.  If none is found, bail out
     before paying for the copy_body machinery setup below.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (! (INDIRECT_REF_P (val)
	       && VAR_P (TREE_OPERAND (val, 0))
	       && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  /* Second pass: remap, starting from the first affected variable.  */
  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  for (; var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (! (INDIRECT_REF_P (val)
	       && VAR_P (TREE_OPERAND (val, 0))
	       && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting level VAR belongs to; skip it if it is not in
	   the tree rooted at ROOT.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Walk through anonymous pointer layers in parallel on the old and
	   new type to find the first named type, if any.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	/* If remapping produced a distinct type that still shares the old
	   TYPE_DECL name, remap the name decl too so debug info stays
	   consistent.  */
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
3338 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3339 involved. */
3341 static void
3342 fixup_vla_decls (tree block)
3344 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3345 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3347 tree val = DECL_VALUE_EXPR (var);
3349 if (! (INDIRECT_REF_P (val)
3350 && VAR_P (TREE_OPERAND (val, 0))
3351 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3352 continue;
3354 /* Fully expand value expressions. This avoids having debug variables
3355 only referenced from them and that can be swept during GC. */
3356 val = build1 (INDIRECT_REF, TREE_TYPE (val),
3357 DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3358 SET_DECL_VALUE_EXPR (var, val);
3361 for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3362 fixup_vla_decls (sub);
3365 /* Fold the MEM_REF *E. */
3366 bool
3367 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3369 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3370 *ref_p = fold (*ref_p);
3371 return true;
3374 /* Given DECL, a nested function, build an initialization call for FIELD,
3375 the trampoline or descriptor for DECL, using FUNC as the function. */
3377 static gcall *
3378 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3379 tree func)
3381 tree arg1, arg2, arg3, x;
3383 gcc_assert (DECL_STATIC_CHAIN (decl));
3384 arg3 = build_addr (info->frame_decl);
3386 arg2 = build_addr (decl);
3388 x = build3 (COMPONENT_REF, TREE_TYPE (field),
3389 info->frame_decl, field, NULL_TREE);
3390 arg1 = build_addr (x);
3392 return gimple_build_call (func, 3, arg1, arg2, arg3);
/* Do "everything else" to clean up or complete state collected by the various
   walking passes -- create a field to hold the frame base address, lay out the
   types and decls, generate code to initialize the frame decl, store critical
   expressions in the struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq cleanup_list = NULL;
  gimple_seq stmt_list = NULL;
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  /* Thunks carry no frame of their own; nothing to finalize.  */
  if (root->thunk_p)
    return;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* Debugging information needs to compute the frame base address of the
	 parent frame out of the static chain from the nested frame.

	 The static chain is the address of the FRAME record, so one could
	 imagine it would be possible to compute the frame base address just
	 adding a constant offset to this address.  Unfortunately, this is not
	 possible: if the FRAME object has alignment constraints that are
	 stronger than the stack, then the offset between the frame base and
	 the FRAME object will be dynamic.

	 What we do instead is to append a field to the FRAME object that holds
	 the frame base address: then debug info just has to fetch this
	 field.  */

      /* Debugging information will refer to the CFA as the frame base
	 address: we will do the same here.  */
      const tree frame_addr_fndecl
	= builtin_decl_explicit (BUILT_IN_DWARF_CFA);

      /* Create a field in the FRAME record to hold the frame base address for
	 this stack frame.  Since it will be used only by the debugger, put it
	 at the end of the record in order not to shift all other offsets.  */
      tree fb_decl = make_node (FIELD_DECL);

      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
      TREE_TYPE (fb_decl) = ptr_type_node;
      TREE_ADDRESSABLE (fb_decl) = 1;
      DECL_CONTEXT (fb_decl) = root->frame_type;
      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
						fb_decl);

      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Initialize the frame base address field.  If the builtin we need is
	 not available, set it to NULL so that debugging information does not
	 reference junk.  */
      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
			    root->frame_decl, fb_decl, NULL_TREE);
      tree fb_tmp;

      if (frame_addr_fndecl != NULL_TREE)
	{
	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
						integer_zero_node);
	  gimple_stmt_iterator gsi = gsi_last (stmt_list);

	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
	}
      else
	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
      gimple_seq_add_stmt (&stmt_list,
			   gimple_build_assign (fb_ref, fb_tmp));

      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to insert
     a copy or a pointer.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  if (flag_trampoline_impl == TRAMPOLINE_IMPL_HEAP)
	    {
	      /* We pass a whole bunch of arguments to the builtin function that
		 creates the off-stack trampoline, these are
		 1. The nested function chain value (that must be passed to the
		 nested function so it can find the function arguments).
		 2. A pointer to the nested function implementation,
		 3. The address in the local stack frame where we should write
		 the address of the trampoline.

		 When this code was originally written I just kind of threw
		 everything at the builtin, figuring I'd work out what was
		 actually needed later, I think, the stack pointer could
		 certainly be dropped, arguments #2 and #4 are based off the
		 stack pointer anyway, so #1 doesn't seem to add much value.  */
	      tree arg1, arg2, arg3;

	      gcc_assert (DECL_STATIC_CHAIN (i->context));
	      arg1 = build_addr (root->frame_decl);
	      arg2 = build_addr (i->context);

	      x = build3 (COMPONENT_REF, TREE_TYPE (field),
			  root->frame_decl, field, NULL_TREE);
	      arg3 = build_addr (x);

	      x = builtin_decl_implicit (BUILT_IN_NESTED_PTR_CREATED);
	      stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	      gimple_seq_add_stmt (&stmt_list, stmt);

	      /* This call to delete the nested function trampoline is added to
		 the cleanup list, and called when we exit the current scope.  */
	      x = builtin_decl_implicit (BUILT_IN_NESTED_PTR_DELETED);
	      stmt = gimple_build_call (x, 0);
	      gimple_seq_add_stmt (&cleanup_list, stmt);
	    }
	  else
	    {
	      /* Original code to initialise the on stack trampoline.  */
	      x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	      stmt = build_init_call_stmt (root, i->context, field, x);
	      gimple_seq_add_stmt (&stmt_list, stmt);
	    }
	}
    }

  /* If descriptors were created, then we need to initialize them.  */
  if (root->any_descr_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      if (flag_trampoline_impl == TRAMPOLINE_IMPL_HEAP)
	{
	  /* Handle off-stack trampolines.  */
	  gbind *bind;
	  annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
	  annotate_all_with_location (cleanup_list, DECL_SOURCE_LOCATION (context));
	  bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
	  gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));

	  gimple_seq xxx_list = NULL;

	  if (cleanup_list != NULL)
	    {
	      /* Maybe we shouldn't be creating this try/finally if -fno-exceptions is
		 in use.  If this is the case, then maybe we should, instead, be
		 inserting the cleanup code onto every path out of this function?  Not
		 yet figured out how we would do this.  */
	      gtry *t = gimple_build_try (stmt_list, cleanup_list, GIMPLE_TRY_FINALLY);
	      gimple_seq_add_stmt (&xxx_list, t);
	    }
	  else
	    xxx_list = stmt_list;

	  gimple_bind_set_body (bind, xxx_list);
	}
      else
	{
	  /* The traditional, on stack trampolines.  */
	  gbind *bind;
	  annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
	  bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
	  gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
	  gimple_bind_set_body (bind, stmt_list);
	}
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Look for the first debug decl with a variably-modified type.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost level whose context the type varies
		   with; skip decls foreign to this nesting tree.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Walk parallel anonymous-pointer layers, as in
		   remap_vla_decls, to find the first named type.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }
  else
    fixup_vla_decls (DECL_INITIAL (root->context));

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
3738 static void
3739 finalize_nesting_tree (struct nesting_info *root)
3741 struct nesting_info *n;
3742 FOR_EACH_NEST_INFO (n, root)
3743 finalize_nesting_tree_1 (n);
3746 /* Unnest the nodes and pass them to cgraph. */
3748 static void
3749 unnest_nesting_tree_1 (struct nesting_info *root)
3751 struct cgraph_node *node = cgraph_node::get (root->context);
3753 /* For nested functions update the cgraph to reflect unnesting.
3754 We also delay finalizing of these functions up to this point. */
3755 if (nested_function_info::get (node)->origin)
3757 unnest_function (node);
3758 if (!root->thunk_p)
3759 cgraph_node::finalize_function (root->context, true);
3763 static void
3764 unnest_nesting_tree (struct nesting_info *root)
3766 struct nesting_info *n;
3767 FOR_EACH_NEST_INFO (n, root)
3768 unnest_nesting_tree_1 (n);
3771 /* Free the data structures allocated during this pass. */
3773 static void
3774 free_nesting_tree (struct nesting_info *root)
3776 struct nesting_info *node, *next;
3778 node = iter_nestinfo_start (root);
3781 next = iter_nestinfo_next (node);
3782 delete node->var_map;
3783 delete node->field_map;
3784 delete node->mem_refs;
3785 free (node);
3786 node = next;
3788 while (node);
3791 /* Gimplify a function and all its nested functions. */
3792 static void
3793 gimplify_all_functions (struct cgraph_node *root)
3795 struct cgraph_node *iter;
3796 if (!gimple_body (root->decl))
3797 gimplify_function_tree (root->decl);
3798 for (iter = first_nested_function (root); iter;
3799 iter = next_nested_function (iter))
3800 if (!iter->thunk)
3801 gimplify_all_functions (iter);
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!first_nested_function (cgn))
    return;

  /* All functions must have GIMPLE bodies before the walks below.  */
  gimplify_all_functions (cgn);

  set_dump_file (dump_begin (TDI_nested, &dump_flags));
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Rewrite references: first uses of outer-function variables from
     nested functions, then local uses of variables that migrated into
     the frame record, then non-local gotos and their receivers.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  /* Convert trampoline references and calls, settle static-chain usage,
     lay out frames, and hand the now-independent functions to cgraph.  */
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  /* Free memory and tear down the dump stream opened above.  */
  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      set_dump_file (NULL);
    }
}
3851 #include "gt-tree-nested.h"