testsuite: Correct vec-rlmi-rlnm.c testsuite expected result
[official-gcc.git] / gcc / tree-nested.c
blob433f37fffe33fd5734404dad84bb629b398ed1eb
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45 #include "alloc-pool.h"
46 #include "tree-nested.h"
47 #include "symbol-summary.h"
49 /* Summary of nested functions. */
50 static function_summary <nested_function_info *>
51 *nested_function_sum = NULL;
53 /* Return nested_function_info, if available. */
54 nested_function_info *
55 nested_function_info::get (cgraph_node *node)
57 if (!nested_function_sum)
58 return NULL;
59 return nested_function_sum->get (node);
62 /* Return nested_function_info possibly creating new one. */
63 nested_function_info *
64 nested_function_info::get_create (cgraph_node *node)
66 if (!nested_function_sum)
67 nested_function_sum = new function_summary <nested_function_info *>
68 (symtab);
69 return nested_function_sum->get_create (node);
72 /* cgraph_node is no longer nested function; update cgraph accordingly. */
73 void
74 unnest_function (cgraph_node *node)
76 nested_function_info *info = nested_function_info::get (node);
77 cgraph_node **node2 = &nested_function_info::get
78 (nested_function_origin (node))->nested;
80 gcc_checking_assert (info->origin);
81 while (*node2 != node)
82 node2 = &nested_function_info::get (*node2)->next_nested;
83 *node2 = info->next_nested;
84 info->next_nested = NULL;
85 info->origin = NULL;
86 nested_function_sum->remove (node);
89 /* Destructor: unlink function from nested function lists. */
90 nested_function_info::~nested_function_info ()
92 cgraph_node *next;
93 for (cgraph_node *n = nested; n; n = next)
95 nested_function_info *info = nested_function_info::get (n);
96 next = info->next_nested;
97 info->origin = NULL;
98 info->next_nested = NULL;
100 nested = NULL;
101 if (origin)
103 cgraph_node **node2
104 = &nested_function_info::get (origin)->nested;
106 nested_function_info *info;
107 while ((info = nested_function_info::get (*node2)) != this && info)
108 node2 = &info->next_nested;
109 *node2 = next_nested;
113 /* Free nested function info summaries. */
114 void
115 nested_function_info::release ()
117 if (nested_function_sum)
118 delete (nested_function_sum);
119 nested_function_sum = NULL;
122 /* If NODE is nested function, record it. */
123 void
124 maybe_record_nested_function (cgraph_node *node)
126 if (DECL_CONTEXT (node->decl)
127 && TREE_CODE (DECL_CONTEXT (node->decl)) == FUNCTION_DECL)
129 cgraph_node *origin = cgraph_node::get_create (DECL_CONTEXT (node->decl));
130 nested_function_info *info = nested_function_info::get_create (node);
131 nested_function_info *origin_info
132 = nested_function_info::get_create (origin);
134 info->origin = origin;
135 info->next_nested = origin_info->nested;
136 origin_info->nested = node;
140 /* The object of this pass is to lower the representation of a set of nested
141 functions in order to expose all of the gory details of the various
142 nonlocal references. We want to do this sooner rather than later, in
143 order to give us more freedom in emitting all of the functions in question.
145 Back in olden times, when gcc was young, we developed an insanely
146 complicated scheme whereby variables which were referenced nonlocally
147 were forced to live in the stack of the declaring function, and then
148 the nested functions magically discovered where these variables were
149 placed. In order for this scheme to function properly, it required
150 that the outer function be partially expanded, then we switch to
151 compiling the inner function, and once done with those we switch back
152 to compiling the outer function. Such delicate ordering requirements
153 makes it difficult to do whole translation unit optimizations
154 involving such functions.
156 The implementation here is much more direct. Everything that can be
157 referenced by an inner function is a member of an explicitly created
158 structure herein called the "nonlocal frame struct". The incoming
159 static chain for a nested function is a pointer to this struct in
160 the parent. In this way, we settle on known offsets from a known
161 base, and so are decoupled from the logic that places objects in the
162 function's stack frame. More importantly, we don't have to wait for
163 that to happen -- since the compilation of the inner function is no
164 longer tied to a real stack frame, the nonlocal frame struct can be
165 allocated anywhere. Which means that the outer function is now
166 inlinable.
168 Theory of operation here is very simple. Iterate over all the
169 statements in all the functions (depth first) several times,
170 allocating structures and fields on demand. In general we want to
171 examine inner functions first, so that we can avoid making changes
172 to outer functions which are unnecessary.
174 The order of the passes matters a bit, in that later passes will be
175 skipped if it is discovered that the functions don't actually interact
176 at all. That is, they're nested in the lexical sense but could have
177 been written as independent functions without change. */
180 struct nesting_info
182 struct nesting_info *outer;
183 struct nesting_info *inner;
184 struct nesting_info *next;
186 hash_map<tree, tree> *field_map;
187 hash_map<tree, tree> *var_map;
188 hash_set<tree *> *mem_refs;
189 bitmap suppress_expansion;
191 tree context;
192 tree new_local_var_chain;
193 tree debug_var_chain;
194 tree frame_type;
195 tree frame_decl;
196 tree chain_field;
197 tree chain_decl;
198 tree nl_goto_field;
200 bool thunk_p;
201 bool any_parm_remapped;
202 bool any_tramp_created;
203 bool any_descr_created;
204 char static_chain_added;
208 /* Iterate over the nesting tree, starting with ROOT, depth first. */
210 static inline struct nesting_info *
211 iter_nestinfo_start (struct nesting_info *root)
213 while (root->inner)
214 root = root->inner;
215 return root;
218 static inline struct nesting_info *
219 iter_nestinfo_next (struct nesting_info *node)
221 if (node->next)
222 return iter_nestinfo_start (node->next);
223 return node->outer;
226 #define FOR_EACH_NEST_INFO(I, ROOT) \
227 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
229 /* Obstack used for the bitmaps in the struct above. */
230 static struct bitmap_obstack nesting_info_bitmap_obstack;
233 /* We're working in so many different function contexts simultaneously,
234 that create_tmp_var is dangerous. Prevent mishap. */
235 #define create_tmp_var cant_use_create_tmp_var_here_dummy
237 /* Like create_tmp_var, except record the variable for registration at
238 the given nesting level. */
240 static tree
241 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
243 tree tmp_var;
245 /* If the type is of variable size or a type which must be created by the
246 frontend, something is wrong. Note that we explicitly allow
247 incomplete types here, since we create them ourselves here. */
248 gcc_assert (!TREE_ADDRESSABLE (type));
249 gcc_assert (!TYPE_SIZE_UNIT (type)
250 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
252 tmp_var = create_tmp_var_raw (type, prefix);
253 DECL_CONTEXT (tmp_var) = info->context;
254 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
255 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
257 info->new_local_var_chain = tmp_var;
259 return tmp_var;
262 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
264 static tree
265 build_simple_mem_ref_notrap (tree ptr)
267 tree t = build_simple_mem_ref (ptr);
268 TREE_THIS_NOTRAP (t) = 1;
269 return t;
272 /* Take the address of EXP to be used within function CONTEXT.
273 Mark it for addressability as necessary. */
275 tree
276 build_addr (tree exp)
278 mark_addressable (exp);
279 return build_fold_addr_expr (exp);
282 /* Insert FIELD into TYPE, sorted by alignment requirements. */
284 void
285 insert_field_into_struct (tree type, tree field)
287 tree *p;
289 DECL_CONTEXT (field) = type;
291 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
292 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
293 break;
295 DECL_CHAIN (field) = *p;
296 *p = field;
298 /* Set correct alignment for frame struct type. */
299 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
300 SET_TYPE_ALIGN (type, DECL_ALIGN (field));
303 /* Build or return the RECORD_TYPE that describes the frame state that is
304 shared between INFO->CONTEXT and its nested functions. This record will
305 not be complete until finalize_nesting_tree; up until that point we'll
306 be adding fields as necessary.
308 We also build the DECL that represents this frame in the function. */
310 static tree
311 get_frame_type (struct nesting_info *info)
313 tree type = info->frame_type;
314 if (!type)
316 char *name;
318 type = make_node (RECORD_TYPE);
320 name = concat ("FRAME.",
321 IDENTIFIER_POINTER (DECL_NAME (info->context)),
322 NULL);
323 TYPE_NAME (type) = get_identifier (name);
324 free (name);
326 info->frame_type = type;
328 /* Do not put info->frame_decl on info->new_local_var_chain,
329 so that we can declare it in the lexical blocks, which
330 makes sure virtual regs that end up appearing in its RTL
331 expression get substituted in instantiate_virtual_regs. */
332 info->frame_decl = create_tmp_var_raw (type, "FRAME");
333 DECL_CONTEXT (info->frame_decl) = info->context;
334 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
335 DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;
337 /* ??? Always make it addressable for now, since it is meant to
338 be pointed to by the static chain pointer. This pessimizes
339 when it turns out that no static chains are needed because
340 the nested functions referencing non-local variables are not
341 reachable, but the true pessimization is to create the non-
342 local frame structure in the first place. */
343 TREE_ADDRESSABLE (info->frame_decl) = 1;
346 return type;
349 /* Return true if DECL should be referenced by pointer in the non-local frame
350 structure. */
352 static bool
353 use_pointer_in_frame (tree decl)
355 if (TREE_CODE (decl) == PARM_DECL)
357 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
358 sized DECLs, and inefficient to copy large aggregates. Don't bother
359 moving anything but scalar parameters. */
360 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
362 else
364 /* Variable-sized DECLs can only come from OMP clauses at this point
365 since the gimplifier has already turned the regular variables into
366 pointers. Do the same as the gimplifier. */
367 return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
371 /* Given DECL, a non-locally accessed variable, find or create a field
372 in the non-local frame structure for the given nesting context. */
374 static tree
375 lookup_field_for_decl (struct nesting_info *info, tree decl,
376 enum insert_option insert)
378 gcc_checking_assert (decl_function_context (decl) == info->context);
380 if (insert == NO_INSERT)
382 tree *slot = info->field_map->get (decl);
383 return slot ? *slot : NULL_TREE;
386 tree *slot = &info->field_map->get_or_insert (decl);
387 if (!*slot)
389 tree type = get_frame_type (info);
390 tree field = make_node (FIELD_DECL);
391 DECL_NAME (field) = DECL_NAME (decl);
393 if (use_pointer_in_frame (decl))
395 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
396 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
397 DECL_NONADDRESSABLE_P (field) = 1;
399 else
401 TREE_TYPE (field) = TREE_TYPE (decl);
402 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
403 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
404 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
405 DECL_IGNORED_P (field) = DECL_IGNORED_P (decl);
406 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
407 TREE_NO_WARNING (field) = TREE_NO_WARNING (decl);
408 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
410 /* Declare the transformation and adjust the original DECL. For a
411 variable or for a parameter when not optimizing, we make it point
412 to the field in the frame directly. For a parameter, we don't do
413 it when optimizing because the variable tracking pass will already
414 do the job, */
415 if (VAR_P (decl) || !optimize)
417 tree x
418 = build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
419 field, NULL_TREE);
421 /* If the next declaration is a PARM_DECL pointing to the DECL,
422 we need to adjust its VALUE_EXPR directly, since chains of
423 VALUE_EXPRs run afoul of garbage collection. This occurs
424 in Ada for Out parameters that aren't copied in. */
425 tree next = DECL_CHAIN (decl);
426 if (next
427 && TREE_CODE (next) == PARM_DECL
428 && DECL_HAS_VALUE_EXPR_P (next)
429 && DECL_VALUE_EXPR (next) == decl)
430 SET_DECL_VALUE_EXPR (next, x);
432 SET_DECL_VALUE_EXPR (decl, x);
433 DECL_HAS_VALUE_EXPR_P (decl) = 1;
437 insert_field_into_struct (type, field);
438 *slot = field;
440 if (TREE_CODE (decl) == PARM_DECL)
441 info->any_parm_remapped = true;
444 return *slot;
447 /* Build or return the variable that holds the static chain within
448 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
450 static tree
451 get_chain_decl (struct nesting_info *info)
453 tree decl = info->chain_decl;
455 if (!decl)
457 tree type;
459 type = get_frame_type (info->outer);
460 type = build_pointer_type (type);
462 /* Note that this variable is *not* entered into any BIND_EXPR;
463 the construction of this variable is handled specially in
464 expand_function_start and initialize_inlined_parameters.
465 Note also that it's represented as a parameter. This is more
466 close to the truth, since the initial value does come from
467 the caller. */
468 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
469 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
470 DECL_ARTIFICIAL (decl) = 1;
471 DECL_IGNORED_P (decl) = 1;
472 TREE_USED (decl) = 1;
473 DECL_CONTEXT (decl) = info->context;
474 DECL_ARG_TYPE (decl) = type;
476 /* Tell tree-inline.c that we never write to this variable, so
477 it can copy-prop the replacement value immediately. */
478 TREE_READONLY (decl) = 1;
480 info->chain_decl = decl;
482 if (dump_file
483 && (dump_flags & TDF_DETAILS)
484 && !DECL_STATIC_CHAIN (info->context))
485 fprintf (dump_file, "Setting static-chain for %s\n",
486 lang_hooks.decl_printable_name (info->context, 2));
488 DECL_STATIC_CHAIN (info->context) = 1;
490 return decl;
493 /* Build or return the field within the non-local frame state that holds
494 the static chain for INFO->CONTEXT. This is the way to walk back up
495 multiple nesting levels. */
497 static tree
498 get_chain_field (struct nesting_info *info)
500 tree field = info->chain_field;
502 if (!field)
504 tree type = build_pointer_type (get_frame_type (info->outer));
506 field = make_node (FIELD_DECL);
507 DECL_NAME (field) = get_identifier ("__chain");
508 TREE_TYPE (field) = type;
509 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
510 DECL_NONADDRESSABLE_P (field) = 1;
512 insert_field_into_struct (get_frame_type (info), field);
514 info->chain_field = field;
516 if (dump_file
517 && (dump_flags & TDF_DETAILS)
518 && !DECL_STATIC_CHAIN (info->context))
519 fprintf (dump_file, "Setting static-chain for %s\n",
520 lang_hooks.decl_printable_name (info->context, 2));
522 DECL_STATIC_CHAIN (info->context) = 1;
524 return field;
527 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
529 static tree
530 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
531 gcall *call)
533 tree t;
535 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
536 gimple_call_set_lhs (call, t);
537 if (! gsi_end_p (*gsi))
538 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
539 gsi_insert_before (gsi, call, GSI_SAME_STMT);
541 return t;
545 /* Copy EXP into a temporary. Allocate the temporary in the context of
546 INFO and insert the initialization statement before GSI. */
548 static tree
549 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
551 tree t;
552 gimple *stmt;
554 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
555 stmt = gimple_build_assign (t, exp);
556 if (! gsi_end_p (*gsi))
557 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
558 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
560 return t;
564 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
566 static tree
567 gsi_gimplify_val (struct nesting_info *info, tree exp,
568 gimple_stmt_iterator *gsi)
570 if (is_gimple_val (exp))
571 return exp;
572 else
573 return init_tmp_var (info, exp, gsi);
576 /* Similarly, but copy from the temporary and insert the statement
577 after the iterator. */
579 static tree
580 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
582 tree t;
583 gimple *stmt;
585 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
586 stmt = gimple_build_assign (exp, t);
587 if (! gsi_end_p (*gsi))
588 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
589 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
591 return t;
594 /* Build or return the type used to represent a nested function trampoline. */
596 static GTY(()) tree trampoline_type;
598 static tree
599 get_trampoline_type (struct nesting_info *info)
601 unsigned align, size;
602 tree t;
604 if (trampoline_type)
605 return trampoline_type;
607 align = TRAMPOLINE_ALIGNMENT;
608 size = TRAMPOLINE_SIZE;
610 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
611 then allocate extra space so that we can do dynamic alignment. */
612 if (align > STACK_BOUNDARY)
614 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
615 align = STACK_BOUNDARY;
618 t = build_index_type (size_int (size - 1));
619 t = build_array_type (char_type_node, t);
620 t = build_decl (DECL_SOURCE_LOCATION (info->context),
621 FIELD_DECL, get_identifier ("__data"), t);
622 SET_DECL_ALIGN (t, align);
623 DECL_USER_ALIGN (t) = 1;
625 trampoline_type = make_node (RECORD_TYPE);
626 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
627 TYPE_FIELDS (trampoline_type) = t;
628 layout_type (trampoline_type);
629 DECL_CONTEXT (t) = trampoline_type;
631 return trampoline_type;
634 /* Build or return the type used to represent a nested function descriptor. */
636 static GTY(()) tree descriptor_type;
638 static tree
639 get_descriptor_type (struct nesting_info *info)
641 /* The base alignment is that of a function. */
642 const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
643 tree t;
645 if (descriptor_type)
646 return descriptor_type;
648 t = build_index_type (integer_one_node);
649 t = build_array_type (ptr_type_node, t);
650 t = build_decl (DECL_SOURCE_LOCATION (info->context),
651 FIELD_DECL, get_identifier ("__data"), t);
652 SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
653 DECL_USER_ALIGN (t) = 1;
655 descriptor_type = make_node (RECORD_TYPE);
656 TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
657 TYPE_FIELDS (descriptor_type) = t;
658 layout_type (descriptor_type);
659 DECL_CONTEXT (t) = descriptor_type;
661 return descriptor_type;
664 /* Given DECL, a nested function, find or create an element in the
665 var map for this function. */
667 static tree
668 lookup_element_for_decl (struct nesting_info *info, tree decl,
669 enum insert_option insert)
671 if (insert == NO_INSERT)
673 tree *slot = info->var_map->get (decl);
674 return slot ? *slot : NULL_TREE;
677 tree *slot = &info->var_map->get_or_insert (decl);
678 if (!*slot)
679 *slot = build_tree_list (NULL_TREE, NULL_TREE);
681 return (tree) *slot;
684 /* Given DECL, a nested function, create a field in the non-local
685 frame structure for this function. */
687 static tree
688 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
690 tree field = make_node (FIELD_DECL);
691 DECL_NAME (field) = DECL_NAME (decl);
692 TREE_TYPE (field) = type;
693 TREE_ADDRESSABLE (field) = 1;
694 insert_field_into_struct (get_frame_type (info), field);
695 return field;
698 /* Given DECL, a nested function, find or create a field in the non-local
699 frame structure for a trampoline for this function. */
701 static tree
702 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
703 enum insert_option insert)
705 tree elt, field;
707 elt = lookup_element_for_decl (info, decl, insert);
708 if (!elt)
709 return NULL_TREE;
711 field = TREE_PURPOSE (elt);
713 if (!field && insert == INSERT)
715 field = create_field_for_decl (info, decl, get_trampoline_type (info));
716 TREE_PURPOSE (elt) = field;
717 info->any_tramp_created = true;
720 return field;
723 /* Given DECL, a nested function, find or create a field in the non-local
724 frame structure for a descriptor for this function. */
726 static tree
727 lookup_descr_for_decl (struct nesting_info *info, tree decl,
728 enum insert_option insert)
730 tree elt, field;
732 elt = lookup_element_for_decl (info, decl, insert);
733 if (!elt)
734 return NULL_TREE;
736 field = TREE_VALUE (elt);
738 if (!field && insert == INSERT)
740 field = create_field_for_decl (info, decl, get_descriptor_type (info));
741 TREE_VALUE (elt) = field;
742 info->any_descr_created = true;
745 return field;
748 /* Build or return the field within the non-local frame state that holds
749 the non-local goto "jmp_buf". The buffer itself is maintained by the
750 rtl middle-end as dynamic stack space is allocated. */
752 static tree
753 get_nl_goto_field (struct nesting_info *info)
755 tree field = info->nl_goto_field;
756 if (!field)
758 unsigned size;
759 tree type;
761 /* For __builtin_nonlocal_goto, we need N words. The first is the
762 frame pointer, the rest is for the target's stack pointer save
763 area. The number of words is controlled by STACK_SAVEAREA_MODE;
764 not the best interface, but it'll do for now. */
765 if (Pmode == ptr_mode)
766 type = ptr_type_node;
767 else
768 type = lang_hooks.types.type_for_mode (Pmode, 1);
770 scalar_int_mode mode
771 = as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
772 size = GET_MODE_SIZE (mode);
773 size = size / GET_MODE_SIZE (Pmode);
774 size = size + 1;
776 type = build_array_type
777 (type, build_index_type (size_int (size)));
779 field = make_node (FIELD_DECL);
780 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
781 TREE_TYPE (field) = type;
782 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
783 TREE_ADDRESSABLE (field) = 1;
785 insert_field_into_struct (get_frame_type (info), field);
787 info->nl_goto_field = field;
790 return field;
793 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
795 static void
796 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
797 struct nesting_info *info, gimple_seq *pseq)
799 struct walk_stmt_info wi;
801 memset (&wi, 0, sizeof (wi));
802 wi.info = info;
803 wi.val_only = true;
804 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
808 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
810 static inline void
811 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
812 struct nesting_info *info)
814 gimple_seq body = gimple_body (info->context);
815 walk_body (callback_stmt, callback_op, info, &body);
816 gimple_set_body (info->context, body);
819 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
821 static void
822 walk_gimple_omp_for (gomp_for *for_stmt,
823 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
824 struct nesting_info *info)
826 struct walk_stmt_info wi;
827 gimple_seq seq;
828 tree t;
829 size_t i;
831 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
833 seq = NULL;
834 memset (&wi, 0, sizeof (wi));
835 wi.info = info;
836 wi.gsi = gsi_last (seq);
838 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
840 wi.val_only = false;
841 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
842 &wi, NULL);
843 wi.val_only = true;
844 wi.is_lhs = false;
845 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
846 &wi, NULL);
848 wi.val_only = true;
849 wi.is_lhs = false;
850 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
851 &wi, NULL);
853 t = gimple_omp_for_incr (for_stmt, i);
854 gcc_assert (BINARY_CLASS_P (t));
855 wi.val_only = false;
856 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
857 wi.val_only = true;
858 wi.is_lhs = false;
859 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
862 seq = gsi_seq (wi.gsi);
863 if (!gimple_seq_empty_p (seq))
865 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
866 annotate_all_with_location (seq, gimple_location (for_stmt));
867 gimple_seq_add_seq (&pre_body, seq);
868 gimple_omp_for_set_pre_body (for_stmt, pre_body);
872 /* Similarly for ROOT and all functions nested underneath, depth first. */
874 static void
875 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
876 struct nesting_info *root)
878 struct nesting_info *n;
879 FOR_EACH_NEST_INFO (n, root)
880 walk_function (callback_stmt, callback_op, n);
884 /* We have to check for a fairly pathological case. The operands of function
885 nested function are to be interpreted in the context of the enclosing
886 function. So if any are variably-sized, they will get remapped when the
887 enclosing function is inlined. But that remapping would also have to be
888 done in the types of the PARM_DECLs of the nested function, meaning the
889 argument types of that function will disagree with the arguments in the
890 calls to that function. So we'd either have to make a copy of the nested
891 function corresponding to each time the enclosing function was inlined or
892 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
893 function. The former is not practical. The latter would still require
894 detecting this case to know when to add the conversions. So, for now at
895 least, we don't inline such an enclosing function.
897 We have to do that check recursively, so here return indicating whether
898 FNDECL has such a nested function. ORIG_FN is the function we were
899 trying to inline to use for checking whether any argument is variably
900 modified by anything in it.
902 It would be better to do this in tree-inline.c so that we could give
903 the appropriate warning for why a function can't be inlined, but that's
904 too late since the nesting structure has already been flattened and
905 adding a flag just to record this fact seems a waste of a flag. */
907 static bool
908 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
910 struct cgraph_node *cgn = cgraph_node::get (fndecl);
911 tree arg;
913 for (cgn = first_nested_function (cgn); cgn;
914 cgn = next_nested_function (cgn))
916 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
917 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
918 return true;
920 if (check_for_nested_with_variably_modified (cgn->decl,
921 orig_fndecl))
922 return true;
925 return false;
928 /* Construct our local datastructure describing the function nesting
929 tree rooted by CGN. */
931 static struct nesting_info *
932 create_nesting_tree (struct cgraph_node *cgn)
934 struct nesting_info *info = XCNEW (struct nesting_info);
935 info->field_map = new hash_map<tree, tree>;
936 info->var_map = new hash_map<tree, tree>;
937 info->mem_refs = new hash_set<tree *>;
938 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
939 info->context = cgn->decl;
940 info->thunk_p = cgn->thunk.thunk_p;
942 for (cgn = first_nested_function (cgn); cgn;
943 cgn = next_nested_function (cgn))
945 struct nesting_info *sub = create_nesting_tree (cgn);
946 sub->outer = info;
947 sub->next = info->inner;
948 info->inner = sub;
951 /* See discussion at check_for_nested_with_variably_modified for a
952 discussion of why this has to be here. */
953 if (check_for_nested_with_variably_modified (info->context, info->context))
954 DECL_UNINLINABLE (info->context) = true;
956 return info;
959 /* Return an expression computing the static chain for TARGET_CONTEXT
960 from INFO->CONTEXT. Insert any necessary computations before TSI. */
962 static tree
963 get_static_chain (struct nesting_info *info, tree target_context,
964 gimple_stmt_iterator *gsi)
966 struct nesting_info *i;
967 tree x;
969 if (info->context == target_context)
971 x = build_addr (info->frame_decl);
972 info->static_chain_added |= 1;
974 else
976 x = get_chain_decl (info);
977 info->static_chain_added |= 2;
979 for (i = info->outer; i->context != target_context; i = i->outer)
981 tree field = get_chain_field (i);
983 x = build_simple_mem_ref_notrap (x);
984 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
985 x = init_tmp_var (info, x, gsi);
989 return x;
993 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
994 frame as seen from INFO->CONTEXT. Insert any necessary computations
995 before GSI. */
997 static tree
998 get_frame_field (struct nesting_info *info, tree target_context,
999 tree field, gimple_stmt_iterator *gsi)
1001 struct nesting_info *i;
1002 tree x;
1004 if (info->context == target_context)
1006 /* Make sure frame_decl gets created. */
1007 (void) get_frame_type (info);
1008 x = info->frame_decl;
1009 info->static_chain_added |= 1;
1011 else
1013 x = get_chain_decl (info);
1014 info->static_chain_added |= 2;
1016 for (i = info->outer; i->context != target_context; i = i->outer)
1018 tree field = get_chain_field (i);
1020 x = build_simple_mem_ref_notrap (x);
1021 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1022 x = init_tmp_var (info, x, gsi);
1025 x = build_simple_mem_ref_notrap (x);
1028 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1029 return x;
1032 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
1034 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
1035 in the nested function with DECL_VALUE_EXPR set to reference the true
1036 variable in the parent function. This is used both for debug info
1037 and in OMP lowering. */
1039 static tree
1040 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
1042 tree target_context;
1043 struct nesting_info *i;
1044 tree x, field, new_decl;
/* Memoize: each DECL gets exactly one shadow decl per function.  */
1046 tree *slot = &info->var_map->get_or_insert (decl);
1048 if (*slot)
1049 return *slot;
1051 target_context = decl_function_context (decl);
1053 /* A copy of the code in get_frame_field, but without the temporaries. */
1054 if (info->context == target_context)
1056 /* Make sure frame_decl gets created. */
1057 (void) get_frame_type (info);
1058 x = info->frame_decl;
1059 i = info;
1060 info->static_chain_added |= 1;
1062 else
1064 x = get_chain_decl (info);
1065 info->static_chain_added |= 2;
1066 for (i = info->outer; i->context != target_context; i = i->outer)
1068 field = get_chain_field (i);
1069 x = build_simple_mem_ref_notrap (x);
1070 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1072 x = build_simple_mem_ref_notrap (x);
/* Select DECL's field in the target frame; decls stored by pointer
   need one more dereference.  */
1075 field = lookup_field_for_decl (i, decl, INSERT);
1076 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1077 if (use_pointer_in_frame (decl))
1078 x = build_simple_mem_ref_notrap (x);
/* Build the shadow VAR_DECL, mirroring the flags of the original.  */
1080 /* ??? We should be remapping types as well, surely. */
1081 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1082 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1083 DECL_CONTEXT (new_decl) = info->context;
1084 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1085 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1086 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1087 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1088 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1089 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1090 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1091 if ((TREE_CODE (decl) == PARM_DECL
1092 || TREE_CODE (decl) == RESULT_DECL
1093 || VAR_P (decl))
1094 && DECL_BY_REFERENCE (decl))
1095 DECL_BY_REFERENCE (new_decl) = 1;
/* The value expression lets debug info and OMP lowering see through
   the shadow decl to the frame field built above.  */
1097 SET_DECL_VALUE_EXPR (new_decl, x);
1098 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1100 *slot = new_decl;
1101 DECL_CHAIN (new_decl) = info->debug_var_chain;
1102 info->debug_var_chain = new_decl;
/* At -O0, also create shadow decls for VLA bounds referenced from the
   type, so they remain visible to the debugger.  */
1104 if (!optimize
1105 && info->context != target_context
1106 && variably_modified_type_p (TREE_TYPE (decl), NULL))
1107 note_nonlocal_vla_type (info, TREE_TYPE (decl));
1109 return new_decl;
1113 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1114 and PARM_DECLs that belong to outer functions.
1116 The rewrite will involve some number of structure accesses back up
1117 the static chain. E.g. for a variable FOO up one nesting level it'll
1118 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
1119 indirections apply to decls for which use_pointer_in_frame is true. */
1121 static tree
1122 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
1124 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1125 struct nesting_info *const info = (struct nesting_info *) wi->info;
1126 tree t = *tp;
/* By default we handle each node completely ourselves; cases that want
   generic recursion set *walk_subtrees back to 1.  */
1128 *walk_subtrees = 0;
1129 switch (TREE_CODE (t))
1131 case VAR_DECL:
1132 /* Non-automatic variables are never processed. */
1133 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1134 break;
1135 /* FALLTHRU */
1137 case PARM_DECL:
1139 tree x, target_context = decl_function_context (t);
/* Decls belonging to this very function need no rewriting.  */
1141 if (info->context == target_context)
1142 break;
1144 wi->changed = true;
/* Suppressed decls (e.g. OMP privatized ones) get a debug shadow decl
   instead of a frame access.  */
1146 if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1147 x = get_nonlocal_debug_decl (info, t);
1148 else
1150 struct nesting_info *i = info;
1151 while (i && i->context != target_context)
1152 i = i->outer;
1153 /* If none of the outer contexts is the target context, this means
1154 that the VAR or PARM_DECL is referenced in a wrong context. */
1155 if (!i)
1156 internal_error ("%s from %s referenced in %s",
1157 IDENTIFIER_POINTER (DECL_NAME (t)),
1158 IDENTIFIER_POINTER (DECL_NAME (target_context)),
1159 IDENTIFIER_POINTER (DECL_NAME (info->context)));
1161 x = lookup_field_for_decl (i, t, INSERT);
1162 x = get_frame_field (info, target_context, x, &wi->gsi);
1163 if (use_pointer_in_frame (t))
1165 x = init_tmp_var (info, x, &wi->gsi);
1166 x = build_simple_mem_ref_notrap (x);
/* In value context, materialize the access into a temporary; use the
   save variant when this is the LHS of an assignment.  */
1170 if (wi->val_only)
1172 if (wi->is_lhs)
1173 x = save_tmp_var (info, x, &wi->gsi);
1174 else
1175 x = init_tmp_var (info, x, &wi->gsi);
1178 *tp = x;
1180 break;
1182 case LABEL_DECL:
1183 /* We're taking the address of a label from a parent function, but
1184 this is not itself a non-local goto. Mark the label such that it
1185 will not be deleted, much as we would with a label address in
1186 static storage. */
1187 if (decl_function_context (t) != info->context)
1188 FORCED_LABEL (t) = 1;
1189 break;
1191 case ADDR_EXPR:
1193 bool save_val_only = wi->val_only;
/* Walk the operand in address context, tracking whether anything
   underneath was rewritten.  */
1195 wi->val_only = false;
1196 wi->is_lhs = false;
1197 wi->changed = false;
1198 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
1199 wi->val_only = true;
1201 if (wi->changed)
1203 tree save_context;
1205 /* If we changed anything, we might no longer be directly
1206 referencing a decl. */
1207 save_context = current_function_decl;
1208 current_function_decl = info->context;
1209 recompute_tree_invariant_for_addr_expr (t);
1210 current_function_decl = save_context;
1212 /* If the callback converted the address argument in a context
1213 where we only accept variables (and min_invariant, presumably),
1214 then compute the address into a temporary. */
1215 if (save_val_only)
1216 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1217 t, &wi->gsi);
1220 break;
1222 case REALPART_EXPR:
1223 case IMAGPART_EXPR:
1224 case COMPONENT_REF:
1225 case ARRAY_REF:
1226 case ARRAY_RANGE_REF:
1227 case BIT_FIELD_REF:
1228 /* Go down this entire nest and just look at the final prefix and
1229 anything that describes the references. Otherwise, we lose track
1230 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1231 wi->val_only = true;
1232 wi->is_lhs = false;
1233 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1235 if (TREE_CODE (t) == COMPONENT_REF)
1236 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1237 NULL);
1238 else if (TREE_CODE (t) == ARRAY_REF
1239 || TREE_CODE (t) == ARRAY_RANGE_REF)
1241 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1242 wi, NULL);
1243 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1244 wi, NULL);
1245 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1246 wi, NULL);
/* TP now points at the innermost base object of the reference.  */
1249 wi->val_only = false;
1250 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1251 break;
1253 case VIEW_CONVERT_EXPR:
1254 /* Just request to look at the subtrees, leaving val_only and lhs
1255 untouched. This might actually be for !val_only + lhs, in which
1256 case we don't want to force a replacement by a temporary. */
1257 *walk_subtrees = 1;
1258 break;
1260 default:
1261 if (!IS_TYPE_OR_DECL_P (t))
1263 *walk_subtrees = 1;
1264 wi->val_only = true;
1265 wi->is_lhs = false;
1267 break;
1270 return NULL_TREE;
1273 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1274 struct walk_stmt_info *);
1276 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1277 and PARM_DECLs that belong to outer functions. */
/* Returns true if any clause references a non-local decl, i.e. the
   construct will need the static chain.  Clause decls that are rewritten
   are also added to INFO->suppress_expansion so the generic operand walk
   does not rewrite them again.  */
1279 static bool
1280 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1282 struct nesting_info *const info = (struct nesting_info *) wi->info;
1283 bool need_chain = false, need_stmts = false;
1284 tree clause, decl, *pdecl;
1285 int dummy;
1286 bitmap new_suppress;
1288 new_suppress = BITMAP_GGC_ALLOC ();
1289 bitmap_copy (new_suppress, info->suppress_expansion);
/* First pass: rewrite clause decls and operand expressions.  */
1291 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1293 pdecl = NULL;
1294 switch (OMP_CLAUSE_CODE (clause))
1296 case OMP_CLAUSE_REDUCTION:
1297 case OMP_CLAUSE_IN_REDUCTION:
1298 case OMP_CLAUSE_TASK_REDUCTION:
1299 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1300 need_stmts = true;
/* For array-section reductions the decl is wrapped in a MEM_REF;
   peel down to the underlying decl.  */
1301 if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
1303 pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
1304 if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
1305 pdecl = &TREE_OPERAND (*pdecl, 0);
1306 if (TREE_CODE (*pdecl) == INDIRECT_REF
1307 || TREE_CODE (*pdecl) == ADDR_EXPR)
1308 pdecl = &TREE_OPERAND (*pdecl, 0);
1310 goto do_decl_clause;
1312 case OMP_CLAUSE_LASTPRIVATE:
1313 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1314 need_stmts = true;
1315 goto do_decl_clause;
1317 case OMP_CLAUSE_LINEAR:
1318 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1319 need_stmts = true;
1320 wi->val_only = true;
1321 wi->is_lhs = false;
1322 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1323 &dummy, wi);
1324 goto do_decl_clause;
1326 case OMP_CLAUSE_PRIVATE:
1327 case OMP_CLAUSE_FIRSTPRIVATE:
1328 case OMP_CLAUSE_COPYPRIVATE:
1329 case OMP_CLAUSE_SHARED:
1330 case OMP_CLAUSE_TO_DECLARE:
1331 case OMP_CLAUSE_LINK:
1332 case OMP_CLAUSE_USE_DEVICE_PTR:
1333 case OMP_CLAUSE_USE_DEVICE_ADDR:
1334 case OMP_CLAUSE_IS_DEVICE_PTR:
1335 do_decl_clause:
1336 if (pdecl == NULL)
1337 pdecl = &OMP_CLAUSE_DECL (clause);
1338 decl = *pdecl;
1339 if (VAR_P (decl)
1340 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1341 break;
/* A decl from an outer function: replace it with a debug shadow
   decl and suppress further expansion of this decl.  */
1342 if (decl_function_context (decl) != info->context)
1344 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1345 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1346 bitmap_set_bit (new_suppress, DECL_UID (decl));
1347 *pdecl = get_nonlocal_debug_decl (info, decl);
1348 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1349 need_chain = true;
1351 break;
1353 case OMP_CLAUSE_SCHEDULE:
1354 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1355 break;
1356 /* FALLTHRU */
1357 case OMP_CLAUSE_FINAL:
1358 case OMP_CLAUSE_IF:
1359 case OMP_CLAUSE_NUM_THREADS:
1360 case OMP_CLAUSE_DEPEND:
1361 case OMP_CLAUSE_DEVICE:
1362 case OMP_CLAUSE_NUM_TEAMS:
1363 case OMP_CLAUSE_THREAD_LIMIT:
1364 case OMP_CLAUSE_SAFELEN:
1365 case OMP_CLAUSE_SIMDLEN:
1366 case OMP_CLAUSE_PRIORITY:
1367 case OMP_CLAUSE_GRAINSIZE:
1368 case OMP_CLAUSE_NUM_TASKS:
1369 case OMP_CLAUSE_HINT:
1370 case OMP_CLAUSE_NUM_GANGS:
1371 case OMP_CLAUSE_NUM_WORKERS:
1372 case OMP_CLAUSE_VECTOR_LENGTH:
1373 case OMP_CLAUSE_GANG:
1374 case OMP_CLAUSE_WORKER:
1375 case OMP_CLAUSE_VECTOR:
1376 case OMP_CLAUSE_ASYNC:
1377 case OMP_CLAUSE_WAIT:
1378 /* Several OpenACC clauses have optional arguments. Check if they
1379 are present. */
1380 if (OMP_CLAUSE_OPERAND (clause, 0))
1382 wi->val_only = true;
1383 wi->is_lhs = false;
1384 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1385 &dummy, wi);
1388 /* The gang clause accepts two arguments. */
1389 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1390 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1392 wi->val_only = true;
1393 wi->is_lhs = false;
1394 convert_nonlocal_reference_op
1395 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1397 break;
1399 case OMP_CLAUSE_DIST_SCHEDULE:
1400 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1402 wi->val_only = true;
1403 wi->is_lhs = false;
1404 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1405 &dummy, wi);
1407 break;
1409 case OMP_CLAUSE_MAP:
1410 case OMP_CLAUSE_TO:
1411 case OMP_CLAUSE_FROM:
1412 if (OMP_CLAUSE_SIZE (clause))
1414 wi->val_only = true;
1415 wi->is_lhs = false;
1416 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1417 &dummy, wi);
/* Map clauses over a plain decl go the decl-clause path; anything
   more complex (array sections etc.) is walked as an operand.  */
1419 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1420 goto do_decl_clause;
1421 wi->val_only = true;
1422 wi->is_lhs = false;
1423 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1424 wi, NULL);
1425 break;
1427 case OMP_CLAUSE_ALIGNED:
1428 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1430 wi->val_only = true;
1431 wi->is_lhs = false;
1432 convert_nonlocal_reference_op
1433 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1435 /* FALLTHRU */
1436 case OMP_CLAUSE_NONTEMPORAL:
1437 /* Like do_decl_clause, but don't add any suppression. */
1438 decl = OMP_CLAUSE_DECL (clause);
1439 if (VAR_P (decl)
1440 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1441 break;
1442 if (decl_function_context (decl) != info->context)
1444 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1445 need_chain = true;
1447 break;
1449 case OMP_CLAUSE_NOWAIT:
1450 case OMP_CLAUSE_ORDERED:
1451 case OMP_CLAUSE_DEFAULT:
1452 case OMP_CLAUSE_COPYIN:
1453 case OMP_CLAUSE_COLLAPSE:
1454 case OMP_CLAUSE_TILE:
1455 case OMP_CLAUSE_UNTIED:
1456 case OMP_CLAUSE_MERGEABLE:
1457 case OMP_CLAUSE_PROC_BIND:
1458 case OMP_CLAUSE_NOGROUP:
1459 case OMP_CLAUSE_THREADS:
1460 case OMP_CLAUSE_SIMD:
1461 case OMP_CLAUSE_DEFAULTMAP:
1462 case OMP_CLAUSE_ORDER:
1463 case OMP_CLAUSE_SEQ:
1464 case OMP_CLAUSE_INDEPENDENT:
1465 case OMP_CLAUSE_AUTO:
1466 case OMP_CLAUSE_IF_PRESENT:
1467 case OMP_CLAUSE_FINALIZE:
1468 case OMP_CLAUSE__CONDTEMP_:
1469 case OMP_CLAUSE__SCANTEMP_:
1470 break;
1472 /* The following clause belongs to the OpenACC cache directive, which
1473 is discarded during gimplification. */
1474 case OMP_CLAUSE__CACHE_:
1475 /* The following clauses are only allowed in the OpenMP declare simd
1476 directive, so not seen here. */
1477 case OMP_CLAUSE_UNIFORM:
1478 case OMP_CLAUSE_INBRANCH:
1479 case OMP_CLAUSE_NOTINBRANCH:
1480 /* The following clauses are only allowed on OpenMP cancel and
1481 cancellation point directives, which at this point have already
1482 been lowered into a function call. */
1483 case OMP_CLAUSE_FOR:
1484 case OMP_CLAUSE_PARALLEL:
1485 case OMP_CLAUSE_SECTIONS:
1486 case OMP_CLAUSE_TASKGROUP:
1487 /* The following clauses are only added during OMP lowering; nested
1488 function decomposition happens before that. */
1489 case OMP_CLAUSE__LOOPTEMP_:
1490 case OMP_CLAUSE__REDUCTEMP_:
1491 case OMP_CLAUSE__SIMDUID_:
1492 case OMP_CLAUSE__SIMT_:
1493 /* Anything else. */
1494 default:
1495 gcc_unreachable ();
1499 info->suppress_expansion = new_suppress;
/* Second pass: some clauses carry embedded GIMPLE sequences; walk them
   too, temporarily treating their placeholder decls and new locals as
   belonging to this function.  */
1501 if (need_stmts)
1502 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1503 switch (OMP_CLAUSE_CODE (clause))
1505 case OMP_CLAUSE_REDUCTION:
1506 case OMP_CLAUSE_IN_REDUCTION:
1507 case OMP_CLAUSE_TASK_REDUCTION:
1508 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1510 tree old_context
1511 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1512 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1513 = info->context;
1514 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1515 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1516 = info->context;
1517 tree save_local_var_chain = info->new_local_var_chain;
1518 info->new_local_var_chain = NULL;
1519 gimple_seq *seq = &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause);
1520 walk_body (convert_nonlocal_reference_stmt,
1521 convert_nonlocal_reference_op, info, seq);
1522 if (info->new_local_var_chain)
1523 declare_vars (info->new_local_var_chain,
1524 gimple_seq_first_stmt (*seq), false);
1525 info->new_local_var_chain = NULL;
1526 seq = &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause);
1527 walk_body (convert_nonlocal_reference_stmt,
1528 convert_nonlocal_reference_op, info, seq);
1529 if (info->new_local_var_chain)
1530 declare_vars (info->new_local_var_chain,
1531 gimple_seq_first_stmt (*seq), false);
1532 info->new_local_var_chain = save_local_var_chain;
1533 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1534 = old_context;
1535 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1536 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1537 = old_context;
1539 break;
1541 case OMP_CLAUSE_LASTPRIVATE:
1543 tree save_local_var_chain = info->new_local_var_chain;
1544 info->new_local_var_chain = NULL;
1545 gimple_seq *seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
1546 walk_body (convert_nonlocal_reference_stmt,
1547 convert_nonlocal_reference_op, info, seq);
1548 if (info->new_local_var_chain)
1549 declare_vars (info->new_local_var_chain,
1550 gimple_seq_first_stmt (*seq), false);
1551 info->new_local_var_chain = save_local_var_chain;
1553 break;
1555 case OMP_CLAUSE_LINEAR:
1557 tree save_local_var_chain = info->new_local_var_chain;
1558 info->new_local_var_chain = NULL;
1559 gimple_seq *seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause);
1560 walk_body (convert_nonlocal_reference_stmt,
1561 convert_nonlocal_reference_op, info, seq);
1562 if (info->new_local_var_chain)
1563 declare_vars (info->new_local_var_chain,
1564 gimple_seq_first_stmt (*seq), false);
1565 info->new_local_var_chain = save_local_var_chain;
1567 break;
1569 default:
1570 break;
1573 return need_chain;
1576 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1578 static void
1579 note_nonlocal_vla_type (struct nesting_info *info, tree type)
/* Peel anonymous pointer wrappers ...  */
1581 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1582 type = TREE_TYPE (type);
/* ... look through typedefs to the original type ...  */
1584 if (TYPE_NAME (type)
1585 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1586 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1587 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
/* ... then strip remaining pointer/vector/function/method layers.  */
1589 while (POINTER_TYPE_P (type)
1590 || TREE_CODE (type) == VECTOR_TYPE
1591 || TREE_CODE (type) == FUNCTION_TYPE
1592 || TREE_CODE (type) == METHOD_TYPE)
1593 type = TREE_TYPE (type);
/* For arrays, recurse into the element type and give any non-local
   bound decls a debug shadow decl in this function.  */
1595 if (TREE_CODE (type) == ARRAY_TYPE)
1597 tree domain, t;
1599 note_nonlocal_vla_type (info, TREE_TYPE (type));
1600 domain = TYPE_DOMAIN (type);
1601 if (domain)
1603 t = TYPE_MIN_VALUE (domain);
1604 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1605 && decl_function_context (t) != info->context)
1606 get_nonlocal_debug_decl (info, t);
1607 t = TYPE_MAX_VALUE (domain);
1608 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1609 && decl_function_context (t) != info->context)
1610 get_nonlocal_debug_decl (info, t);
1615 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1616 PARM_DECLs that belong to outer functions. This handles statements
1617 that are not handled via the standard recursion done in
1618 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1619 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1620 operands of STMT have been handled by this function. */
1622 static tree
1623 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1624 struct walk_stmt_info *wi)
1626 struct nesting_info *info = (struct nesting_info *) wi->info;
1627 tree save_local_var_chain;
1628 bitmap save_suppress;
1629 gimple *stmt = gsi_stmt (*gsi);
1631 switch (gimple_code (stmt))
1633 case GIMPLE_GOTO:
1634 /* Don't walk non-local gotos for now. */
1635 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1637 wi->val_only = true;
1638 wi->is_lhs = false;
1639 *handled_ops_p = false;
1640 return NULL_TREE;
1642 break;
/* Host teams constructs are handled like plain clause-bearing
   statements; offloaded ones fall through to the taskreg path.  */
1644 case GIMPLE_OMP_TEAMS:
1645 if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
1647 save_suppress = info->suppress_expansion;
1648 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
1649 wi);
1650 walk_body (convert_nonlocal_reference_stmt,
1651 convert_nonlocal_reference_op, info,
1652 gimple_omp_body_ptr (stmt));
1653 info->suppress_expansion = save_suppress;
1654 break;
1656 /* FALLTHRU */
1658 case GIMPLE_OMP_PARALLEL:
1659 case GIMPLE_OMP_TASK:
1660 save_suppress = info->suppress_expansion;
/* If any clause needs the static chain, pass the chain decl to the
   outlined region as a firstprivate.  */
1661 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1662 wi))
1664 tree c, decl;
1665 decl = get_chain_decl (info);
1666 c = build_omp_clause (gimple_location (stmt),
1667 OMP_CLAUSE_FIRSTPRIVATE);
1668 OMP_CLAUSE_DECL (c) = decl;
1669 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1670 gimple_omp_taskreg_set_clauses (stmt, c);
1673 save_local_var_chain = info->new_local_var_chain;
1674 info->new_local_var_chain = NULL;
1676 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1677 info, gimple_omp_body_ptr (stmt));
/* Temporaries created while walking the body belong inside the
   construct, not in the enclosing function.  */
1679 if (info->new_local_var_chain)
1680 declare_vars (info->new_local_var_chain,
1681 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1682 false);
1683 info->new_local_var_chain = save_local_var_chain;
1684 info->suppress_expansion = save_suppress;
1685 break;
1687 case GIMPLE_OMP_FOR:
1688 save_suppress = info->suppress_expansion;
1689 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1690 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1691 convert_nonlocal_reference_stmt,
1692 convert_nonlocal_reference_op, info);
1693 walk_body (convert_nonlocal_reference_stmt,
1694 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1695 info->suppress_expansion = save_suppress;
1696 break;
1698 case GIMPLE_OMP_SECTIONS:
1699 save_suppress = info->suppress_expansion;
1700 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1701 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1702 info, gimple_omp_body_ptr (stmt));
1703 info->suppress_expansion = save_suppress;
1704 break;
1706 case GIMPLE_OMP_SINGLE:
1707 save_suppress = info->suppress_expansion;
1708 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1709 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1710 info, gimple_omp_body_ptr (stmt));
1711 info->suppress_expansion = save_suppress;
1712 break;
1714 case GIMPLE_OMP_TASKGROUP:
1715 save_suppress = info->suppress_expansion;
1716 convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
1717 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1718 info, gimple_omp_body_ptr (stmt));
1719 info->suppress_expansion = save_suppress;
1720 break;
1722 case GIMPLE_OMP_TARGET:
/* Non-offloaded target constructs just get their clauses and body
   walked in place ...  */
1723 if (!is_gimple_omp_offloaded (stmt))
1725 save_suppress = info->suppress_expansion;
1726 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1727 wi);
1728 info->suppress_expansion = save_suppress;
1729 walk_body (convert_nonlocal_reference_stmt,
1730 convert_nonlocal_reference_op, info,
1731 gimple_omp_body_ptr (stmt));
1732 break;
/* ... offloaded ones additionally map the static chain to the device
   when any clause needs it.  */
1734 save_suppress = info->suppress_expansion;
1735 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1736 wi))
1738 tree c, decl;
1739 decl = get_chain_decl (info);
1740 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1741 OMP_CLAUSE_DECL (c) = decl;
1742 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1743 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1744 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1745 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1748 save_local_var_chain = info->new_local_var_chain;
1749 info->new_local_var_chain = NULL;
1751 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1752 info, gimple_omp_body_ptr (stmt));
1754 if (info->new_local_var_chain)
1755 declare_vars (info->new_local_var_chain,
1756 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1757 false);
1758 info->new_local_var_chain = save_local_var_chain;
1759 info->suppress_expansion = save_suppress;
1760 break;
1762 case GIMPLE_OMP_SECTION:
1763 case GIMPLE_OMP_MASTER:
1764 case GIMPLE_OMP_ORDERED:
1765 case GIMPLE_OMP_SCAN:
1766 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1767 info, gimple_omp_body_ptr (stmt));
1768 break;
1770 case GIMPLE_BIND:
1772 gbind *bind_stmt = as_a <gbind *> (stmt);
1774 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1775 if (TREE_CODE (var) == NAMELIST_DECL)
1777 /* Adjust decls mentioned in NAMELIST_DECL. */
1778 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1779 tree decl;
1780 unsigned int i;
1782 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1784 if (VAR_P (decl)
1785 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1786 continue;
1787 if (decl_function_context (decl) != info->context)
1788 CONSTRUCTOR_ELT (decls, i)->value
1789 = get_nonlocal_debug_decl (info, decl);
1793 *handled_ops_p = false;
1794 return NULL_TREE;
1796 case GIMPLE_COND:
1797 wi->val_only = true;
1798 wi->is_lhs = false;
1799 *handled_ops_p = false;
1800 return NULL_TREE;
1802 case GIMPLE_ASSIGN:
/* A clobber of a non-local variable makes no sense after rewriting;
   drop it rather than clobbering the shared frame slot.  */
1803 if (gimple_clobber_p (stmt))
1805 tree lhs = gimple_assign_lhs (stmt);
1806 if (DECL_P (lhs)
1807 && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
1808 && decl_function_context (lhs) != info->context)
1810 gsi_replace (gsi, gimple_build_nop (), true);
1811 break;
1814 *handled_ops_p = false;
1815 return NULL_TREE;
1817 default:
1818 /* For every other statement that we are not interested in
1819 handling here, let the walker traverse the operands. */
1820 *handled_ops_p = false;
1821 return NULL_TREE;
1824 /* We have handled all of STMT operands, no need to traverse the operands. */
1825 *handled_ops_p = true;
1826 return NULL_TREE;
1830 /* A subroutine of convert_local_reference. Create a local variable
1831 in the parent function with DECL_VALUE_EXPR set to reference the
1832 field in FRAME. This is used both for debug info and in OMP
1833 lowering. */
1835 static tree
1836 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1838 tree x, new_decl;
/* Memoize: reuse an existing shadow decl for DECL if one was made.  */
1840 tree *slot = &info->var_map->get_or_insert (decl);
1841 if (*slot)
1842 return *slot;
1844 /* Make sure frame_decl gets created. */
1845 (void) get_frame_type (info);
1846 x = info->frame_decl;
/* Unlike the non-local case, the field is in our own frame, so no
   static-chain walk is needed.  */
1847 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
/* Build the shadow VAR_DECL, mirroring the flags of the original.  */
1849 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1850 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1851 DECL_CONTEXT (new_decl) = info->context;
1852 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1853 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1854 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1855 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1856 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1857 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1858 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1859 if ((TREE_CODE (decl) == PARM_DECL
1860 || TREE_CODE (decl) == RESULT_DECL
1861 || VAR_P (decl))
1862 && DECL_BY_REFERENCE (decl))
1863 DECL_BY_REFERENCE (new_decl) = 1;
/* The value expression points the shadow decl at the frame field.  */
1865 SET_DECL_VALUE_EXPR (new_decl, x);
1866 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1867 *slot = new_decl;
1869 DECL_CHAIN (new_decl) = info->debug_var_chain;
1870 info->debug_var_chain = new_decl;
1872 /* Do not emit debug info twice. */
1873 DECL_IGNORED_P (decl) = 1;
1875 return new_decl;
1879 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1880 and PARM_DECLs that were referenced by inner nested functions.
1881 The rewrite will be a structure reference to the local frame variable. */
1883 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1885 static tree
1886 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1888 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1889 struct nesting_info *const info = (struct nesting_info *) wi->info;
1890 tree t = *tp, field, x;
1891 bool save_val_only;
1893 *walk_subtrees = 0;
1894 switch (TREE_CODE (t))
1896 case VAR_DECL:
1897 /* Non-automatic variables are never processed. */
1898 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1899 break;
1900 /* FALLTHRU */
1902 case PARM_DECL:
/* Only rewrite this function's own decls (the frame decl itself is
   excluded); decls of outer functions are the non-local walker's job.  */
1903 if (t != info->frame_decl && decl_function_context (t) == info->context)
1905 /* If we copied a pointer to the frame, then the original decl
1906 is used unchanged in the parent function. */
1907 if (use_pointer_in_frame (t))
1908 break;
1910 /* No need to transform anything if no child references the
1911 variable. */
1912 field = lookup_field_for_decl (info, t, NO_INSERT);
1913 if (!field)
1914 break;
1915 wi->changed = true;
/* Suppressed decls get a debug shadow decl; otherwise access the
   variable's slot in our own frame object.  */
1917 if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1918 x = get_local_debug_decl (info, t, field)
1919 else
1920 x = get_frame_field (info, info->context, field, &wi->gsi);
1922 if (wi->val_only)
1924 if (wi->is_lhs)
1925 x = save_tmp_var (info, x, &wi->gsi);
1926 else
1927 x = init_tmp_var (info, x, &wi->gsi);
1930 *tp = x;
1932 break;
1934 case ADDR_EXPR:
1935 save_val_only = wi->val_only;
1936 wi->val_only = false;
1937 wi->is_lhs = false;
1938 wi->changed = false;
1939 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1940 wi->val_only = save_val_only;
1942 /* If we converted anything ... */
1943 if (wi->changed)
1945 tree save_context;
1947 /* Then the frame decl is now addressable. */
1948 TREE_ADDRESSABLE (info->frame_decl) = 1;
1950 save_context = current_function_decl;
1951 current_function_decl = info->context;
1952 recompute_tree_invariant_for_addr_expr (t);
1953 current_function_decl = save_context;
1955 /* If we are in a context where we only accept values, then
1956 compute the address into a temporary. */
1957 if (save_val_only)
1958 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1959 t, &wi->gsi);
1961 break;
1963 case REALPART_EXPR:
1964 case IMAGPART_EXPR:
1965 case COMPONENT_REF:
1966 case ARRAY_REF:
1967 case ARRAY_RANGE_REF:
1968 case BIT_FIELD_REF:
1969 /* Go down this entire nest and just look at the final prefix and
1970 anything that describes the references. Otherwise, we lose track
1971 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1972 save_val_only = wi->val_only;
1973 wi->val_only = true;
1974 wi->is_lhs = false;
1975 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1977 if (TREE_CODE (t) == COMPONENT_REF)
1978 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1979 NULL);
1980 else if (TREE_CODE (t) == ARRAY_REF
1981 || TREE_CODE (t) == ARRAY_RANGE_REF)
1983 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1984 NULL);
1985 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1986 NULL);
1987 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1988 NULL);
/* TP now points at the innermost base object of the reference.  */
1991 wi->val_only = false;
1992 walk_tree (tp, convert_local_reference_op, wi, NULL);
1993 wi->val_only = save_val_only;
1994 break;
1996 case MEM_REF:
1997 save_val_only = wi->val_only;
1998 wi->val_only = true;
1999 wi->is_lhs = false;
2000 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
2001 wi, NULL);
2002 /* We need to re-fold the MEM_REF as component references as
2003 part of a ADDR_EXPR address are not allowed. But we cannot
2004 fold here, as the chain record type is not yet finalized. */
2005 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
2006 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2007 info->mem_refs->add (tp);
2008 wi->val_only = save_val_only;
2009 break;
2011 case VIEW_CONVERT_EXPR:
2012 /* Just request to look at the subtrees, leaving val_only and lhs
2013 untouched. This might actually be for !val_only + lhs, in which
2014 case we don't want to force a replacement by a temporary. */
2015 *walk_subtrees = 1;
2016 break;
2018 default:
2019 if (!IS_TYPE_OR_DECL_P (t))
2021 *walk_subtrees = 1;
2022 wi->val_only = true;
2023 wi->is_lhs = false;
2025 break;
2028 return NULL_TREE;
2031 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
2032 struct walk_stmt_info *);
2034 /* Helper for convert_local_reference. Convert all the references in
2035 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
2037 static bool
2038 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
2040 struct nesting_info *const info = (struct nesting_info *) wi->info;
2041 bool need_frame = false, need_stmts = false;
2042 tree clause, decl, *pdecl;
2043 int dummy;
2044 bitmap new_suppress;
2046 new_suppress = BITMAP_GGC_ALLOC ();
2047 bitmap_copy (new_suppress, info->suppress_expansion);
2049 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2051 pdecl = NULL;
2052 switch (OMP_CLAUSE_CODE (clause))
2054 case OMP_CLAUSE_REDUCTION:
2055 case OMP_CLAUSE_IN_REDUCTION:
2056 case OMP_CLAUSE_TASK_REDUCTION:
2057 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2058 need_stmts = true;
2059 if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
2061 pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
2062 if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
2063 pdecl = &TREE_OPERAND (*pdecl, 0);
2064 if (TREE_CODE (*pdecl) == INDIRECT_REF
2065 || TREE_CODE (*pdecl) == ADDR_EXPR)
2066 pdecl = &TREE_OPERAND (*pdecl, 0);
2068 goto do_decl_clause;
2070 case OMP_CLAUSE_LASTPRIVATE:
2071 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
2072 need_stmts = true;
2073 goto do_decl_clause;
2075 case OMP_CLAUSE_LINEAR:
2076 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
2077 need_stmts = true;
2078 wi->val_only = true;
2079 wi->is_lhs = false;
2080 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
2081 wi);
2082 goto do_decl_clause;
2084 case OMP_CLAUSE_PRIVATE:
2085 case OMP_CLAUSE_FIRSTPRIVATE:
2086 case OMP_CLAUSE_COPYPRIVATE:
2087 case OMP_CLAUSE_SHARED:
2088 case OMP_CLAUSE_TO_DECLARE:
2089 case OMP_CLAUSE_LINK:
2090 case OMP_CLAUSE_USE_DEVICE_PTR:
2091 case OMP_CLAUSE_USE_DEVICE_ADDR:
2092 case OMP_CLAUSE_IS_DEVICE_PTR:
2093 do_decl_clause:
2094 if (pdecl == NULL)
2095 pdecl = &OMP_CLAUSE_DECL (clause);
2096 decl = *pdecl;
2097 if (VAR_P (decl)
2098 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2099 break;
2100 if (decl_function_context (decl) == info->context
2101 && !use_pointer_in_frame (decl))
2103 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2104 if (field)
2106 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
2107 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
2108 bitmap_set_bit (new_suppress, DECL_UID (decl));
2109 *pdecl = get_local_debug_decl (info, decl, field);
2110 need_frame = true;
2113 break;
2115 case OMP_CLAUSE_SCHEDULE:
2116 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
2117 break;
2118 /* FALLTHRU */
2119 case OMP_CLAUSE_FINAL:
2120 case OMP_CLAUSE_IF:
2121 case OMP_CLAUSE_NUM_THREADS:
2122 case OMP_CLAUSE_DEPEND:
2123 case OMP_CLAUSE_DEVICE:
2124 case OMP_CLAUSE_NUM_TEAMS:
2125 case OMP_CLAUSE_THREAD_LIMIT:
2126 case OMP_CLAUSE_SAFELEN:
2127 case OMP_CLAUSE_SIMDLEN:
2128 case OMP_CLAUSE_PRIORITY:
2129 case OMP_CLAUSE_GRAINSIZE:
2130 case OMP_CLAUSE_NUM_TASKS:
2131 case OMP_CLAUSE_HINT:
2132 case OMP_CLAUSE_NUM_GANGS:
2133 case OMP_CLAUSE_NUM_WORKERS:
2134 case OMP_CLAUSE_VECTOR_LENGTH:
2135 case OMP_CLAUSE_GANG:
2136 case OMP_CLAUSE_WORKER:
2137 case OMP_CLAUSE_VECTOR:
2138 case OMP_CLAUSE_ASYNC:
2139 case OMP_CLAUSE_WAIT:
2140 /* Several OpenACC clauses have optional arguments. Check if they
2141 are present. */
2142 if (OMP_CLAUSE_OPERAND (clause, 0))
2144 wi->val_only = true;
2145 wi->is_lhs = false;
2146 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2147 &dummy, wi);
2150 /* The gang clause accepts two arguments. */
2151 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
2152 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
2154 wi->val_only = true;
2155 wi->is_lhs = false;
2156 convert_nonlocal_reference_op
2157 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2159 break;
2161 case OMP_CLAUSE_DIST_SCHEDULE:
2162 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2164 wi->val_only = true;
2165 wi->is_lhs = false;
2166 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2167 &dummy, wi);
2169 break;
2171 case OMP_CLAUSE_MAP:
2172 case OMP_CLAUSE_TO:
2173 case OMP_CLAUSE_FROM:
2174 if (OMP_CLAUSE_SIZE (clause))
2176 wi->val_only = true;
2177 wi->is_lhs = false;
2178 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2179 &dummy, wi);
2181 if (DECL_P (OMP_CLAUSE_DECL (clause)))
2182 goto do_decl_clause;
2183 wi->val_only = true;
2184 wi->is_lhs = false;
2185 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2186 wi, NULL);
2187 break;
2189 case OMP_CLAUSE_ALIGNED:
2190 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2192 wi->val_only = true;
2193 wi->is_lhs = false;
2194 convert_local_reference_op
2195 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2197 /* FALLTHRU */
2198 case OMP_CLAUSE_NONTEMPORAL:
2199 /* Like do_decl_clause, but don't add any suppression. */
2200 decl = OMP_CLAUSE_DECL (clause);
2201 if (VAR_P (decl)
2202 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2203 break;
2204 if (decl_function_context (decl) == info->context
2205 && !use_pointer_in_frame (decl))
2207 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2208 if (field)
2210 OMP_CLAUSE_DECL (clause)
2211 = get_local_debug_decl (info, decl, field);
2212 need_frame = true;
2215 break;
2217 case OMP_CLAUSE_NOWAIT:
2218 case OMP_CLAUSE_ORDERED:
2219 case OMP_CLAUSE_DEFAULT:
2220 case OMP_CLAUSE_COPYIN:
2221 case OMP_CLAUSE_COLLAPSE:
2222 case OMP_CLAUSE_TILE:
2223 case OMP_CLAUSE_UNTIED:
2224 case OMP_CLAUSE_MERGEABLE:
2225 case OMP_CLAUSE_PROC_BIND:
2226 case OMP_CLAUSE_NOGROUP:
2227 case OMP_CLAUSE_THREADS:
2228 case OMP_CLAUSE_SIMD:
2229 case OMP_CLAUSE_DEFAULTMAP:
2230 case OMP_CLAUSE_ORDER:
2231 case OMP_CLAUSE_SEQ:
2232 case OMP_CLAUSE_INDEPENDENT:
2233 case OMP_CLAUSE_AUTO:
2234 case OMP_CLAUSE_IF_PRESENT:
2235 case OMP_CLAUSE_FINALIZE:
2236 case OMP_CLAUSE__CONDTEMP_:
2237 case OMP_CLAUSE__SCANTEMP_:
2238 break;
2240 /* The following clause belongs to the OpenACC cache directive, which
2241 is discarded during gimplification. */
2242 case OMP_CLAUSE__CACHE_:
2243 /* The following clauses are only allowed in the OpenMP declare simd
2244 directive, so not seen here. */
2245 case OMP_CLAUSE_UNIFORM:
2246 case OMP_CLAUSE_INBRANCH:
2247 case OMP_CLAUSE_NOTINBRANCH:
2248 /* The following clauses are only allowed on OpenMP cancel and
2249 cancellation point directives, which at this point have already
2250 been lowered into a function call. */
2251 case OMP_CLAUSE_FOR:
2252 case OMP_CLAUSE_PARALLEL:
2253 case OMP_CLAUSE_SECTIONS:
2254 case OMP_CLAUSE_TASKGROUP:
2255 /* The following clauses are only added during OMP lowering; nested
2256 function decomposition happens before that. */
2257 case OMP_CLAUSE__LOOPTEMP_:
2258 case OMP_CLAUSE__REDUCTEMP_:
2259 case OMP_CLAUSE__SIMDUID_:
2260 case OMP_CLAUSE__SIMT_:
2261 /* Anything else. */
2262 default:
2263 gcc_unreachable ();
2267 info->suppress_expansion = new_suppress;
2269 if (need_stmts)
2270 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2271 switch (OMP_CLAUSE_CODE (clause))
2273 case OMP_CLAUSE_REDUCTION:
2274 case OMP_CLAUSE_IN_REDUCTION:
2275 case OMP_CLAUSE_TASK_REDUCTION:
2276 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2278 tree old_context
2279 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2280 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2281 = info->context;
2282 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2283 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2284 = info->context;
2285 walk_body (convert_local_reference_stmt,
2286 convert_local_reference_op, info,
2287 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2288 walk_body (convert_local_reference_stmt,
2289 convert_local_reference_op, info,
2290 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2291 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2292 = old_context;
2293 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2294 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2295 = old_context;
2297 break;
2299 case OMP_CLAUSE_LASTPRIVATE:
2300 walk_body (convert_local_reference_stmt,
2301 convert_local_reference_op, info,
2302 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2303 break;
2305 case OMP_CLAUSE_LINEAR:
2306 walk_body (convert_local_reference_stmt,
2307 convert_local_reference_op, info,
2308 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2309 break;
2311 default:
2312 break;
2315 return need_frame;
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_TEAMS:
      /* Non-host teams get the full parallel/task treatment below.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* If clause conversion touched the frame, share FRAME.* into the
	 outlined region via an explicit OMP_CLAUSE_SHARED.  */
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
				     wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  /* Bit 2 of static_chain_added records that FRAME.* is already
	     on the clause chain of this construct.  */
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* The body walk may itself have needed the frame; add the SHARED
	 clause now if it was not added above.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions need no mapping of the frame.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* Offloaded regions map FRAME.* to the device instead of sharing.  */
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* A clobber of a variable that has moved into the frame is
	 meaningless; drop it rather than rewrite it.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && !use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple *stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label of the current function is an ordinary goto.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Find the nesting_info of the outer function that declares LABEL.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2621 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2622 are referenced via nonlocal goto from a nested function. The rewrite
2623 will involve installing a newly generated DECL_NONLOCAL label, and
2624 (potentially) a branch around the rtl gunk that is assumed to be
2625 attached to such a label. */
2627 static tree
2628 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2629 struct walk_stmt_info *wi)
2631 struct nesting_info *const info = (struct nesting_info *) wi->info;
2632 tree label, new_label;
2633 gimple_stmt_iterator tmp_gsi;
2634 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2636 if (!stmt)
2638 *handled_ops_p = false;
2639 return NULL_TREE;
2642 label = gimple_label_label (stmt);
2644 tree *slot = info->var_map->get (label);
2645 if (!slot)
2647 *handled_ops_p = false;
2648 return NULL_TREE;
2651 /* If there's any possibility that the previous statement falls through,
2652 then we must branch around the new non-local label. */
2653 tmp_gsi = wi->gsi;
2654 gsi_prev (&tmp_gsi);
2655 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2657 gimple *stmt = gimple_build_goto (label);
2658 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2661 new_label = (tree) *slot;
2662 stmt = gimple_build_label (new_label);
2663 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2665 *handled_ops_p = true;
2666 return NULL_TREE;
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference a trampoline generated for the occasion.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  bool descr;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;

      /* Decide whether to generate a descriptor or a trampoline.  */
      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;

      if (descr)
	x = lookup_descr_for_decl (i, decl, INSERT);
      else
	x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      if (descr)
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
      else
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      *tp = x;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TEAMS:
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      goto do_parallel;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    do_parallel:
      {
	tree save_local_var_chain = info->new_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* i == 0 handles FRAME.*, i == 1 handles CHAIN.*; the matching
	   bits of static_chain_added were set while walking the body.  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target regions use a map clause instead.  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Skip calls that already carry a static chain, and indirect
	 calls with no known fndecl.  */
      if (gimple_call_chain (stmt))
	break;
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  struct nesting_info *i = info;
	  while (i && i->context != target_context)
	    i = i->outer;
	  /* If none of the outer contexts is the target context, this means
	     that the function is called in a wrong context.  */
	  if (!i)
	    internal_error ("%s from %s called in %s",
			    IDENTIFIER_POINTER (DECL_NAME (decl)),
			    IDENTIFIER_POINTER (DECL_NAME (target_context)),
			    IDENTIFIER_POINTER (DECL_NAME (info->context)));

	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0: the frame itself was needed (direct child call);
	     bit 1: the static chain was needed (deeper target).  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_TEAMS:
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  walk_body (convert_gimple_call, NULL, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      /* i == 0 handles FRAME.*, i == 1 handles CHAIN.*.  */
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      /* For offloaded regions the frame/chain must be mapped, not shared.  */
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      if (n->thunk_p)
	continue;
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* Thunks take the static-chain property of the function they
     forward to.  */
  FOR_EACH_NEST_INFO (n, root)
    if (n->thunk_p)
      {
	tree decl = n->context;
	tree alias = cgraph_node::get (decl)->thunk.alias;
	DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
      }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      /* Iterate to a fixed point: stop once a pass leaves the number of
	 static-chain users unchanged.  */
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  if (n->thunk_p)
	    continue;
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}

      FOR_EACH_NEST_INFO (n, root)
	if (n->thunk_p)
	  {
	    tree decl = n->context;
	    tree alias = cgraph_node::get (decl)->thunk.alias;
	    DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
	  }
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
/* Wrapper around copy_body_data that additionally carries the nesting
   tree ROOT whose var_map drives decl remapping in nesting_copy_decl.  */

struct nesting_copy_body_data
{
  copy_body_data cb;
  struct nesting_info *root;
};
/* A helper subroutine for debug_var_chain type remapping.
   This is the copy_decl callback installed in nesting_copy_body_data::cb;
   it decides, for DECL encountered during tree copying, whether to reuse
   an existing replacement, make a copy, or share the original.  */

static tree
nesting_copy_decl (tree decl, copy_body_data *id)
{
  struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
  tree *slot = nid->root->var_map->get (decl);

  /* If the earlier conversion walks already produced a replacement for
     DECL, reuse it rather than copying again.  */
  if (slot)
    return (tree) *slot;

  /* A TYPE_DECL with an original type needs that type remapped as well,
     so the copied decl does not keep pointing at the un-remapped type.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
    {
      tree new_decl = copy_decl_no_change (decl, id);
      DECL_ORIGINAL_TYPE (new_decl)
	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
      return new_decl;
    }

  /* Variables, parameters and results are shared with the original
     function rather than copied.  */
  if (VAR_P (decl)
      || TREE_CODE (decl) == PARM_DECL
      || TREE_CODE (decl) == RESULT_DECL)
    return decl;

  return copy_decl_no_change (decl, id);
}
3130 /* A helper function for remap_vla_decls. See if *TP contains
3131 some remapped variables. */
3133 static tree
3134 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
3136 struct nesting_info *root = (struct nesting_info *) data;
3137 tree t = *tp;
3139 if (DECL_P (t))
3141 *walk_subtrees = 0;
3142 tree *slot = root->var_map->get (t);
3144 if (slot)
3145 return *slot;
3147 return NULL;
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  A VLA's DECL_VALUE_EXPR is an INDIRECT_REF of a pointer
   variable; when that pointer (or the VLA's variably-modified type)
   was moved into the frame record, both the value expression and the
   type must be rewritten in terms of the frame copies.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Process nested scopes first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: find the first VLA decl whose value expression or type
     mentions a remapped variable.  If none, there is nothing to do and
     we avoid setting up the copy machinery below.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: starting from the first affected decl, rewrite each
     affected VLA's type and value expression.  */
  for (; var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Locate the nesting level that owns VAR; skip decls from
	   contexts outside this nesting tree.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Strip unnamed pointer layers in parallel so the TYPE_NAME
	   comparison below looks at corresponding levels.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	/* If the remapped type still shares the original's TYPE_DECL,
	   remap that decl too so debug info sees the new type.  */
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
3248 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3249 involved. */
3251 static void
3252 fixup_vla_decls (tree block)
3254 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3255 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3257 tree val = DECL_VALUE_EXPR (var);
3259 if (!(TREE_CODE (val) == INDIRECT_REF
3260 && VAR_P (TREE_OPERAND (val, 0))
3261 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3262 continue;
3264 /* Fully expand value expressions. This avoids having debug variables
3265 only referenced from them and that can be swept during GC. */
3266 val = build1 (INDIRECT_REF, TREE_TYPE (val),
3267 DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3268 SET_DECL_VALUE_EXPR (var, val);
3271 for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3272 fixup_vla_decls (sub);
3275 /* Fold the MEM_REF *E. */
3276 bool
3277 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3279 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3280 *ref_p = fold (*ref_p);
3281 return true;
/* Given DECL, a nested function, build an initialization call for FIELD,
   the trampoline or descriptor for DECL, using FUNC as the function.
   The resulting call is FUNC (&frame.FIELD, &DECL, &frame), matching
   the argument order of __builtin_init_trampoline /
   __builtin_init_descriptor.  */

static gcall *
build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
		      tree func)
{
  tree arg1, arg2, arg3, x;

  /* A trampoline/descriptor only makes sense for a function that
     actually takes a static chain.  */
  gcc_assert (DECL_STATIC_CHAIN (decl));
  /* Third argument: the static chain value, i.e. the frame address.  */
  arg3 = build_addr (info->frame_decl);

  /* Second argument: the address of the nested function itself.  */
  arg2 = build_addr (decl);

  /* First argument: the address of the trampoline/descriptor field
     inside the frame record.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field),
	      info->frame_decl, field, NULL_TREE);
  arg1 = build_addr (x);

  return gimple_build_call (func, 3, arg1, arg2, arg3);
}
/* Do "everything else" to clean up or complete state collected by the various
   walking passes -- create a field to hold the frame base address, lay out the
   types and decls, generate code to initialize the frame decl, store critical
   expressions in the struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list = NULL;
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  /* Thunks have no body of their own to finalize.  */
  if (root->thunk_p)
    return;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* Debugging information needs to compute the frame base address of the
	 parent frame out of the static chain from the nested frame.

	 The static chain is the address of the FRAME record, so one could
	 imagine it would be possible to compute the frame base address just
	 adding a constant offset to this address.  Unfortunately, this is not
	 possible: if the FRAME object has alignment constraints that are
	 stronger than the stack, then the offset between the frame base and
	 the FRAME object will be dynamic.

	 What we do instead is to append a field to the FRAME object that holds
	 the frame base address: then debug info just has to fetch this
	 field.  */

      /* Debugging information will refer to the CFA as the frame base
	 address: we will do the same here.  */
      const tree frame_addr_fndecl
	= builtin_decl_explicit (BUILT_IN_DWARF_CFA);

      /* Create a field in the FRAME record to hold the frame base address for
	 this stack frame.  Since it will be used only by the debugger, put it
	 at the end of the record in order not to shift all other offsets.  */
      tree fb_decl = make_node (FIELD_DECL);

      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
      TREE_TYPE (fb_decl) = ptr_type_node;
      TREE_ADDRESSABLE (fb_decl) = 1;
      DECL_CONTEXT (fb_decl) = root->frame_type;
      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
						fb_decl);

      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it.  */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Initialize the frame base address field.  If the builtin we need is
	 not available, set it to NULL so that debugging information does not
	 reference junk.  */
      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
			    root->frame_decl, fb_decl, NULL_TREE);
      tree fb_tmp;

      if (frame_addr_fndecl != NULL_TREE)
	{
	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
						integer_zero_node);
	  gimple_stmt_iterator gsi = gsi_last (stmt_list);

	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
	}
      else
	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
      gimple_seq_add_stmt (&stmt_list,
			   gimple_build_assign (fb_ref, fb_tmp));

      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to insert
     a copy or a pointer.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* use_pointer_in_frame decided earlier whether the frame holds
	     the parameter's value or its address.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If descriptors were created, then we need to initialize them.  */
  if (root->any_descr_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  They go at
     the front of the outermost bind so they execute before any user
     code.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost context the type is variably
		   modified relative to; skip types from elsewhere.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Strip unnamed pointer layers in parallel so the
		   TYPE_NAME comparison below is level-for-level.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }
  else
    fixup_vla_decls (DECL_INITIAL (root->context));

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
3579 static void
3580 finalize_nesting_tree (struct nesting_info *root)
3582 struct nesting_info *n;
3583 FOR_EACH_NEST_INFO (n, root)
3584 finalize_nesting_tree_1 (n);
/* Unnest the nodes and pass them to cgraph.  */

static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
  struct cgraph_node *node = cgraph_node::get (root->context);

  /* For nested functions update the cgraph to reflect unnesting.
     We also delay finalizing of these functions up to this point.
     The outermost function has no origin and is left alone; thunks are
     unnested but not finalized here.  */
  if (nested_function_info::get (node)->origin)
    {
      unnest_function (node);
      if (!root->thunk_p)
	cgraph_node::finalize_function (root->context, true);
    }
}
3604 static void
3605 unnest_nesting_tree (struct nesting_info *root)
3607 struct nesting_info *n;
3608 FOR_EACH_NEST_INFO (n, root)
3609 unnest_nesting_tree_1 (n);
/* Free the data structures allocated during this pass.  */

static void
free_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *node, *next;

  /* Fetch each node's successor before freeing the node itself, since
     the iterator walks through the node being released.  */
  node = iter_nestinfo_start (root);
  do
    {
      next = iter_nestinfo_next (node);
      delete node->var_map;
      delete node->field_map;
      delete node->mem_refs;
      free (node);
      node = next;
    }
  while (node);
}
3632 /* Gimplify a function and all its nested functions. */
3633 static void
3634 gimplify_all_functions (struct cgraph_node *root)
3636 struct cgraph_node *iter;
3637 if (!gimple_body (root->decl))
3638 gimplify_function_tree (root->decl);
3639 for (iter = first_nested_function (root); iter;
3640 iter = next_nested_function (iter))
3641 if (!iter->thunk.thunk_p)
3642 gimplify_all_functions (iter);
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!first_nested_function (cgn))
    return;

  gimplify_all_functions (cgn);

  set_dump_file (dump_begin (TDI_nested, &dump_flags));
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* The phase order matters: non-local references are rewritten before
     local ones, goto conversion runs on the rewritten bodies, and call
     conversion iterates until the set of static chains stabilizes.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  /* Release everything the pass allocated.  */
  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      set_dump_file (NULL);
    }
}
3692 #include "gt-tree-nested.h"