[official-gcc.git] / gcc / tree-nested.c
blob: 7f26e61acc7b20bbc2c23070476d0e4cbe5ce59a
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
46 /* The object of this pass is to lower the representation of a set of nested
47 functions in order to expose all of the gory details of the various
48 nonlocal references. We want to do this sooner rather than later, in
49 order to give us more freedom in emitting all of the functions in question.
51 Back in olden times, when gcc was young, we developed an insanely
52 complicated scheme whereby variables which were referenced nonlocally
53 were forced to live in the stack of the declaring function, and then
54 the nested functions magically discovered where these variables were
55 placed. In order for this scheme to function properly, it required
56 that the outer function be partially expanded, then we switch to
57 compiling the inner function, and once done with those we switch back
58 to compiling the outer function. Such delicate ordering requirements
59 makes it difficult to do whole translation unit optimizations
60 involving such functions.
62 The implementation here is much more direct. Everything that can be
63 referenced by an inner function is a member of an explicitly created
64 structure herein called the "nonlocal frame struct". The incoming
65 static chain for a nested function is a pointer to this struct in
66 the parent. In this way, we settle on known offsets from a known
67 base, and so are decoupled from the logic that places objects in the
68 function's stack frame. More importantly, we don't have to wait for
69 that to happen -- since the compilation of the inner function is no
70 longer tied to a real stack frame, the nonlocal frame struct can be
71 allocated anywhere. Which means that the outer function is now
72 inlinable.
74 Theory of operation here is very simple. Iterate over all the
75 statements in all the functions (depth first) several times,
76 allocating structures and fields on demand. In general we want to
77 examine inner functions first, so that we can avoid making changes
78 to outer functions which are unnecessary.
80 The order of the passes matters a bit, in that later passes will be
81 skipped if it is discovered that the functions don't actually interact
82 at all. That is, they're nested in the lexical sense but could have
83 been written as independent functions without change. */
86 struct nesting_info
88 struct nesting_info *outer;
89 struct nesting_info *inner;
90 struct nesting_info *next;
92 hash_map<tree, tree> *field_map;
93 hash_map<tree, tree> *var_map;
94 hash_set<tree *> *mem_refs;
95 bitmap suppress_expansion;
97 tree context;
98 tree new_local_var_chain;
99 tree debug_var_chain;
100 tree frame_type;
101 tree frame_decl;
102 tree chain_field;
103 tree chain_decl;
104 tree nl_goto_field;
106 bool any_parm_remapped;
107 bool any_tramp_created;
108 bool any_descr_created;
109 char static_chain_added;
113 /* Iterate over the nesting tree, starting with ROOT, depth first. */
115 static inline struct nesting_info *
116 iter_nestinfo_start (struct nesting_info *root)
118 while (root->inner)
119 root = root->inner;
120 return root;
123 static inline struct nesting_info *
124 iter_nestinfo_next (struct nesting_info *node)
126 if (node->next)
127 return iter_nestinfo_start (node->next);
128 return node->outer;
131 #define FOR_EACH_NEST_INFO(I, ROOT) \
132 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
134 /* Obstack used for the bitmaps in the struct above. */
135 static struct bitmap_obstack nesting_info_bitmap_obstack;
138 /* We're working in so many different function contexts simultaneously,
139 that create_tmp_var is dangerous. Prevent mishap. */
140 #define create_tmp_var cant_use_create_tmp_var_here_dummy
142 /* Like create_tmp_var, except record the variable for registration at
143 the given nesting level. */
145 static tree
146 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
148 tree tmp_var;
150 /* If the type is of variable size or a type which must be created by the
151 frontend, something is wrong. Note that we explicitly allow
152 incomplete types here, since we create them ourselves here. */
153 gcc_assert (!TREE_ADDRESSABLE (type));
154 gcc_assert (!TYPE_SIZE_UNIT (type)
155 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
157 tmp_var = create_tmp_var_raw (type, prefix);
158 DECL_CONTEXT (tmp_var) = info->context;
159 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
160 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
161 if (TREE_CODE (type) == COMPLEX_TYPE
162 || TREE_CODE (type) == VECTOR_TYPE)
163 DECL_GIMPLE_REG_P (tmp_var) = 1;
165 info->new_local_var_chain = tmp_var;
167 return tmp_var;
170 /* Take the address of EXP to be used within function CONTEXT.
171 Mark it for addressability as necessary. */
173 tree
174 build_addr (tree exp)
176 mark_addressable (exp);
177 return build_fold_addr_expr (exp);
180 /* Insert FIELD into TYPE, sorted by alignment requirements. */
182 void
183 insert_field_into_struct (tree type, tree field)
185 tree *p;
187 DECL_CONTEXT (field) = type;
189 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
190 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
191 break;
193 DECL_CHAIN (field) = *p;
194 *p = field;
196 /* Set correct alignment for frame struct type. */
197 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
198 SET_TYPE_ALIGN (type, DECL_ALIGN (field));
201 /* Build or return the RECORD_TYPE that describes the frame state that is
202 shared between INFO->CONTEXT and its nested functions. This record will
203 not be complete until finalize_nesting_tree; up until that point we'll
204 be adding fields as necessary.
206 We also build the DECL that represents this frame in the function. */
208 static tree
209 get_frame_type (struct nesting_info *info)
211 tree type = info->frame_type;
212 if (!type)
214 char *name;
216 type = make_node (RECORD_TYPE);
218 name = concat ("FRAME.",
219 IDENTIFIER_POINTER (DECL_NAME (info->context)),
220 NULL);
221 TYPE_NAME (type) = get_identifier (name);
222 free (name);
224 info->frame_type = type;
225 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
226 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
228 /* ??? Always make it addressable for now, since it is meant to
229 be pointed to by the static chain pointer. This pessimizes
230 when it turns out that no static chains are needed because
231 the nested functions referencing non-local variables are not
232 reachable, but the true pessimization is to create the non-
233 local frame structure in the first place. */
234 TREE_ADDRESSABLE (info->frame_decl) = 1;
236 return type;
239 /* Return true if DECL should be referenced by pointer in the non-local
240 frame structure. */
242 static bool
243 use_pointer_in_frame (tree decl)
245 if (TREE_CODE (decl) == PARM_DECL)
247 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
248 sized decls, and inefficient to copy large aggregates. Don't bother
249 moving anything but scalar variables. */
250 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
252 else
254 /* Variable sized types make things "interesting" in the frame. */
255 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
259 /* Given DECL, a non-locally accessed variable, find or create a field
260 in the non-local frame structure for the given nesting context. */
262 static tree
263 lookup_field_for_decl (struct nesting_info *info, tree decl,
264 enum insert_option insert)
266 if (insert == NO_INSERT)
268 tree *slot = info->field_map->get (decl);
269 return slot ? *slot : NULL_TREE;
272 tree *slot = &info->field_map->get_or_insert (decl);
273 if (!*slot)
275 tree field = make_node (FIELD_DECL);
276 DECL_NAME (field) = DECL_NAME (decl);
278 if (use_pointer_in_frame (decl))
280 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
281 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
282 DECL_NONADDRESSABLE_P (field) = 1;
284 else
286 TREE_TYPE (field) = TREE_TYPE (decl);
287 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
288 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
289 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
290 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
291 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
292 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
295 insert_field_into_struct (get_frame_type (info), field);
296 *slot = field;
298 if (TREE_CODE (decl) == PARM_DECL)
299 info->any_parm_remapped = true;
302 return *slot;
305 /* Build or return the variable that holds the static chain within
306 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
308 static tree
309 get_chain_decl (struct nesting_info *info)
311 tree decl = info->chain_decl;
313 if (!decl)
315 tree type;
317 type = get_frame_type (info->outer);
318 type = build_pointer_type (type);
320 /* Note that this variable is *not* entered into any BIND_EXPR;
321 the construction of this variable is handled specially in
322 expand_function_start and initialize_inlined_parameters.
323 Note also that it's represented as a parameter. This is more
324 close to the truth, since the initial value does come from
325 the caller. */
326 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
327 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
328 DECL_ARTIFICIAL (decl) = 1;
329 DECL_IGNORED_P (decl) = 1;
330 TREE_USED (decl) = 1;
331 DECL_CONTEXT (decl) = info->context;
332 DECL_ARG_TYPE (decl) = type;
334 /* Tell tree-inline.c that we never write to this variable, so
335 it can copy-prop the replacement value immediately. */
336 TREE_READONLY (decl) = 1;
338 info->chain_decl = decl;
340 if (dump_file
341 && (dump_flags & TDF_DETAILS)
342 && !DECL_STATIC_CHAIN (info->context))
343 fprintf (dump_file, "Setting static-chain for %s\n",
344 lang_hooks.decl_printable_name (info->context, 2));
346 DECL_STATIC_CHAIN (info->context) = 1;
348 return decl;
351 /* Build or return the field within the non-local frame state that holds
352 the static chain for INFO->CONTEXT. This is the way to walk back up
353 multiple nesting levels. */
355 static tree
356 get_chain_field (struct nesting_info *info)
358 tree field = info->chain_field;
360 if (!field)
362 tree type = build_pointer_type (get_frame_type (info->outer));
364 field = make_node (FIELD_DECL);
365 DECL_NAME (field) = get_identifier ("__chain");
366 TREE_TYPE (field) = type;
367 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
368 DECL_NONADDRESSABLE_P (field) = 1;
370 insert_field_into_struct (get_frame_type (info), field);
372 info->chain_field = field;
374 if (dump_file
375 && (dump_flags & TDF_DETAILS)
376 && !DECL_STATIC_CHAIN (info->context))
377 fprintf (dump_file, "Setting static-chain for %s\n",
378 lang_hooks.decl_printable_name (info->context, 2));
380 DECL_STATIC_CHAIN (info->context) = 1;
382 return field;
385 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
387 static tree
388 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
389 gcall *call)
391 tree t;
393 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
394 gimple_call_set_lhs (call, t);
395 if (! gsi_end_p (*gsi))
396 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
397 gsi_insert_before (gsi, call, GSI_SAME_STMT);
399 return t;
403 /* Copy EXP into a temporary. Allocate the temporary in the context of
404 INFO and insert the initialization statement before GSI. */
406 static tree
407 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
409 tree t;
410 gimple *stmt;
412 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
413 stmt = gimple_build_assign (t, exp);
414 if (! gsi_end_p (*gsi))
415 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
416 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
418 return t;
422 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
424 static tree
425 gsi_gimplify_val (struct nesting_info *info, tree exp,
426 gimple_stmt_iterator *gsi)
428 if (is_gimple_val (exp))
429 return exp;
430 else
431 return init_tmp_var (info, exp, gsi);
434 /* Similarly, but copy from the temporary and insert the statement
435 after the iterator. */
437 static tree
438 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
440 tree t;
441 gimple *stmt;
443 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
444 stmt = gimple_build_assign (exp, t);
445 if (! gsi_end_p (*gsi))
446 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
447 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
449 return t;
452 /* Build or return the type used to represent a nested function trampoline. */
454 static GTY(()) tree trampoline_type;
456 static tree
457 get_trampoline_type (struct nesting_info *info)
459 unsigned align, size;
460 tree t;
462 if (trampoline_type)
463 return trampoline_type;
465 align = TRAMPOLINE_ALIGNMENT;
466 size = TRAMPOLINE_SIZE;
468 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
469 then allocate extra space so that we can do dynamic alignment. */
470 if (align > STACK_BOUNDARY)
472 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
473 align = STACK_BOUNDARY;
476 t = build_index_type (size_int (size - 1));
477 t = build_array_type (char_type_node, t);
478 t = build_decl (DECL_SOURCE_LOCATION (info->context),
479 FIELD_DECL, get_identifier ("__data"), t);
480 SET_DECL_ALIGN (t, align);
481 DECL_USER_ALIGN (t) = 1;
483 trampoline_type = make_node (RECORD_TYPE);
484 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
485 TYPE_FIELDS (trampoline_type) = t;
486 layout_type (trampoline_type);
487 DECL_CONTEXT (t) = trampoline_type;
489 return trampoline_type;
492 /* Build or return the type used to represent a nested function descriptor. */
494 static GTY(()) tree descriptor_type;
496 static tree
497 get_descriptor_type (struct nesting_info *info)
499 /* The base alignment is that of a function. */
500 const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
501 tree t;
503 if (descriptor_type)
504 return descriptor_type;
506 t = build_index_type (integer_one_node);
507 t = build_array_type (ptr_type_node, t);
508 t = build_decl (DECL_SOURCE_LOCATION (info->context),
509 FIELD_DECL, get_identifier ("__data"), t);
510 SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
511 DECL_USER_ALIGN (t) = 1;
513 descriptor_type = make_node (RECORD_TYPE);
514 TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
515 TYPE_FIELDS (descriptor_type) = t;
516 layout_type (descriptor_type);
517 DECL_CONTEXT (t) = descriptor_type;
519 return descriptor_type;
522 /* Given DECL, a nested function, find or create an element in the
523 var map for this function. */
525 static tree
526 lookup_element_for_decl (struct nesting_info *info, tree decl,
527 enum insert_option insert)
529 if (insert == NO_INSERT)
531 tree *slot = info->var_map->get (decl);
532 return slot ? *slot : NULL_TREE;
535 tree *slot = &info->var_map->get_or_insert (decl);
536 if (!*slot)
537 *slot = build_tree_list (NULL_TREE, NULL_TREE);
539 return (tree) *slot;
542 /* Given DECL, a nested function, create a field in the non-local
543 frame structure for this function. */
545 static tree
546 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
548 tree field = make_node (FIELD_DECL);
549 DECL_NAME (field) = DECL_NAME (decl);
550 TREE_TYPE (field) = type;
551 TREE_ADDRESSABLE (field) = 1;
552 insert_field_into_struct (get_frame_type (info), field);
553 return field;
556 /* Given DECL, a nested function, find or create a field in the non-local
557 frame structure for a trampoline for this function. */
559 static tree
560 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
561 enum insert_option insert)
563 tree elt, field;
565 elt = lookup_element_for_decl (info, decl, insert);
566 if (!elt)
567 return NULL_TREE;
569 field = TREE_PURPOSE (elt);
571 if (!field && insert == INSERT)
573 field = create_field_for_decl (info, decl, get_trampoline_type (info));
574 TREE_PURPOSE (elt) = field;
575 info->any_tramp_created = true;
578 return field;
581 /* Given DECL, a nested function, find or create a field in the non-local
582 frame structure for a descriptor for this function. */
584 static tree
585 lookup_descr_for_decl (struct nesting_info *info, tree decl,
586 enum insert_option insert)
588 tree elt, field;
590 elt = lookup_element_for_decl (info, decl, insert);
591 if (!elt)
592 return NULL_TREE;
594 field = TREE_VALUE (elt);
596 if (!field && insert == INSERT)
598 field = create_field_for_decl (info, decl, get_descriptor_type (info));
599 TREE_VALUE (elt) = field;
600 info->any_descr_created = true;
603 return field;
606 /* Build or return the field within the non-local frame state that holds
607 the non-local goto "jmp_buf". The buffer itself is maintained by the
608 rtl middle-end as dynamic stack space is allocated. */
610 static tree
611 get_nl_goto_field (struct nesting_info *info)
613 tree field = info->nl_goto_field;
614 if (!field)
616 unsigned size;
617 tree type;
619 /* For __builtin_nonlocal_goto, we need N words. The first is the
620 frame pointer, the rest is for the target's stack pointer save
621 area. The number of words is controlled by STACK_SAVEAREA_MODE;
622 not the best interface, but it'll do for now. */
623 if (Pmode == ptr_mode)
624 type = ptr_type_node;
625 else
626 type = lang_hooks.types.type_for_mode (Pmode, 1);
628 scalar_int_mode mode
629 = as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
630 size = GET_MODE_SIZE (mode);
631 size = size / GET_MODE_SIZE (Pmode);
632 size = size + 1;
634 type = build_array_type
635 (type, build_index_type (size_int (size)));
637 field = make_node (FIELD_DECL);
638 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
639 TREE_TYPE (field) = type;
640 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
641 TREE_ADDRESSABLE (field) = 1;
643 insert_field_into_struct (get_frame_type (info), field);
645 info->nl_goto_field = field;
648 return field;
651 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
653 static void
654 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
655 struct nesting_info *info, gimple_seq *pseq)
657 struct walk_stmt_info wi;
659 memset (&wi, 0, sizeof (wi));
660 wi.info = info;
661 wi.val_only = true;
662 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
666 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
668 static inline void
669 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
670 struct nesting_info *info)
672 gimple_seq body = gimple_body (info->context);
673 walk_body (callback_stmt, callback_op, info, &body);
674 gimple_set_body (info->context, body);
677 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
679 static void
680 walk_gimple_omp_for (gomp_for *for_stmt,
681 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
682 struct nesting_info *info)
684 struct walk_stmt_info wi;
685 gimple_seq seq;
686 tree t;
687 size_t i;
689 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
691 seq = NULL;
692 memset (&wi, 0, sizeof (wi));
693 wi.info = info;
694 wi.gsi = gsi_last (seq);
696 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
698 wi.val_only = false;
699 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
700 &wi, NULL);
701 wi.val_only = true;
702 wi.is_lhs = false;
703 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
704 &wi, NULL);
706 wi.val_only = true;
707 wi.is_lhs = false;
708 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
709 &wi, NULL);
711 t = gimple_omp_for_incr (for_stmt, i);
712 gcc_assert (BINARY_CLASS_P (t));
713 wi.val_only = false;
714 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
715 wi.val_only = true;
716 wi.is_lhs = false;
717 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
720 seq = gsi_seq (wi.gsi);
721 if (!gimple_seq_empty_p (seq))
723 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
724 annotate_all_with_location (seq, gimple_location (for_stmt));
725 gimple_seq_add_seq (&pre_body, seq);
726 gimple_omp_for_set_pre_body (for_stmt, pre_body);
730 /* Similarly for ROOT and all functions nested underneath, depth first. */
732 static void
733 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
734 struct nesting_info *root)
736 struct nesting_info *n;
737 FOR_EACH_NEST_INFO (n, root)
738 walk_function (callback_stmt, callback_op, n);
742 /* We have to check for a fairly pathological case. The operands of function
743 nested function are to be interpreted in the context of the enclosing
744 function. So if any are variably-sized, they will get remapped when the
745 enclosing function is inlined. But that remapping would also have to be
746 done in the types of the PARM_DECLs of the nested function, meaning the
747 argument types of that function will disagree with the arguments in the
748 calls to that function. So we'd either have to make a copy of the nested
749 function corresponding to each time the enclosing function was inlined or
750 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
751 function. The former is not practical. The latter would still require
752 detecting this case to know when to add the conversions. So, for now at
753 least, we don't inline such an enclosing function.
755 We have to do that check recursively, so here return indicating whether
756 FNDECL has such a nested function. ORIG_FN is the function we were
757 trying to inline to use for checking whether any argument is variably
758 modified by anything in it.
760 It would be better to do this in tree-inline.c so that we could give
761 the appropriate warning for why a function can't be inlined, but that's
762 too late since the nesting structure has already been flattened and
763 adding a flag just to record this fact seems a waste of a flag. */
765 static bool
766 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
768 struct cgraph_node *cgn = cgraph_node::get (fndecl);
769 tree arg;
771 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
773 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
774 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
775 return true;
777 if (check_for_nested_with_variably_modified (cgn->decl,
778 orig_fndecl))
779 return true;
782 return false;
785 /* Construct our local datastructure describing the function nesting
786 tree rooted by CGN. */
788 static struct nesting_info *
789 create_nesting_tree (struct cgraph_node *cgn)
791 struct nesting_info *info = XCNEW (struct nesting_info);
792 info->field_map = new hash_map<tree, tree>;
793 info->var_map = new hash_map<tree, tree>;
794 info->mem_refs = new hash_set<tree *>;
795 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
796 info->context = cgn->decl;
798 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
800 struct nesting_info *sub = create_nesting_tree (cgn);
801 sub->outer = info;
802 sub->next = info->inner;
803 info->inner = sub;
806 /* See discussion at check_for_nested_with_variably_modified for a
807 discussion of why this has to be here. */
808 if (check_for_nested_with_variably_modified (info->context, info->context))
809 DECL_UNINLINABLE (info->context) = true;
811 return info;
814 /* Return an expression computing the static chain for TARGET_CONTEXT
815 from INFO->CONTEXT. Insert any necessary computations before TSI. */
817 static tree
818 get_static_chain (struct nesting_info *info, tree target_context,
819 gimple_stmt_iterator *gsi)
821 struct nesting_info *i;
822 tree x;
824 if (info->context == target_context)
826 x = build_addr (info->frame_decl);
827 info->static_chain_added |= 1;
829 else
831 x = get_chain_decl (info);
832 info->static_chain_added |= 2;
834 for (i = info->outer; i->context != target_context; i = i->outer)
836 tree field = get_chain_field (i);
838 x = build_simple_mem_ref (x);
839 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
840 x = init_tmp_var (info, x, gsi);
844 return x;
848 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
849 frame as seen from INFO->CONTEXT. Insert any necessary computations
850 before GSI. */
852 static tree
853 get_frame_field (struct nesting_info *info, tree target_context,
854 tree field, gimple_stmt_iterator *gsi)
856 struct nesting_info *i;
857 tree x;
859 if (info->context == target_context)
861 /* Make sure frame_decl gets created. */
862 (void) get_frame_type (info);
863 x = info->frame_decl;
864 info->static_chain_added |= 1;
866 else
868 x = get_chain_decl (info);
869 info->static_chain_added |= 2;
871 for (i = info->outer; i->context != target_context; i = i->outer)
873 tree field = get_chain_field (i);
875 x = build_simple_mem_ref (x);
876 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
877 x = init_tmp_var (info, x, gsi);
880 x = build_simple_mem_ref (x);
883 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
884 return x;
887 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
889 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
890 in the nested function with DECL_VALUE_EXPR set to reference the true
891 variable in the parent function. This is used both for debug info
892 and in OMP lowering. */
894 static tree
895 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
897 tree target_context;
898 struct nesting_info *i;
899 tree x, field, new_decl;
901 tree *slot = &info->var_map->get_or_insert (decl);
903 if (*slot)
904 return *slot;
906 target_context = decl_function_context (decl);
908 /* A copy of the code in get_frame_field, but without the temporaries. */
909 if (info->context == target_context)
911 /* Make sure frame_decl gets created. */
912 (void) get_frame_type (info);
913 x = info->frame_decl;
914 i = info;
915 info->static_chain_added |= 1;
917 else
919 x = get_chain_decl (info);
920 info->static_chain_added |= 2;
921 for (i = info->outer; i->context != target_context; i = i->outer)
923 field = get_chain_field (i);
924 x = build_simple_mem_ref (x);
925 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
927 x = build_simple_mem_ref (x);
930 field = lookup_field_for_decl (i, decl, INSERT);
931 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
932 if (use_pointer_in_frame (decl))
933 x = build_simple_mem_ref (x);
935 /* ??? We should be remapping types as well, surely. */
936 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
937 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
938 DECL_CONTEXT (new_decl) = info->context;
939 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
940 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
941 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
942 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
943 TREE_READONLY (new_decl) = TREE_READONLY (decl);
944 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
945 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
946 if ((TREE_CODE (decl) == PARM_DECL
947 || TREE_CODE (decl) == RESULT_DECL
948 || VAR_P (decl))
949 && DECL_BY_REFERENCE (decl))
950 DECL_BY_REFERENCE (new_decl) = 1;
952 SET_DECL_VALUE_EXPR (new_decl, x);
953 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
955 *slot = new_decl;
956 DECL_CHAIN (new_decl) = info->debug_var_chain;
957 info->debug_var_chain = new_decl;
959 if (!optimize
960 && info->context != target_context
961 && variably_modified_type_p (TREE_TYPE (decl), NULL))
962 note_nonlocal_vla_type (info, TREE_TYPE (decl));
964 return new_decl;
968 /* Callback for walk_gimple_stmt, rewrite all references to VAR
969 and PARM_DECLs that belong to outer functions.
971 The rewrite will involve some number of structure accesses back up
972 the static chain. E.g. for a variable FOO up one nesting level it'll
973 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
974 indirections apply to decls for which use_pointer_in_frame is true. */
976 static tree
977 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
979 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
980 struct nesting_info *const info = (struct nesting_info *) wi->info;
981 tree t = *tp;
983 *walk_subtrees = 0;
984 switch (TREE_CODE (t))
986 case VAR_DECL:
987 /* Non-automatic variables are never processed. */
988 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
989 break;
990 /* FALLTHRU */
992 case PARM_DECL:
993 if (decl_function_context (t) != info->context)
995 tree x;
996 wi->changed = true;
998 x = get_nonlocal_debug_decl (info, t);
999 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1001 tree target_context = decl_function_context (t);
1002 struct nesting_info *i;
1003 for (i = info->outer; i->context != target_context; i = i->outer)
1004 continue;
1005 x = lookup_field_for_decl (i, t, INSERT);
1006 x = get_frame_field (info, target_context, x, &wi->gsi);
1007 if (use_pointer_in_frame (t))
1009 x = init_tmp_var (info, x, &wi->gsi);
1010 x = build_simple_mem_ref (x);
1014 if (wi->val_only)
1016 if (wi->is_lhs)
1017 x = save_tmp_var (info, x, &wi->gsi);
1018 else
1019 x = init_tmp_var (info, x, &wi->gsi);
1022 *tp = x;
1024 break;
1026 case LABEL_DECL:
1027 /* We're taking the address of a label from a parent function, but
1028 this is not itself a non-local goto. Mark the label such that it
1029 will not be deleted, much as we would with a label address in
1030 static storage. */
1031 if (decl_function_context (t) != info->context)
1032 FORCED_LABEL (t) = 1;
1033 break;
1035 case ADDR_EXPR:
1037 bool save_val_only = wi->val_only;
1039 wi->val_only = false;
1040 wi->is_lhs = false;
1041 wi->changed = false;
1042 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
1043 wi->val_only = true;
1045 if (wi->changed)
1047 tree save_context;
1049 /* If we changed anything, we might no longer be directly
1050 referencing a decl. */
1051 save_context = current_function_decl;
1052 current_function_decl = info->context;
1053 recompute_tree_invariant_for_addr_expr (t);
1054 current_function_decl = save_context;
1056 /* If the callback converted the address argument in a context
1057 where we only accept variables (and min_invariant, presumably),
1058 then compute the address into a temporary. */
1059 if (save_val_only)
1060 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1061 t, &wi->gsi);
1064 break;
1066 case REALPART_EXPR:
1067 case IMAGPART_EXPR:
1068 case COMPONENT_REF:
1069 case ARRAY_REF:
1070 case ARRAY_RANGE_REF:
1071 case BIT_FIELD_REF:
1072 /* Go down this entire nest and just look at the final prefix and
1073 anything that describes the references. Otherwise, we lose track
1074 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1075 wi->val_only = true;
1076 wi->is_lhs = false;
1077 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1079 if (TREE_CODE (t) == COMPONENT_REF)
1080 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1081 NULL);
1082 else if (TREE_CODE (t) == ARRAY_REF
1083 || TREE_CODE (t) == ARRAY_RANGE_REF)
1085 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1086 wi, NULL);
1087 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1088 wi, NULL);
1089 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1090 wi, NULL);
1093 wi->val_only = false;
1094 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1095 break;
1097 case VIEW_CONVERT_EXPR:
1098 /* Just request to look at the subtrees, leaving val_only and lhs
1099 untouched. This might actually be for !val_only + lhs, in which
1100 case we don't want to force a replacement by a temporary. */
1101 *walk_subtrees = 1;
1102 break;
1104 default:
1105 if (!IS_TYPE_OR_DECL_P (t))
1107 *walk_subtrees = 1;
1108 wi->val_only = true;
1109 wi->is_lhs = false;
1111 break;
1114 return NULL_TREE;
1117 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1118 struct walk_stmt_info *);
1120 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1121 and PARM_DECLs that belong to outer functions. */
1123 static bool
1124 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1126 struct nesting_info *const info = (struct nesting_info *) wi->info;
1127 bool need_chain = false, need_stmts = false;
1128 tree clause, decl;
1129 int dummy;
1130 bitmap new_suppress;
1132 new_suppress = BITMAP_GGC_ALLOC ();
1133 bitmap_copy (new_suppress, info->suppress_expansion);
1135 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1137 switch (OMP_CLAUSE_CODE (clause))
1139 case OMP_CLAUSE_REDUCTION:
1140 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1141 need_stmts = true;
1142 goto do_decl_clause;
1144 case OMP_CLAUSE_LASTPRIVATE:
1145 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1146 need_stmts = true;
1147 goto do_decl_clause;
1149 case OMP_CLAUSE_LINEAR:
1150 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1151 need_stmts = true;
1152 wi->val_only = true;
1153 wi->is_lhs = false;
1154 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1155 &dummy, wi);
1156 goto do_decl_clause;
1158 case OMP_CLAUSE_PRIVATE:
1159 case OMP_CLAUSE_FIRSTPRIVATE:
1160 case OMP_CLAUSE_COPYPRIVATE:
1161 case OMP_CLAUSE_SHARED:
1162 case OMP_CLAUSE_TO_DECLARE:
1163 case OMP_CLAUSE_LINK:
1164 case OMP_CLAUSE_USE_DEVICE_PTR:
1165 case OMP_CLAUSE_IS_DEVICE_PTR:
1166 do_decl_clause:
1167 decl = OMP_CLAUSE_DECL (clause);
1168 if (VAR_P (decl)
1169 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1170 break;
1171 if (decl_function_context (decl) != info->context)
1173 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1174 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1175 bitmap_set_bit (new_suppress, DECL_UID (decl));
1176 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1177 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1178 need_chain = true;
1180 break;
1182 case OMP_CLAUSE_SCHEDULE:
1183 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1184 break;
1185 /* FALLTHRU */
1186 case OMP_CLAUSE_FINAL:
1187 case OMP_CLAUSE_IF:
1188 case OMP_CLAUSE_NUM_THREADS:
1189 case OMP_CLAUSE_DEPEND:
1190 case OMP_CLAUSE_DEVICE:
1191 case OMP_CLAUSE_NUM_TEAMS:
1192 case OMP_CLAUSE_THREAD_LIMIT:
1193 case OMP_CLAUSE_SAFELEN:
1194 case OMP_CLAUSE_SIMDLEN:
1195 case OMP_CLAUSE_PRIORITY:
1196 case OMP_CLAUSE_GRAINSIZE:
1197 case OMP_CLAUSE_NUM_TASKS:
1198 case OMP_CLAUSE_HINT:
1199 case OMP_CLAUSE__CILK_FOR_COUNT_:
1200 case OMP_CLAUSE_NUM_GANGS:
1201 case OMP_CLAUSE_NUM_WORKERS:
1202 case OMP_CLAUSE_VECTOR_LENGTH:
1203 case OMP_CLAUSE_GANG:
1204 case OMP_CLAUSE_WORKER:
1205 case OMP_CLAUSE_VECTOR:
1206 case OMP_CLAUSE_ASYNC:
1207 case OMP_CLAUSE_WAIT:
1208 /* Several OpenACC clauses have optional arguments. Check if they
1209 are present. */
1210 if (OMP_CLAUSE_OPERAND (clause, 0))
1212 wi->val_only = true;
1213 wi->is_lhs = false;
1214 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1215 &dummy, wi);
1218 /* The gang clause accepts two arguments. */
1219 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1220 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1222 wi->val_only = true;
1223 wi->is_lhs = false;
1224 convert_nonlocal_reference_op
1225 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1227 break;
1229 case OMP_CLAUSE_DIST_SCHEDULE:
1230 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1232 wi->val_only = true;
1233 wi->is_lhs = false;
1234 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1235 &dummy, wi);
1237 break;
1239 case OMP_CLAUSE_MAP:
1240 case OMP_CLAUSE_TO:
1241 case OMP_CLAUSE_FROM:
1242 if (OMP_CLAUSE_SIZE (clause))
1244 wi->val_only = true;
1245 wi->is_lhs = false;
1246 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1247 &dummy, wi);
1249 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1250 goto do_decl_clause;
1251 wi->val_only = true;
1252 wi->is_lhs = false;
1253 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1254 wi, NULL);
1255 break;
1257 case OMP_CLAUSE_ALIGNED:
1258 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1260 wi->val_only = true;
1261 wi->is_lhs = false;
1262 convert_nonlocal_reference_op
1263 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1265 /* Like do_decl_clause, but don't add any suppression. */
1266 decl = OMP_CLAUSE_DECL (clause);
1267 if (VAR_P (decl)
1268 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1269 break;
1270 if (decl_function_context (decl) != info->context)
1272 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1273 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1274 need_chain = true;
1276 break;
1278 case OMP_CLAUSE_NOWAIT:
1279 case OMP_CLAUSE_ORDERED:
1280 case OMP_CLAUSE_DEFAULT:
1281 case OMP_CLAUSE_COPYIN:
1282 case OMP_CLAUSE_COLLAPSE:
1283 case OMP_CLAUSE_TILE:
1284 case OMP_CLAUSE_UNTIED:
1285 case OMP_CLAUSE_MERGEABLE:
1286 case OMP_CLAUSE_PROC_BIND:
1287 case OMP_CLAUSE_NOGROUP:
1288 case OMP_CLAUSE_THREADS:
1289 case OMP_CLAUSE_SIMD:
1290 case OMP_CLAUSE_DEFAULTMAP:
1291 case OMP_CLAUSE_SEQ:
1292 case OMP_CLAUSE_INDEPENDENT:
1293 case OMP_CLAUSE_AUTO:
1294 break;
1296 /* The following clause belongs to the OpenACC cache directive, which
1297 is discarded during gimplification. */
1298 case OMP_CLAUSE__CACHE_:
1299 /* The following clauses are only allowed in the OpenMP declare simd
1300 directive, so not seen here. */
1301 case OMP_CLAUSE_UNIFORM:
1302 case OMP_CLAUSE_INBRANCH:
1303 case OMP_CLAUSE_NOTINBRANCH:
1304 /* The following clauses are only allowed on OpenMP cancel and
1305 cancellation point directives, which at this point have already
1306 been lowered into a function call. */
1307 case OMP_CLAUSE_FOR:
1308 case OMP_CLAUSE_PARALLEL:
1309 case OMP_CLAUSE_SECTIONS:
1310 case OMP_CLAUSE_TASKGROUP:
1311 /* The following clauses are only added during OMP lowering; nested
1312 function decomposition happens before that. */
1313 case OMP_CLAUSE__LOOPTEMP_:
1314 case OMP_CLAUSE__SIMDUID_:
1315 case OMP_CLAUSE__GRIDDIM_:
1316 /* Anything else. */
1317 default:
1318 gcc_unreachable ();
1322 info->suppress_expansion = new_suppress;
1324 if (need_stmts)
1325 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1326 switch (OMP_CLAUSE_CODE (clause))
1328 case OMP_CLAUSE_REDUCTION:
1329 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1331 tree old_context
1332 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1333 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1334 = info->context;
1335 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1336 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1337 = info->context;
1338 walk_body (convert_nonlocal_reference_stmt,
1339 convert_nonlocal_reference_op, info,
1340 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1341 walk_body (convert_nonlocal_reference_stmt,
1342 convert_nonlocal_reference_op, info,
1343 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1344 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1345 = old_context;
1346 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1347 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1348 = old_context;
1350 break;
1352 case OMP_CLAUSE_LASTPRIVATE:
1353 walk_body (convert_nonlocal_reference_stmt,
1354 convert_nonlocal_reference_op, info,
1355 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1356 break;
1358 case OMP_CLAUSE_LINEAR:
1359 walk_body (convert_nonlocal_reference_stmt,
1360 convert_nonlocal_reference_op, info,
1361 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1362 break;
1364 default:
1365 break;
1368 return need_chain;
1371 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1373 static void
1374 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1376 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1377 type = TREE_TYPE (type);
1379 if (TYPE_NAME (type)
1380 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1381 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1382 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1384 while (POINTER_TYPE_P (type)
1385 || TREE_CODE (type) == VECTOR_TYPE
1386 || TREE_CODE (type) == FUNCTION_TYPE
1387 || TREE_CODE (type) == METHOD_TYPE)
1388 type = TREE_TYPE (type);
1390 if (TREE_CODE (type) == ARRAY_TYPE)
1392 tree domain, t;
1394 note_nonlocal_vla_type (info, TREE_TYPE (type));
1395 domain = TYPE_DOMAIN (type);
1396 if (domain)
1398 t = TYPE_MIN_VALUE (domain);
1399 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1400 && decl_function_context (t) != info->context)
1401 get_nonlocal_debug_decl (info, t);
1402 t = TYPE_MAX_VALUE (domain);
1403 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1404 && decl_function_context (t) != info->context)
1405 get_nonlocal_debug_decl (info, t);
1410 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1411 in BLOCK. */
1413 static void
1414 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1416 tree var;
1418 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1419 if (VAR_P (var)
1420 && variably_modified_type_p (TREE_TYPE (var), NULL)
1421 && DECL_HAS_VALUE_EXPR_P (var)
1422 && decl_function_context (var) != info->context)
1423 note_nonlocal_vla_type (info, TREE_TYPE (var));
1426 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1427 PARM_DECLs that belong to outer functions. This handles statements
1428 that are not handled via the standard recursion done in
1429 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1430 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1431 operands of STMT have been handled by this function. */
1433 static tree
1434 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1435 struct walk_stmt_info *wi)
1437 struct nesting_info *info = (struct nesting_info *) wi->info;
1438 tree save_local_var_chain;
1439 bitmap save_suppress;
1440 gimple *stmt = gsi_stmt (*gsi);
1442 switch (gimple_code (stmt))
1444 case GIMPLE_GOTO:
1445 /* Don't walk non-local gotos for now. */
1446 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1448 wi->val_only = true;
1449 wi->is_lhs = false;
1450 *handled_ops_p = false;
1451 return NULL_TREE;
1453 break;
1455 case GIMPLE_OMP_PARALLEL:
1456 case GIMPLE_OMP_TASK:
1457 save_suppress = info->suppress_expansion;
1458 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1459 wi))
1461 tree c, decl;
1462 decl = get_chain_decl (info);
1463 c = build_omp_clause (gimple_location (stmt),
1464 OMP_CLAUSE_FIRSTPRIVATE);
1465 OMP_CLAUSE_DECL (c) = decl;
1466 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1467 gimple_omp_taskreg_set_clauses (stmt, c);
1470 save_local_var_chain = info->new_local_var_chain;
1471 info->new_local_var_chain = NULL;
1473 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1474 info, gimple_omp_body_ptr (stmt));
1476 if (info->new_local_var_chain)
1477 declare_vars (info->new_local_var_chain,
1478 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1479 false);
1480 info->new_local_var_chain = save_local_var_chain;
1481 info->suppress_expansion = save_suppress;
1482 break;
1484 case GIMPLE_OMP_FOR:
1485 save_suppress = info->suppress_expansion;
1486 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1487 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1488 convert_nonlocal_reference_stmt,
1489 convert_nonlocal_reference_op, info);
1490 walk_body (convert_nonlocal_reference_stmt,
1491 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1492 info->suppress_expansion = save_suppress;
1493 break;
1495 case GIMPLE_OMP_SECTIONS:
1496 save_suppress = info->suppress_expansion;
1497 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1498 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1499 info, gimple_omp_body_ptr (stmt));
1500 info->suppress_expansion = save_suppress;
1501 break;
1503 case GIMPLE_OMP_SINGLE:
1504 save_suppress = info->suppress_expansion;
1505 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1506 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1507 info, gimple_omp_body_ptr (stmt));
1508 info->suppress_expansion = save_suppress;
1509 break;
1511 case GIMPLE_OMP_TARGET:
1512 if (!is_gimple_omp_offloaded (stmt))
1514 save_suppress = info->suppress_expansion;
1515 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1516 wi);
1517 info->suppress_expansion = save_suppress;
1518 walk_body (convert_nonlocal_reference_stmt,
1519 convert_nonlocal_reference_op, info,
1520 gimple_omp_body_ptr (stmt));
1521 break;
1523 save_suppress = info->suppress_expansion;
1524 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1525 wi))
1527 tree c, decl;
1528 decl = get_chain_decl (info);
1529 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1530 OMP_CLAUSE_DECL (c) = decl;
1531 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1532 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1533 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1534 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1537 save_local_var_chain = info->new_local_var_chain;
1538 info->new_local_var_chain = NULL;
1540 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1541 info, gimple_omp_body_ptr (stmt));
1543 if (info->new_local_var_chain)
1544 declare_vars (info->new_local_var_chain,
1545 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1546 false);
1547 info->new_local_var_chain = save_local_var_chain;
1548 info->suppress_expansion = save_suppress;
1549 break;
1551 case GIMPLE_OMP_TEAMS:
1552 save_suppress = info->suppress_expansion;
1553 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1554 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1555 info, gimple_omp_body_ptr (stmt));
1556 info->suppress_expansion = save_suppress;
1557 break;
1559 case GIMPLE_OMP_SECTION:
1560 case GIMPLE_OMP_MASTER:
1561 case GIMPLE_OMP_TASKGROUP:
1562 case GIMPLE_OMP_ORDERED:
1563 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1564 info, gimple_omp_body_ptr (stmt));
1565 break;
1567 case GIMPLE_BIND:
1569 gbind *bind_stmt = as_a <gbind *> (stmt);
1570 if (!optimize && gimple_bind_block (bind_stmt))
1571 note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));
1573 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1574 if (TREE_CODE (var) == NAMELIST_DECL)
1576 /* Adjust decls mentioned in NAMELIST_DECL. */
1577 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1578 tree decl;
1579 unsigned int i;
1581 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1583 if (VAR_P (decl)
1584 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1585 continue;
1586 if (decl_function_context (decl) != info->context)
1587 CONSTRUCTOR_ELT (decls, i)->value
1588 = get_nonlocal_debug_decl (info, decl);
1592 *handled_ops_p = false;
1593 return NULL_TREE;
1595 case GIMPLE_COND:
1596 wi->val_only = true;
1597 wi->is_lhs = false;
1598 *handled_ops_p = false;
1599 return NULL_TREE;
1601 default:
1602 /* For every other statement that we are not interested in
1603 handling here, let the walker traverse the operands. */
1604 *handled_ops_p = false;
1605 return NULL_TREE;
1608 /* We have handled all of STMT operands, no need to traverse the operands. */
1609 *handled_ops_p = true;
1610 return NULL_TREE;
1614 /* A subroutine of convert_local_reference. Create a local variable
1615 in the parent function with DECL_VALUE_EXPR set to reference the
1616 field in FRAME. This is used both for debug info and in OMP
1617 lowering. */
1619 static tree
1620 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1622 tree x, new_decl;
1624 tree *slot = &info->var_map->get_or_insert (decl);
1625 if (*slot)
1626 return *slot;
1628 /* Make sure frame_decl gets created. */
1629 (void) get_frame_type (info);
1630 x = info->frame_decl;
1631 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1633 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1634 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1635 DECL_CONTEXT (new_decl) = info->context;
1636 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1637 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1638 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1639 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1640 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1641 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1642 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1643 if ((TREE_CODE (decl) == PARM_DECL
1644 || TREE_CODE (decl) == RESULT_DECL
1645 || VAR_P (decl))
1646 && DECL_BY_REFERENCE (decl))
1647 DECL_BY_REFERENCE (new_decl) = 1;
1649 SET_DECL_VALUE_EXPR (new_decl, x);
1650 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1651 *slot = new_decl;
1653 DECL_CHAIN (new_decl) = info->debug_var_chain;
1654 info->debug_var_chain = new_decl;
1656 /* Do not emit debug info twice. */
1657 DECL_IGNORED_P (decl) = 1;
1659 return new_decl;
1663 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1664 and PARM_DECLs that were referenced by inner nested functions.
1665 The rewrite will be a structure reference to the local frame variable. */
1667 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1669 static tree
1670 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1672 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1673 struct nesting_info *const info = (struct nesting_info *) wi->info;
1674 tree t = *tp, field, x;
1675 bool save_val_only;
1677 *walk_subtrees = 0;
1678 switch (TREE_CODE (t))
1680 case VAR_DECL:
1681 /* Non-automatic variables are never processed. */
1682 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1683 break;
1684 /* FALLTHRU */
1686 case PARM_DECL:
1687 if (decl_function_context (t) == info->context)
1689 /* If we copied a pointer to the frame, then the original decl
1690 is used unchanged in the parent function. */
1691 if (use_pointer_in_frame (t))
1692 break;
1694 /* No need to transform anything if no child references the
1695 variable. */
1696 field = lookup_field_for_decl (info, t, NO_INSERT);
1697 if (!field)
1698 break;
1699 wi->changed = true;
1701 x = get_local_debug_decl (info, t, field);
1702 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1703 x = get_frame_field (info, info->context, field, &wi->gsi);
1705 if (wi->val_only)
1707 if (wi->is_lhs)
1708 x = save_tmp_var (info, x, &wi->gsi);
1709 else
1710 x = init_tmp_var (info, x, &wi->gsi);
1713 *tp = x;
1715 break;
1717 case ADDR_EXPR:
1718 save_val_only = wi->val_only;
1719 wi->val_only = false;
1720 wi->is_lhs = false;
1721 wi->changed = false;
1722 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1723 wi->val_only = save_val_only;
1725 /* If we converted anything ... */
1726 if (wi->changed)
1728 tree save_context;
1730 /* Then the frame decl is now addressable. */
1731 TREE_ADDRESSABLE (info->frame_decl) = 1;
1733 save_context = current_function_decl;
1734 current_function_decl = info->context;
1735 recompute_tree_invariant_for_addr_expr (t);
1736 current_function_decl = save_context;
1738 /* If we are in a context where we only accept values, then
1739 compute the address into a temporary. */
1740 if (save_val_only)
1741 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1742 t, &wi->gsi);
1744 break;
1746 case REALPART_EXPR:
1747 case IMAGPART_EXPR:
1748 case COMPONENT_REF:
1749 case ARRAY_REF:
1750 case ARRAY_RANGE_REF:
1751 case BIT_FIELD_REF:
1752 /* Go down this entire nest and just look at the final prefix and
1753 anything that describes the references. Otherwise, we lose track
1754 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1755 save_val_only = wi->val_only;
1756 wi->val_only = true;
1757 wi->is_lhs = false;
1758 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1760 if (TREE_CODE (t) == COMPONENT_REF)
1761 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1762 NULL);
1763 else if (TREE_CODE (t) == ARRAY_REF
1764 || TREE_CODE (t) == ARRAY_RANGE_REF)
1766 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1767 NULL);
1768 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1769 NULL);
1770 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1771 NULL);
1774 wi->val_only = false;
1775 walk_tree (tp, convert_local_reference_op, wi, NULL);
1776 wi->val_only = save_val_only;
1777 break;
1779 case MEM_REF:
1780 save_val_only = wi->val_only;
1781 wi->val_only = true;
1782 wi->is_lhs = false;
1783 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1784 wi, NULL);
1785 /* We need to re-fold the MEM_REF as component references as
1786 part of a ADDR_EXPR address are not allowed. But we cannot
1787 fold here, as the chain record type is not yet finalized. */
1788 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1789 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1790 info->mem_refs->add (tp);
1791 wi->val_only = save_val_only;
1792 break;
1794 case VIEW_CONVERT_EXPR:
1795 /* Just request to look at the subtrees, leaving val_only and lhs
1796 untouched. This might actually be for !val_only + lhs, in which
1797 case we don't want to force a replacement by a temporary. */
1798 *walk_subtrees = 1;
1799 break;
1801 default:
1802 if (!IS_TYPE_OR_DECL_P (t))
1804 *walk_subtrees = 1;
1805 wi->val_only = true;
1806 wi->is_lhs = false;
1808 break;
1811 return NULL_TREE;
1814 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1815 struct walk_stmt_info *);
1817 /* Helper for convert_local_reference. Convert all the references in
1818 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1820 static bool
1821 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1823 struct nesting_info *const info = (struct nesting_info *) wi->info;
1824 bool need_frame = false, need_stmts = false;
1825 tree clause, decl;
1826 int dummy;
1827 bitmap new_suppress;
1829 new_suppress = BITMAP_GGC_ALLOC ();
1830 bitmap_copy (new_suppress, info->suppress_expansion);
1832 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1834 switch (OMP_CLAUSE_CODE (clause))
1836 case OMP_CLAUSE_REDUCTION:
1837 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1838 need_stmts = true;
1839 goto do_decl_clause;
1841 case OMP_CLAUSE_LASTPRIVATE:
1842 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1843 need_stmts = true;
1844 goto do_decl_clause;
1846 case OMP_CLAUSE_LINEAR:
1847 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1848 need_stmts = true;
1849 wi->val_only = true;
1850 wi->is_lhs = false;
1851 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1852 wi);
1853 goto do_decl_clause;
1855 case OMP_CLAUSE_PRIVATE:
1856 case OMP_CLAUSE_FIRSTPRIVATE:
1857 case OMP_CLAUSE_COPYPRIVATE:
1858 case OMP_CLAUSE_SHARED:
1859 case OMP_CLAUSE_TO_DECLARE:
1860 case OMP_CLAUSE_LINK:
1861 case OMP_CLAUSE_USE_DEVICE_PTR:
1862 case OMP_CLAUSE_IS_DEVICE_PTR:
1863 do_decl_clause:
1864 decl = OMP_CLAUSE_DECL (clause);
1865 if (VAR_P (decl)
1866 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1867 break;
1868 if (decl_function_context (decl) == info->context
1869 && !use_pointer_in_frame (decl))
1871 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1872 if (field)
1874 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1875 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1876 bitmap_set_bit (new_suppress, DECL_UID (decl));
1877 OMP_CLAUSE_DECL (clause)
1878 = get_local_debug_decl (info, decl, field);
1879 need_frame = true;
1882 break;
1884 case OMP_CLAUSE_SCHEDULE:
1885 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1886 break;
1887 /* FALLTHRU */
1888 case OMP_CLAUSE_FINAL:
1889 case OMP_CLAUSE_IF:
1890 case OMP_CLAUSE_NUM_THREADS:
1891 case OMP_CLAUSE_DEPEND:
1892 case OMP_CLAUSE_DEVICE:
1893 case OMP_CLAUSE_NUM_TEAMS:
1894 case OMP_CLAUSE_THREAD_LIMIT:
1895 case OMP_CLAUSE_SAFELEN:
1896 case OMP_CLAUSE_SIMDLEN:
1897 case OMP_CLAUSE_PRIORITY:
1898 case OMP_CLAUSE_GRAINSIZE:
1899 case OMP_CLAUSE_NUM_TASKS:
1900 case OMP_CLAUSE_HINT:
1901 case OMP_CLAUSE__CILK_FOR_COUNT_:
1902 case OMP_CLAUSE_NUM_GANGS:
1903 case OMP_CLAUSE_NUM_WORKERS:
1904 case OMP_CLAUSE_VECTOR_LENGTH:
1905 case OMP_CLAUSE_GANG:
1906 case OMP_CLAUSE_WORKER:
1907 case OMP_CLAUSE_VECTOR:
1908 case OMP_CLAUSE_ASYNC:
1909 case OMP_CLAUSE_WAIT:
1910 /* Several OpenACC clauses have optional arguments. Check if they
1911 are present. */
1912 if (OMP_CLAUSE_OPERAND (clause, 0))
1914 wi->val_only = true;
1915 wi->is_lhs = false;
1916 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1917 &dummy, wi);
1920 /* The gang clause accepts two arguments. */
1921 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1922 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1924 wi->val_only = true;
1925 wi->is_lhs = false;
1926 convert_nonlocal_reference_op
1927 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1929 break;
1931 case OMP_CLAUSE_DIST_SCHEDULE:
1932 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1934 wi->val_only = true;
1935 wi->is_lhs = false;
1936 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1937 &dummy, wi);
1939 break;
1941 case OMP_CLAUSE_MAP:
1942 case OMP_CLAUSE_TO:
1943 case OMP_CLAUSE_FROM:
1944 if (OMP_CLAUSE_SIZE (clause))
1946 wi->val_only = true;
1947 wi->is_lhs = false;
1948 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1949 &dummy, wi);
1951 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1952 goto do_decl_clause;
1953 wi->val_only = true;
1954 wi->is_lhs = false;
1955 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1956 wi, NULL);
1957 break;
1959 case OMP_CLAUSE_ALIGNED:
1960 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1962 wi->val_only = true;
1963 wi->is_lhs = false;
1964 convert_local_reference_op
1965 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1967 /* Like do_decl_clause, but don't add any suppression. */
1968 decl = OMP_CLAUSE_DECL (clause);
1969 if (VAR_P (decl)
1970 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1971 break;
1972 if (decl_function_context (decl) == info->context
1973 && !use_pointer_in_frame (decl))
1975 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1976 if (field)
1978 OMP_CLAUSE_DECL (clause)
1979 = get_local_debug_decl (info, decl, field);
1980 need_frame = true;
1983 break;
1985 case OMP_CLAUSE_NOWAIT:
1986 case OMP_CLAUSE_ORDERED:
1987 case OMP_CLAUSE_DEFAULT:
1988 case OMP_CLAUSE_COPYIN:
1989 case OMP_CLAUSE_COLLAPSE:
1990 case OMP_CLAUSE_TILE:
1991 case OMP_CLAUSE_UNTIED:
1992 case OMP_CLAUSE_MERGEABLE:
1993 case OMP_CLAUSE_PROC_BIND:
1994 case OMP_CLAUSE_NOGROUP:
1995 case OMP_CLAUSE_THREADS:
1996 case OMP_CLAUSE_SIMD:
1997 case OMP_CLAUSE_DEFAULTMAP:
1998 case OMP_CLAUSE_SEQ:
1999 case OMP_CLAUSE_INDEPENDENT:
2000 case OMP_CLAUSE_AUTO:
2001 break;
2003 /* The following clause belongs to the OpenACC cache directive, which
2004 is discarded during gimplification. */
2005 case OMP_CLAUSE__CACHE_:
2006 /* The following clauses are only allowed in the OpenMP declare simd
2007 directive, so not seen here. */
2008 case OMP_CLAUSE_UNIFORM:
2009 case OMP_CLAUSE_INBRANCH:
2010 case OMP_CLAUSE_NOTINBRANCH:
2011 /* The following clauses are only allowed on OpenMP cancel and
2012 cancellation point directives, which at this point have already
2013 been lowered into a function call. */
2014 case OMP_CLAUSE_FOR:
2015 case OMP_CLAUSE_PARALLEL:
2016 case OMP_CLAUSE_SECTIONS:
2017 case OMP_CLAUSE_TASKGROUP:
2018 /* The following clauses are only added during OMP lowering; nested
2019 function decomposition happens before that. */
2020 case OMP_CLAUSE__LOOPTEMP_:
2021 case OMP_CLAUSE__SIMDUID_:
2022 case OMP_CLAUSE__GRIDDIM_:
2023 /* Anything else. */
2024 default:
2025 gcc_unreachable ();
2029 info->suppress_expansion = new_suppress;
2031 if (need_stmts)
2032 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2033 switch (OMP_CLAUSE_CODE (clause))
2035 case OMP_CLAUSE_REDUCTION:
2036 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2038 tree old_context
2039 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2040 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2041 = info->context;
2042 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2043 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2044 = info->context;
2045 walk_body (convert_local_reference_stmt,
2046 convert_local_reference_op, info,
2047 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2048 walk_body (convert_local_reference_stmt,
2049 convert_local_reference_op, info,
2050 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2051 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2052 = old_context;
2053 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2054 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2055 = old_context;
2057 break;
2059 case OMP_CLAUSE_LASTPRIVATE:
2060 walk_body (convert_local_reference_stmt,
2061 convert_local_reference_op, info,
2062 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2063 break;
2065 case OMP_CLAUSE_LINEAR:
2066 walk_body (convert_local_reference_stmt,
2067 convert_local_reference_op, info,
2068 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2069 break;
2071 default:
2072 break;
2075 return need_frame;
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.

   GSI points at the statement being visited; *HANDLED_OPS_P tells the
   generic walker whether we already processed the operands ourselves;
   WI->info carries the struct nesting_info of the current function.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
				     wi))
	{
	  /* A clause needed the frame: share FRAME.* with the region.  */
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* If walking the body introduced a frame use (bit 4) and we did not
	 already add the SHARED clause above, add it now.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* Offloaded region: the frame cannot be merely shared, it must be
	 mapped to the device with an explicit MAP clause.  */
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* Same late-addition dance as for parallel/task above.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* No clauses of interest; just recurse into the body.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* Condition operands must stay rvalues; set the walker flags and
	 let it visit the operands itself.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (!use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      /* The decl was moved into the frame; the clobber of the
		 original decl is now meaningless, drop it.  */
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      CONSTRUCTOR_ELT (decls, i)->value
			= get_local_debug_decl (info, decl, field);
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple *stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      /* Computed gotos and the like need no rewriting.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      /* The label belongs to this function: a plain local goto.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Walk up the nesting chain to the function that owns the label.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be use for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
   are referenced via nonlocal goto from a nested function.  The rewrite
   will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  */

static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));

  if (!stmt)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (stmt);

  /* Only labels recorded in var_map (by convert_nl_goto_reference)
     are targets of a non-local goto and need a receiver.  */
  tree *slot = info->var_map->get (label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = wi->gsi;
  gsi_prev (&tmp_gsi);
  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
    {
      gimple *stmt = gimple_build_goto (label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  /* Install the DECL_NONLOCAL label just before the user's label.  */
  new_label = (tree) *slot;
  stmt = gimple_build_label (new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference a trampoline generated for the occasion.

   TP points at the operand being visited; DATA is the walk_stmt_info
   whose info member is the current nesting_info.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  bool descr;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;

      /* Decide whether to generate a descriptor or a trampoline.  */
      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;

      if (descr)
	x = lookup_descr_for_decl (i, decl, INSERT);
      else
	x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      if (descr)
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
      else
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      *tp = x;
      break;

    default:
      /* Keep walking into subexpressions, but not into types/decls.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      {
	tree save_local_var_chain = info->new_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* Bit 0 means the region body needs FRAME, bit 1 that it needs
	   the static chain; satisfy each with an appropriate clause.  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		/* parallel/task: CHAIN is firstprivate, FRAME is shared.  */
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target: the decl must be mapped instead.  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      if (gimple_call_chain (stmt))
	break;
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0: the callee is our own child and got &FRAME; bit 1: the
	     callee lives further out and got our incoming chain.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      /* Propagate any chain/frame needs discovered inside the region
	 into data-sharing clauses on the region itself.  */
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      /* For an offloaded region the decls must be mapped, not shared.  */
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      /* Iterate to a fixed point: stop when a pass over all functions
	 leaves the number of chain users unchanged.  */
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
/* State passed through the copy_body callbacks while remapping types and
   value expressions for debug_var_chain.  CB must remain the first member:
   the callbacks receive a copy_body_data pointer and cast it back to this
   struct (see nesting_copy_decl).  */

struct nesting_copy_body_data
{
  copy_body_data cb;
  /* The nesting_info whose var_map supplies already-remapped decls.  */
  struct nesting_info *root;
};
2801 /* A helper subroutine for debug_var_chain type remapping. */
2803 static tree
2804 nesting_copy_decl (tree decl, copy_body_data *id)
2806 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2807 tree *slot = nid->root->var_map->get (decl);
2809 if (slot)
2810 return (tree) *slot;
2812 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2814 tree new_decl = copy_decl_no_change (decl, id);
2815 DECL_ORIGINAL_TYPE (new_decl)
2816 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2817 return new_decl;
2820 if (VAR_P (decl)
2821 || TREE_CODE (decl) == PARM_DECL
2822 || TREE_CODE (decl) == RESULT_DECL)
2823 return decl;
2825 return copy_decl_no_change (decl, id);
2828 /* A helper function for remap_vla_decls. See if *TP contains
2829 some remapped variables. */
2831 static tree
2832 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2834 struct nesting_info *root = (struct nesting_info *) data;
2835 tree t = *tp;
2837 if (DECL_P (t))
2839 *walk_subtrees = 0;
2840 tree *slot = root->var_map->get (t);
2842 if (slot)
2843 return *slot;
2845 return NULL;
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Recurse into nested lexical blocks first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: bail out early unless some VLA value expression in this
     block actually mentions a remapped variable.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  /* Second pass: remap the affected value expressions and types using
     the copy_body machinery with nesting_copy_decl as decl hook.  */
  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  for (; var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Strip unnamed pointer layers in parallel on both old and new
	   types so the TYPE_NAME comparison below lines up.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
2946 /* Fold the MEM_REF *E. */
2947 bool
2948 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2950 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2951 *ref_p = fold (*ref_p);
2952 return true;
2955 /* Given DECL, a nested function, build an initialization call for FIELD,
2956 the trampoline or descriptor for DECL, using FUNC as the function. */
2958 static gcall *
2959 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
2960 tree func)
2962 tree arg1, arg2, arg3, x;
2964 gcc_assert (DECL_STATIC_CHAIN (decl));
2965 arg3 = build_addr (info->frame_decl);
2967 arg2 = build_addr (decl);
2969 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2970 info->frame_decl, field, NULL_TREE);
2971 arg1 = build_addr (x);
2973 return gimple_build_call (func, 3, arg1, arg2, arg3);
2976 /* Do "everything else" to clean up or complete state collected by the various
2977 walking passes -- create a field to hold the frame base address, lay out the
2978 types and decls, generate code to initialize the frame decl, store critical
2979 expressions in the struct function for rtl to find. */
2981 static void
2982 finalize_nesting_tree_1 (struct nesting_info *root)
2984 gimple_seq stmt_list;
2985 gimple *stmt;
2986 tree context = root->context;
2987 struct function *sf;
2989 stmt_list = NULL;
2991 /* If we created a non-local frame type or decl, we need to lay them
2992 out at this time. */
2993 if (root->frame_type)
2995 /* Debugging information needs to compute the frame base address of the
2996 parent frame out of the static chain from the nested frame.
2998 The static chain is the address of the FRAME record, so one could
2999 imagine it would be possible to compute the frame base address just
3000 adding a constant offset to this address. Unfortunately, this is not
3001 possible: if the FRAME object has alignment constraints that are
3002 stronger than the stack, then the offset between the frame base and
3003 the FRAME object will be dynamic.
3005 What we do instead is to append a field to the FRAME object that holds
3006 the frame base address: then debug info just has to fetch this
3007 field. */
3009 /* Debugging information will refer to the CFA as the frame base
3010 address: we will do the same here. */
3011 const tree frame_addr_fndecl
3012 = builtin_decl_explicit (BUILT_IN_DWARF_CFA);
3014 /* Create a field in the FRAME record to hold the frame base address for
3015 this stack frame. Since it will be used only by the debugger, put it
3016 at the end of the record in order not to shift all other offsets. */
3017 tree fb_decl = make_node (FIELD_DECL);
3019 DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
3020 TREE_TYPE (fb_decl) = ptr_type_node;
3021 TREE_ADDRESSABLE (fb_decl) = 1;
3022 DECL_CONTEXT (fb_decl) = root->frame_type;
3023 TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
3024 fb_decl);
3026 /* In some cases the frame type will trigger the -Wpadded warning.
3027 This is not helpful; suppress it. */
3028 int save_warn_padded = warn_padded;
3029 warn_padded = 0;
3030 layout_type (root->frame_type);
3031 warn_padded = save_warn_padded;
3032 layout_decl (root->frame_decl, 0);
3034 /* Initialize the frame base address field. If the builtin we need is
3035 not available, set it to NULL so that debugging information does not
3036 reference junk. */
3037 tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
3038 root->frame_decl, fb_decl, NULL_TREE);
3039 tree fb_tmp;
3041 if (frame_addr_fndecl != NULL_TREE)
3043 gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
3044 integer_zero_node);
3045 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3047 fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
3049 else
3050 fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
3051 gimple_seq_add_stmt (&stmt_list,
3052 gimple_build_assign (fb_ref, fb_tmp));
3054 /* Remove root->frame_decl from root->new_local_var_chain, so
3055 that we can declare it also in the lexical blocks, which
3056 helps ensure virtual regs that end up appearing in its RTL
3057 expression get substituted in instantiate_virtual_regs(). */
3058 tree *adjust;
3059 for (adjust = &root->new_local_var_chain;
3060 *adjust != root->frame_decl;
3061 adjust = &DECL_CHAIN (*adjust))
3062 gcc_assert (DECL_CHAIN (*adjust));
3063 *adjust = DECL_CHAIN (*adjust);
3065 DECL_CHAIN (root->frame_decl) = NULL_TREE;
3066 declare_vars (root->frame_decl,
3067 gimple_seq_first_stmt (gimple_body (context)), true);
3070 /* If any parameters were referenced non-locally, then we need to
3071 insert a copy. Likewise, if any variables were referenced by
3072 pointer, we need to initialize the address. */
3073 if (root->any_parm_remapped)
3075 tree p;
3076 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
3078 tree field, x, y;
3080 field = lookup_field_for_decl (root, p, NO_INSERT);
3081 if (!field)
3082 continue;
3084 if (use_pointer_in_frame (p))
3085 x = build_addr (p);
3086 else
3087 x = p;
3089 /* If the assignment is from a non-register the stmt is
3090 not valid gimple. Make it so by using a temporary instead. */
3091 if (!is_gimple_reg (x)
3092 && is_gimple_reg_type (TREE_TYPE (x)))
3094 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3095 x = init_tmp_var (root, x, &gsi);
3098 y = build3 (COMPONENT_REF, TREE_TYPE (field),
3099 root->frame_decl, field, NULL_TREE);
3100 stmt = gimple_build_assign (y, x);
3101 gimple_seq_add_stmt (&stmt_list, stmt);
3105 /* If a chain_field was created, then it needs to be initialized
3106 from chain_decl. */
3107 if (root->chain_field)
3109 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
3110 root->frame_decl, root->chain_field, NULL_TREE);
3111 stmt = gimple_build_assign (x, get_chain_decl (root));
3112 gimple_seq_add_stmt (&stmt_list, stmt);
3115 /* If trampolines were created, then we need to initialize them. */
3116 if (root->any_tramp_created)
3118 struct nesting_info *i;
3119 for (i = root->inner; i ; i = i->next)
3121 tree field, x;
3123 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
3124 if (!field)
3125 continue;
3127 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
3128 stmt = build_init_call_stmt (root, i->context, field, x);
3129 gimple_seq_add_stmt (&stmt_list, stmt);
3133 /* If descriptors were created, then we need to initialize them. */
3134 if (root->any_descr_created)
3136 struct nesting_info *i;
3137 for (i = root->inner; i ; i = i->next)
3139 tree field, x;
3141 field = lookup_descr_for_decl (root, i->context, NO_INSERT);
3142 if (!field)
3143 continue;
3145 x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
3146 stmt = build_init_call_stmt (root, i->context, field, x);
3147 gimple_seq_add_stmt (&stmt_list, stmt);
3151 /* If we created initialization statements, insert them. */
3152 if (stmt_list)
3154 gbind *bind;
3155 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
3156 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
3157 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
3158 gimple_bind_set_body (bind, stmt_list);
3161 /* If a chain_decl was created, then it needs to be registered with
3162 struct function so that it gets initialized from the static chain
3163 register at the beginning of the function. */
3164 sf = DECL_STRUCT_FUNCTION (root->context);
3165 sf->static_chain_decl = root->chain_decl;
3167 /* Similarly for the non-local goto save area. */
3168 if (root->nl_goto_field)
3170 sf->nonlocal_goto_save_area
3171 = get_frame_field (root, context, root->nl_goto_field, NULL);
3172 sf->has_nonlocal_label = 1;
3175 /* Make sure all new local variables get inserted into the
3176 proper BIND_EXPR. */
3177 if (root->new_local_var_chain)
3178 declare_vars (root->new_local_var_chain,
3179 gimple_seq_first_stmt (gimple_body (root->context)),
3180 false);
3182 if (root->debug_var_chain)
3184 tree debug_var;
3185 gbind *scope;
3187 remap_vla_decls (DECL_INITIAL (root->context), root);
3189 for (debug_var = root->debug_var_chain; debug_var;
3190 debug_var = DECL_CHAIN (debug_var))
3191 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3192 break;
3194 /* If there are any debug decls with variable length types,
3195 remap those types using other debug_var_chain variables. */
3196 if (debug_var)
3198 struct nesting_copy_body_data id;
3200 memset (&id, 0, sizeof (id));
3201 id.cb.copy_decl = nesting_copy_decl;
3202 id.cb.decl_map = new hash_map<tree, tree>;
3203 id.root = root;
3205 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
3206 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3208 tree type = TREE_TYPE (debug_var);
3209 tree newt, t = type;
3210 struct nesting_info *i;
3212 for (i = root; i; i = i->outer)
3213 if (variably_modified_type_p (type, i->context))
3214 break;
3216 if (i == NULL)
3217 continue;
3219 id.cb.src_fn = i->context;
3220 id.cb.dst_fn = i->context;
3221 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3223 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
3224 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3226 newt = TREE_TYPE (newt);
3227 t = TREE_TYPE (t);
3229 if (TYPE_NAME (newt)
3230 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3231 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3232 && newt != t
3233 && TYPE_NAME (newt) == TYPE_NAME (t))
3234 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3237 delete id.cb.decl_map;
3240 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
3241 if (gimple_bind_block (scope))
3242 declare_vars (root->debug_var_chain, scope, true);
3243 else
3244 BLOCK_VARS (DECL_INITIAL (root->context))
3245 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
3246 root->debug_var_chain);
3249 /* Fold the rewritten MEM_REF trees. */
3250 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
3252 /* Dump the translated tree function. */
3253 if (dump_file)
3255 fputs ("\n\n", dump_file);
3256 dump_function_to_file (root->context, dump_file, dump_flags);
3260 static void
3261 finalize_nesting_tree (struct nesting_info *root)
3263 struct nesting_info *n;
3264 FOR_EACH_NEST_INFO (n, root)
3265 finalize_nesting_tree_1 (n);
3268 /* Unnest the nodes and pass them to cgraph. */
3270 static void
3271 unnest_nesting_tree_1 (struct nesting_info *root)
3273 struct cgraph_node *node = cgraph_node::get (root->context);
3275 /* For nested functions update the cgraph to reflect unnesting.
3276 We also delay finalizing of these functions up to this point. */
3277 if (node->origin)
3279 node->unnest ();
3280 cgraph_node::finalize_function (root->context, true);
3284 static void
3285 unnest_nesting_tree (struct nesting_info *root)
3287 struct nesting_info *n;
3288 FOR_EACH_NEST_INFO (n, root)
3289 unnest_nesting_tree_1 (n);
3292 /* Free the data structures allocated during this pass. */
3294 static void
3295 free_nesting_tree (struct nesting_info *root)
3297 struct nesting_info *node, *next;
3299 node = iter_nestinfo_start (root);
3302 next = iter_nestinfo_next (node);
3303 delete node->var_map;
3304 delete node->field_map;
3305 delete node->mem_refs;
3306 free (node);
3307 node = next;
3309 while (node);
3312 /* Gimplify a function and all its nested functions. */
3313 static void
3314 gimplify_all_functions (struct cgraph_node *root)
3316 struct cgraph_node *iter;
3317 if (!gimple_body (root->decl))
3318 gimplify_function_tree (root->decl);
3319 for (iter = root->nested; iter; iter = iter->next_nested)
3320 gimplify_all_functions (iter);
3323 /* Main entry point for this pass. Process FNDECL and all of its nested
3324 subroutines and turn them into something less tightly bound. */
3326 void
3327 lower_nested_functions (tree fndecl)
3329 struct cgraph_node *cgn;
3330 struct nesting_info *root;
3332 /* If there are no nested functions, there's nothing to do. */
3333 cgn = cgraph_node::get (fndecl);
3334 if (!cgn->nested)
3335 return;
3337 gimplify_all_functions (cgn);
3339 dump_file = dump_begin (TDI_nested, &dump_flags);
3340 if (dump_file)
3341 fprintf (dump_file, "\n;; Function %s\n\n",
3342 lang_hooks.decl_printable_name (fndecl, 2));
3344 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3345 root = create_nesting_tree (cgn);
3347 walk_all_functions (convert_nonlocal_reference_stmt,
3348 convert_nonlocal_reference_op,
3349 root);
3350 walk_all_functions (convert_local_reference_stmt,
3351 convert_local_reference_op,
3352 root);
3353 walk_all_functions (convert_nl_goto_reference, NULL, root);
3354 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3356 convert_all_function_calls (root);
3357 finalize_nesting_tree (root);
3358 unnest_nesting_tree (root);
3360 free_nesting_tree (root);
3361 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3363 if (dump_file)
3365 dump_end (TDI_nested, dump_file);
3366 dump_file = NULL;
3370 #include "gt-tree-nested.h"