[testsuite] Require shared effective target for some lto.exp tests
[official-gcc.git] / gcc / tree-nested.c
blobc005e7c735dc93f137102583ceb9f9246823bc46
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "tree-dump.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
46 /* The object of this pass is to lower the representation of a set of nested
47 functions in order to expose all of the gory details of the various
48 nonlocal references. We want to do this sooner rather than later, in
49 order to give us more freedom in emitting all of the functions in question.
51 Back in olden times, when gcc was young, we developed an insanely
52 complicated scheme whereby variables which were referenced nonlocally
53 were forced to live in the stack of the declaring function, and then
54 the nested functions magically discovered where these variables were
55 placed. In order for this scheme to function properly, it required
56 that the outer function be partially expanded, then we switch to
57 compiling the inner function, and once done with those we switch back
58 to compiling the outer function. Such delicate ordering requirements
59 makes it difficult to do whole translation unit optimizations
60 involving such functions.
62 The implementation here is much more direct. Everything that can be
63 referenced by an inner function is a member of an explicitly created
64 structure herein called the "nonlocal frame struct". The incoming
65 static chain for a nested function is a pointer to this struct in
66 the parent. In this way, we settle on known offsets from a known
67 base, and so are decoupled from the logic that places objects in the
68 function's stack frame. More importantly, we don't have to wait for
69 that to happen -- since the compilation of the inner function is no
70 longer tied to a real stack frame, the nonlocal frame struct can be
71 allocated anywhere. Which means that the outer function is now
72 inlinable.
74 Theory of operation here is very simple. Iterate over all the
75 statements in all the functions (depth first) several times,
76 allocating structures and fields on demand. In general we want to
77 examine inner functions first, so that we can avoid making changes
78 to outer functions which are unnecessary.
80 The order of the passes matters a bit, in that later passes will be
81 skipped if it is discovered that the functions don't actually interact
82 at all. That is, they're nested in the lexical sense but could have
83 been written as independent functions without change. */
/* Bookkeeping information per nesting level.  One record exists for each
   function in the nesting tree; together they mirror the lexical nesting
   structure of the translation unit.  */

struct nesting_info
{
  /* Tree links: enclosing function, first nested function, and the next
     sibling nested within the same enclosing function.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Map from a non-locally referenced decl to its FIELD_DECL in this
     function's frame struct.  */
  hash_map<tree, tree> *field_map;
  /* Map from a decl (or nested FUNCTION_DECL) to its replacement element.  */
  hash_map<tree, tree> *var_map;
  /* Addresses of memory references already seen, to avoid rewriting the
     same reference twice.  */
  hash_set<tree *> *mem_refs;
  /* DECL_UIDs whose frame expansion is suppressed (handled by OMP
     clause lowering instead).  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this record describes.  */
  tree context;
  /* Chain of temporaries created for this function, registered later.  */
  tree new_local_var_chain;
  /* Chain of debug-only replacement decls (DECL_VALUE_EXPR holders).  */
  tree debug_var_chain;
  /* The non-local frame RECORD_TYPE and the local VAR_DECL holding it.  */
  tree frame_type;
  tree frame_decl;
  /* Field within the frame struct holding the parent's static chain,
     and the PARM_DECL-like variable holding the incoming chain.  */
  tree chain_field;
  tree chain_decl;
  /* Field holding the jmp_buf for non-local gotos, if any.  */
  tree nl_goto_field;

  /* Flags noting what was created on behalf of this function.  */
  bool any_parm_remapped;
  bool any_tramp_created;
  bool any_descr_created;
  /* Bit 1: own frame address taken; bit 2: chain decl used.  */
  char static_chain_added;
};
113 /* Iterate over the nesting tree, starting with ROOT, depth first. */
115 static inline struct nesting_info *
116 iter_nestinfo_start (struct nesting_info *root)
118 while (root->inner)
119 root = root->inner;
120 return root;
123 static inline struct nesting_info *
124 iter_nestinfo_next (struct nesting_info *node)
126 if (node->next)
127 return iter_nestinfo_start (node->next);
128 return node->outer;
/* Visit every nesting_info reachable from ROOT, innermost functions
   first, binding each in turn to I.  */
#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;


/* We're working in so many different function contexts simultaneously,
   that create_tmp_var is dangerous.  Prevent mishap.  Any accidental use
   becomes a reference to an undefined symbol and fails to compile.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy
/* Like create_tmp_var, except record the variable for registration at
   the given nesting level.  The temporary's DECL_CONTEXT is INFO's
   function, and it is chained onto INFO->new_local_var_chain so it can
   be declared in that function later.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  /* Complex and vector temporaries may be treated as gimple registers.  */
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}
/* Take the address of EXP to be used within function CONTEXT.
   Mark it for addressability as necessary.  */

tree
build_addr (tree exp)
{
  mark_addressable (exp);
  return build_fold_addr_expr (exp);
}
/* Insert FIELD into TYPE, sorted by alignment requirements (largest
   alignment first, which minimizes padding in the frame struct).  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  /* Find the first existing field whose alignment FIELD meets or
     exceeds, and link FIELD in just before it.  */
  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  DECL_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    SET_TYPE_ALIGN (type, DECL_ALIGN (field));
}
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the struct after the enclosing function, e.g. "FRAME.foo",
	 so it is recognizable in dumps and debug info.  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}
239 /* Return true if DECL should be referenced by pointer in the non-local
240 frame structure. */
242 static bool
243 use_pointer_in_frame (tree decl)
245 if (TREE_CODE (decl) == PARM_DECL)
247 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
248 sized decls, and inefficient to copy large aggregates. Don't bother
249 moving anything but scalar variables. */
250 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
252 else
254 /* Variable sized types make things "interesting" in the frame. */
255 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.  With
   NO_INSERT, return NULL_TREE when no field exists yet.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The frame holds only a pointer; the object itself stays
	     outside the frame struct.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The object lives in the frame; mirror the decl's type,
	     alignment, addressability and volatility on the field.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      /* Remapped parameters need an extra copy-in at function entry.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.
   Its type is a pointer to the enclosing function's frame struct.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      /* Needing a chain field implies this function takes a static chain.  */
      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
/* Initialize a new temporary with the GIMPLE_CALL STMT.  The call is
   given the temporary as its LHS and inserted before GSI; the temporary
   is returned.  */

static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
			gcall *call)
{
  tree t;

  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  /* Borrow the location of the statement we are inserting before,
     when there is one.  */
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}
403 /* Copy EXP into a temporary. Allocate the temporary in the context of
404 INFO and insert the initialization statement before GSI. */
406 static tree
407 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
409 tree t;
410 gimple *stmt;
412 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
413 stmt = gimple_build_assign (t, exp);
414 if (! gsi_end_p (*gsi))
415 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
416 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
418 return t;
422 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
424 static tree
425 gsi_gimplify_val (struct nesting_info *info, tree exp,
426 gimple_stmt_iterator *gsi)
428 if (is_gimple_val (exp))
429 return exp;
430 else
431 return init_tmp_var (info, exp, gsi);
434 /* Similarly, but copy from the temporary and insert the statement
435 after the iterator. */
437 static tree
438 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
440 tree t;
441 gimple *stmt;
443 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
444 stmt = gimple_build_assign (exp, t);
445 if (! gsi_end_p (*gsi))
446 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
447 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
449 return t;
/* Build or return the type used to represent a nested function trampoline.
   The type is a RECORD_TYPE with a single char-array field "__data",
   shared across the whole translation unit (hence GTY).  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, align);
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
/* Build or return the type used to represent a nested function descriptor:
   a RECORD_TYPE wrapping a two-element array of pointers, shared across
   the translation unit (hence GTY).  */

static GTY(()) tree descriptor_type;

static tree
get_descriptor_type (struct nesting_info *info)
{
  tree t;

  if (descriptor_type)
    return descriptor_type;

  /* Index type 0..1 gives a two-pointer payload.  */
  t = build_index_type (integer_one_node);
  t = build_array_type (ptr_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);

  descriptor_type = make_node (RECORD_TYPE);
  TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
  TYPE_FIELDS (descriptor_type) = t;
  layout_type (descriptor_type);
  DECL_CONTEXT (t) = descriptor_type;

  return descriptor_type;
}
/* Given DECL, a nested function, find or create an element in the
   var map for this function.  The element is a TREE_LIST whose
   TREE_PURPOSE/TREE_VALUE slots are filled in by the trampoline and
   descriptor lookups below.  */

static tree
lookup_element_for_decl (struct nesting_info *info, tree decl,
			 enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->var_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->var_map->get_or_insert (decl);
  if (!*slot)
    *slot = build_tree_list (NULL_TREE, NULL_TREE);

  return (tree) *slot;
}
538 /* Given DECL, a nested function, create a field in the non-local
539 frame structure for this function. */
541 static tree
542 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
544 tree field = make_node (FIELD_DECL);
545 DECL_NAME (field) = DECL_NAME (decl);
546 TREE_TYPE (field) = type;
547 TREE_ADDRESSABLE (field) = 1;
548 insert_field_into_struct (get_frame_type (info), field);
549 return field;
/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  The field is
   cached in the TREE_PURPOSE slot of the decl's var-map element.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  tree elt, field;

  elt = lookup_element_for_decl (info, decl, insert);
  if (!elt)
    return NULL_TREE;

  field = TREE_PURPOSE (elt);

  if (!field && insert == INSERT)
    {
      field = create_field_for_decl (info, decl, get_trampoline_type (info));
      TREE_PURPOSE (elt) = field;
      info->any_tramp_created = true;
    }

  return field;
}
/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a descriptor for this function.  The field is
   cached in the TREE_VALUE slot of the decl's var-map element
   (mirroring lookup_tramp_for_decl, which uses TREE_PURPOSE).  */

static tree
lookup_descr_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  tree elt, field;

  elt = lookup_element_for_decl (info, decl, insert);
  if (!elt)
    return NULL_TREE;

  field = TREE_VALUE (elt);

  if (!field && insert == INSERT)
    {
      field = create_field_for_decl (info, decl, get_descriptor_type (info));
      TREE_VALUE (elt) = field;
      info->any_descr_created = true;
    }

  return field;
}
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Save-area size in words, plus one word for the frame pointer.  */
      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
645 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
647 static void
648 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
649 struct nesting_info *info, gimple_seq *pseq)
651 struct walk_stmt_info wi;
653 memset (&wi, 0, sizeof (wi));
654 wi.info = info;
655 wi.val_only = true;
656 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
660 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
662 static inline void
663 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
664 struct nesting_info *info)
666 gimple_seq body = gimple_body (info->context);
667 walk_body (callback_stmt, callback_op, info, &body);
668 gimple_set_body (info->context, body);
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.
   Any statements the callbacks generate (via WI.GSI, which starts on an
   empty sequence) are appended to the loop's pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Point WI.GSI at an empty sequence to collect statements emitted by
     the operand callbacks.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is an lvalue: walked with val_only clear.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Anything the callbacks inserted belongs in the loop's pre-body.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
724 /* Similarly for ROOT and all functions nested underneath, depth first. */
726 static void
727 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
728 struct nesting_info *root)
730 struct nesting_info *n;
731 FOR_EACH_NEST_INFO (n, root)
732 walk_function (callback_stmt, callback_op, n);
/* We have to check for a fairly pathological case.  The operands of function
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here return indicating whether
   FNDECL has such a nested function.  ORIG_FN is the function we were
   trying to inline to use for checking whether any argument is variably
   modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  /* Walk every function nested directly within FNDECL, and recurse into
     their own nested functions.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN, recursing over the cgraph's nested-function
   links.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  /* Build children recursively; each new child is pushed at the head of
     the inner list.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See discussion at check_for_nested_with_variably_modified for a
     discussion of why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Target is ourselves: the chain is the address of our own frame.  */
      x = build_addr (info->frame_decl);
      info->static_chain_added |= 1;
    }
  else
    {
      /* Otherwise start from our incoming chain and follow __chain
	 fields up one level per loop iteration.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      info->static_chain_added |= 1;
    }
  else
    {
      /* Walk up the static chain, one __chain dereference per level,
	 until we reach the target's frame pointer.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build_simple_mem_ref (x);
    }

  /* Finally select FIELD within the target frame.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
static void note_nonlocal_vla_type (struct nesting_info *info, tree type);

/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  /* I is now the nesting level that owns DECL's frame slot.  */
  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || VAR_P (decl))
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The debug decl evaluates to the frame access built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  /* By default, do not recurse; each case re-enables recursion or walks
     subtrees explicitly as appropriate.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      /* Find the nesting_info that owns T's declaring function.  */
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build_simple_mem_ref (x);
		}
	    }

	  /* In value-only contexts, spill to (or fill from) a temporary
	     so the replacement satisfies gimple operand rules.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant,
	       presumably), then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1111 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1112 struct walk_stmt_info *);
1114 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1115 and PARM_DECLs that belong to outer functions. */
1117 static bool
1118 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1120 struct nesting_info *const info = (struct nesting_info *) wi->info;
1121 bool need_chain = false, need_stmts = false;
1122 tree clause, decl;
1123 int dummy;
1124 bitmap new_suppress;
1126 new_suppress = BITMAP_GGC_ALLOC ();
1127 bitmap_copy (new_suppress, info->suppress_expansion);
1129 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1131 switch (OMP_CLAUSE_CODE (clause))
1133 case OMP_CLAUSE_REDUCTION:
1134 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1135 need_stmts = true;
1136 goto do_decl_clause;
1138 case OMP_CLAUSE_LASTPRIVATE:
1139 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1140 need_stmts = true;
1141 goto do_decl_clause;
1143 case OMP_CLAUSE_LINEAR:
1144 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1145 need_stmts = true;
1146 wi->val_only = true;
1147 wi->is_lhs = false;
1148 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1149 &dummy, wi);
1150 goto do_decl_clause;
1152 case OMP_CLAUSE_PRIVATE:
1153 case OMP_CLAUSE_FIRSTPRIVATE:
1154 case OMP_CLAUSE_COPYPRIVATE:
1155 case OMP_CLAUSE_SHARED:
1156 case OMP_CLAUSE_TO_DECLARE:
1157 case OMP_CLAUSE_LINK:
1158 case OMP_CLAUSE_USE_DEVICE_PTR:
1159 case OMP_CLAUSE_IS_DEVICE_PTR:
1160 do_decl_clause:
1161 decl = OMP_CLAUSE_DECL (clause);
1162 if (VAR_P (decl)
1163 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1164 break;
1165 if (decl_function_context (decl) != info->context)
1167 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1168 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1169 bitmap_set_bit (new_suppress, DECL_UID (decl));
1170 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1171 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1172 need_chain = true;
1174 break;
1176 case OMP_CLAUSE_SCHEDULE:
1177 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1178 break;
1179 /* FALLTHRU */
1180 case OMP_CLAUSE_FINAL:
1181 case OMP_CLAUSE_IF:
1182 case OMP_CLAUSE_NUM_THREADS:
1183 case OMP_CLAUSE_DEPEND:
1184 case OMP_CLAUSE_DEVICE:
1185 case OMP_CLAUSE_NUM_TEAMS:
1186 case OMP_CLAUSE_THREAD_LIMIT:
1187 case OMP_CLAUSE_SAFELEN:
1188 case OMP_CLAUSE_SIMDLEN:
1189 case OMP_CLAUSE_PRIORITY:
1190 case OMP_CLAUSE_GRAINSIZE:
1191 case OMP_CLAUSE_NUM_TASKS:
1192 case OMP_CLAUSE_HINT:
1193 case OMP_CLAUSE__CILK_FOR_COUNT_:
1194 case OMP_CLAUSE_NUM_GANGS:
1195 case OMP_CLAUSE_NUM_WORKERS:
1196 case OMP_CLAUSE_VECTOR_LENGTH:
1197 case OMP_CLAUSE_GANG:
1198 case OMP_CLAUSE_WORKER:
1199 case OMP_CLAUSE_VECTOR:
1200 case OMP_CLAUSE_ASYNC:
1201 case OMP_CLAUSE_WAIT:
1202 /* Several OpenACC clauses have optional arguments. Check if they
1203 are present. */
1204 if (OMP_CLAUSE_OPERAND (clause, 0))
1206 wi->val_only = true;
1207 wi->is_lhs = false;
1208 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1209 &dummy, wi);
1212 /* The gang clause accepts two arguments. */
1213 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1214 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1216 wi->val_only = true;
1217 wi->is_lhs = false;
1218 convert_nonlocal_reference_op
1219 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1221 break;
1223 case OMP_CLAUSE_DIST_SCHEDULE:
1224 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1226 wi->val_only = true;
1227 wi->is_lhs = false;
1228 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1229 &dummy, wi);
1231 break;
1233 case OMP_CLAUSE_MAP:
1234 case OMP_CLAUSE_TO:
1235 case OMP_CLAUSE_FROM:
1236 if (OMP_CLAUSE_SIZE (clause))
1238 wi->val_only = true;
1239 wi->is_lhs = false;
1240 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1241 &dummy, wi);
1243 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1244 goto do_decl_clause;
1245 wi->val_only = true;
1246 wi->is_lhs = false;
1247 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1248 wi, NULL);
1249 break;
1251 case OMP_CLAUSE_ALIGNED:
1252 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1254 wi->val_only = true;
1255 wi->is_lhs = false;
1256 convert_nonlocal_reference_op
1257 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1259 /* Like do_decl_clause, but don't add any suppression. */
1260 decl = OMP_CLAUSE_DECL (clause);
1261 if (VAR_P (decl)
1262 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1263 break;
1264 if (decl_function_context (decl) != info->context)
1266 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1267 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1268 need_chain = true;
1270 break;
1272 case OMP_CLAUSE_NOWAIT:
1273 case OMP_CLAUSE_ORDERED:
1274 case OMP_CLAUSE_DEFAULT:
1275 case OMP_CLAUSE_COPYIN:
1276 case OMP_CLAUSE_COLLAPSE:
1277 case OMP_CLAUSE_UNTIED:
1278 case OMP_CLAUSE_MERGEABLE:
1279 case OMP_CLAUSE_PROC_BIND:
1280 case OMP_CLAUSE_NOGROUP:
1281 case OMP_CLAUSE_THREADS:
1282 case OMP_CLAUSE_SIMD:
1283 case OMP_CLAUSE_DEFAULTMAP:
1284 case OMP_CLAUSE_SEQ:
1285 case OMP_CLAUSE_INDEPENDENT:
1286 case OMP_CLAUSE_AUTO:
1287 break;
1289 /* OpenACC tile clauses are discarded during gimplification. */
1290 case OMP_CLAUSE_TILE:
1291 /* The following clause belongs to the OpenACC cache directive, which
1292 is discarded during gimplification. */
1293 case OMP_CLAUSE__CACHE_:
1294 /* The following clauses are only allowed in the OpenMP declare simd
1295 directive, so not seen here. */
1296 case OMP_CLAUSE_UNIFORM:
1297 case OMP_CLAUSE_INBRANCH:
1298 case OMP_CLAUSE_NOTINBRANCH:
1299 /* The following clauses are only allowed on OpenMP cancel and
1300 cancellation point directives, which at this point have already
1301 been lowered into a function call. */
1302 case OMP_CLAUSE_FOR:
1303 case OMP_CLAUSE_PARALLEL:
1304 case OMP_CLAUSE_SECTIONS:
1305 case OMP_CLAUSE_TASKGROUP:
1306 /* The following clauses are only added during OMP lowering; nested
1307 function decomposition happens before that. */
1308 case OMP_CLAUSE__LOOPTEMP_:
1309 case OMP_CLAUSE__SIMDUID_:
1310 case OMP_CLAUSE__GRIDDIM_:
1311 /* Anything else. */
1312 default:
1313 gcc_unreachable ();
1317 info->suppress_expansion = new_suppress;
1319 if (need_stmts)
1320 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1321 switch (OMP_CLAUSE_CODE (clause))
1323 case OMP_CLAUSE_REDUCTION:
1324 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1326 tree old_context
1327 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1328 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1329 = info->context;
1330 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1331 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1332 = info->context;
1333 walk_body (convert_nonlocal_reference_stmt,
1334 convert_nonlocal_reference_op, info,
1335 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1336 walk_body (convert_nonlocal_reference_stmt,
1337 convert_nonlocal_reference_op, info,
1338 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1339 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1340 = old_context;
1341 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1342 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1343 = old_context;
1345 break;
1347 case OMP_CLAUSE_LASTPRIVATE:
1348 walk_body (convert_nonlocal_reference_stmt,
1349 convert_nonlocal_reference_op, info,
1350 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1351 break;
1353 case OMP_CLAUSE_LINEAR:
1354 walk_body (convert_nonlocal_reference_stmt,
1355 convert_nonlocal_reference_op, info,
1356 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1357 break;
1359 default:
1360 break;
1363 return need_chain;
1366 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1368 static void
1369 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1371 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1372 type = TREE_TYPE (type);
1374 if (TYPE_NAME (type)
1375 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1376 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1377 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1379 while (POINTER_TYPE_P (type)
1380 || TREE_CODE (type) == VECTOR_TYPE
1381 || TREE_CODE (type) == FUNCTION_TYPE
1382 || TREE_CODE (type) == METHOD_TYPE)
1383 type = TREE_TYPE (type);
1385 if (TREE_CODE (type) == ARRAY_TYPE)
1387 tree domain, t;
1389 note_nonlocal_vla_type (info, TREE_TYPE (type));
1390 domain = TYPE_DOMAIN (type);
1391 if (domain)
1393 t = TYPE_MIN_VALUE (domain);
1394 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1395 && decl_function_context (t) != info->context)
1396 get_nonlocal_debug_decl (info, t);
1397 t = TYPE_MAX_VALUE (domain);
1398 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1399 && decl_function_context (t) != info->context)
1400 get_nonlocal_debug_decl (info, t);
1405 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1406 in BLOCK. */
1408 static void
1409 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1411 tree var;
1413 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1414 if (VAR_P (var)
1415 && variably_modified_type_p (TREE_TYPE (var), NULL)
1416 && DECL_HAS_VALUE_EXPR_P (var)
1417 && decl_function_context (var) != info->context)
1418 note_nonlocal_vla_type (info, TREE_TYPE (var));
1421 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1422 PARM_DECLs that belong to outer functions. This handles statements
1423 that are not handled via the standard recursion done in
1424 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1425 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1426 operands of STMT have been handled by this function. */
1428 static tree
1429 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1430 struct walk_stmt_info *wi)
1432 struct nesting_info *info = (struct nesting_info *) wi->info;
1433 tree save_local_var_chain;
1434 bitmap save_suppress;
1435 gimple *stmt = gsi_stmt (*gsi);
1437 switch (gimple_code (stmt))
1439 case GIMPLE_GOTO:
1440 /* Don't walk non-local gotos for now. */
1441 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1443 wi->val_only = true;
1444 wi->is_lhs = false;
1445 *handled_ops_p = false;
1446 return NULL_TREE;
1448 break;
1450 case GIMPLE_OMP_PARALLEL:
1451 case GIMPLE_OMP_TASK:
1452 save_suppress = info->suppress_expansion;
1453 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1454 wi))
1456 tree c, decl;
1457 decl = get_chain_decl (info);
1458 c = build_omp_clause (gimple_location (stmt),
1459 OMP_CLAUSE_FIRSTPRIVATE);
1460 OMP_CLAUSE_DECL (c) = decl;
1461 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1462 gimple_omp_taskreg_set_clauses (stmt, c);
1465 save_local_var_chain = info->new_local_var_chain;
1466 info->new_local_var_chain = NULL;
1468 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1469 info, gimple_omp_body_ptr (stmt));
1471 if (info->new_local_var_chain)
1472 declare_vars (info->new_local_var_chain,
1473 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1474 false);
1475 info->new_local_var_chain = save_local_var_chain;
1476 info->suppress_expansion = save_suppress;
1477 break;
1479 case GIMPLE_OMP_FOR:
1480 save_suppress = info->suppress_expansion;
1481 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1482 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1483 convert_nonlocal_reference_stmt,
1484 convert_nonlocal_reference_op, info);
1485 walk_body (convert_nonlocal_reference_stmt,
1486 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1487 info->suppress_expansion = save_suppress;
1488 break;
1490 case GIMPLE_OMP_SECTIONS:
1491 save_suppress = info->suppress_expansion;
1492 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1493 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1494 info, gimple_omp_body_ptr (stmt));
1495 info->suppress_expansion = save_suppress;
1496 break;
1498 case GIMPLE_OMP_SINGLE:
1499 save_suppress = info->suppress_expansion;
1500 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1501 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1502 info, gimple_omp_body_ptr (stmt));
1503 info->suppress_expansion = save_suppress;
1504 break;
1506 case GIMPLE_OMP_TARGET:
1507 if (!is_gimple_omp_offloaded (stmt))
1509 save_suppress = info->suppress_expansion;
1510 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1511 wi);
1512 info->suppress_expansion = save_suppress;
1513 walk_body (convert_nonlocal_reference_stmt,
1514 convert_nonlocal_reference_op, info,
1515 gimple_omp_body_ptr (stmt));
1516 break;
1518 save_suppress = info->suppress_expansion;
1519 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1520 wi))
1522 tree c, decl;
1523 decl = get_chain_decl (info);
1524 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1525 OMP_CLAUSE_DECL (c) = decl;
1526 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1527 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1528 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1529 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1532 save_local_var_chain = info->new_local_var_chain;
1533 info->new_local_var_chain = NULL;
1535 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1536 info, gimple_omp_body_ptr (stmt));
1538 if (info->new_local_var_chain)
1539 declare_vars (info->new_local_var_chain,
1540 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1541 false);
1542 info->new_local_var_chain = save_local_var_chain;
1543 info->suppress_expansion = save_suppress;
1544 break;
1546 case GIMPLE_OMP_TEAMS:
1547 save_suppress = info->suppress_expansion;
1548 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1549 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1550 info, gimple_omp_body_ptr (stmt));
1551 info->suppress_expansion = save_suppress;
1552 break;
1554 case GIMPLE_OMP_SECTION:
1555 case GIMPLE_OMP_MASTER:
1556 case GIMPLE_OMP_TASKGROUP:
1557 case GIMPLE_OMP_ORDERED:
1558 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1559 info, gimple_omp_body_ptr (stmt));
1560 break;
1562 case GIMPLE_BIND:
1564 gbind *bind_stmt = as_a <gbind *> (stmt);
1565 if (!optimize && gimple_bind_block (bind_stmt))
1566 note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));
1568 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1569 if (TREE_CODE (var) == NAMELIST_DECL)
1571 /* Adjust decls mentioned in NAMELIST_DECL. */
1572 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1573 tree decl;
1574 unsigned int i;
1576 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1578 if (VAR_P (decl)
1579 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1580 continue;
1581 if (decl_function_context (decl) != info->context)
1582 CONSTRUCTOR_ELT (decls, i)->value
1583 = get_nonlocal_debug_decl (info, decl);
1587 *handled_ops_p = false;
1588 return NULL_TREE;
1590 case GIMPLE_COND:
1591 wi->val_only = true;
1592 wi->is_lhs = false;
1593 *handled_ops_p = false;
1594 return NULL_TREE;
1596 default:
1597 /* For every other statement that we are not interested in
1598 handling here, let the walker traverse the operands. */
1599 *handled_ops_p = false;
1600 return NULL_TREE;
1603 /* We have handled all of STMT operands, no need to traverse the operands. */
1604 *handled_ops_p = true;
1605 return NULL_TREE;
1609 /* A subroutine of convert_local_reference. Create a local variable
1610 in the parent function with DECL_VALUE_EXPR set to reference the
1611 field in FRAME. This is used both for debug info and in OMP
1612 lowering. */
1614 static tree
1615 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1617 tree x, new_decl;
1619 tree *slot = &info->var_map->get_or_insert (decl);
1620 if (*slot)
1621 return *slot;
1623 /* Make sure frame_decl gets created. */
1624 (void) get_frame_type (info);
1625 x = info->frame_decl;
1626 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1628 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1629 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1630 DECL_CONTEXT (new_decl) = info->context;
1631 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1632 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1633 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1634 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1635 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1636 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1637 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1638 if ((TREE_CODE (decl) == PARM_DECL
1639 || TREE_CODE (decl) == RESULT_DECL
1640 || VAR_P (decl))
1641 && DECL_BY_REFERENCE (decl))
1642 DECL_BY_REFERENCE (new_decl) = 1;
1644 SET_DECL_VALUE_EXPR (new_decl, x);
1645 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1646 *slot = new_decl;
1648 DECL_CHAIN (new_decl) = info->debug_var_chain;
1649 info->debug_var_chain = new_decl;
1651 /* Do not emit debug info twice. */
1652 DECL_IGNORED_P (decl) = 1;
1654 return new_decl;
1658 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1659 and PARM_DECLs that were referenced by inner nested functions.
1660 The rewrite will be a structure reference to the local frame variable. */
1662 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1664 static tree
1665 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1667 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1668 struct nesting_info *const info = (struct nesting_info *) wi->info;
1669 tree t = *tp, field, x;
1670 bool save_val_only;
1672 *walk_subtrees = 0;
1673 switch (TREE_CODE (t))
1675 case VAR_DECL:
1676 /* Non-automatic variables are never processed. */
1677 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1678 break;
1679 /* FALLTHRU */
1681 case PARM_DECL:
1682 if (decl_function_context (t) == info->context)
1684 /* If we copied a pointer to the frame, then the original decl
1685 is used unchanged in the parent function. */
1686 if (use_pointer_in_frame (t))
1687 break;
1689 /* No need to transform anything if no child references the
1690 variable. */
1691 field = lookup_field_for_decl (info, t, NO_INSERT);
1692 if (!field)
1693 break;
1694 wi->changed = true;
1696 x = get_local_debug_decl (info, t, field);
1697 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1698 x = get_frame_field (info, info->context, field, &wi->gsi);
1700 if (wi->val_only)
1702 if (wi->is_lhs)
1703 x = save_tmp_var (info, x, &wi->gsi);
1704 else
1705 x = init_tmp_var (info, x, &wi->gsi);
1708 *tp = x;
1710 break;
1712 case ADDR_EXPR:
1713 save_val_only = wi->val_only;
1714 wi->val_only = false;
1715 wi->is_lhs = false;
1716 wi->changed = false;
1717 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1718 wi->val_only = save_val_only;
1720 /* If we converted anything ... */
1721 if (wi->changed)
1723 tree save_context;
1725 /* Then the frame decl is now addressable. */
1726 TREE_ADDRESSABLE (info->frame_decl) = 1;
1728 save_context = current_function_decl;
1729 current_function_decl = info->context;
1730 recompute_tree_invariant_for_addr_expr (t);
1731 current_function_decl = save_context;
1733 /* If we are in a context where we only accept values, then
1734 compute the address into a temporary. */
1735 if (save_val_only)
1736 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1737 t, &wi->gsi);
1739 break;
1741 case REALPART_EXPR:
1742 case IMAGPART_EXPR:
1743 case COMPONENT_REF:
1744 case ARRAY_REF:
1745 case ARRAY_RANGE_REF:
1746 case BIT_FIELD_REF:
1747 /* Go down this entire nest and just look at the final prefix and
1748 anything that describes the references. Otherwise, we lose track
1749 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1750 save_val_only = wi->val_only;
1751 wi->val_only = true;
1752 wi->is_lhs = false;
1753 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1755 if (TREE_CODE (t) == COMPONENT_REF)
1756 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1757 NULL);
1758 else if (TREE_CODE (t) == ARRAY_REF
1759 || TREE_CODE (t) == ARRAY_RANGE_REF)
1761 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1762 NULL);
1763 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1764 NULL);
1765 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1766 NULL);
1769 wi->val_only = false;
1770 walk_tree (tp, convert_local_reference_op, wi, NULL);
1771 wi->val_only = save_val_only;
1772 break;
1774 case MEM_REF:
1775 save_val_only = wi->val_only;
1776 wi->val_only = true;
1777 wi->is_lhs = false;
1778 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1779 wi, NULL);
1780 /* We need to re-fold the MEM_REF as component references as
1781 part of a ADDR_EXPR address are not allowed. But we cannot
1782 fold here, as the chain record type is not yet finalized. */
1783 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1784 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1785 info->mem_refs->add (tp);
1786 wi->val_only = save_val_only;
1787 break;
1789 case VIEW_CONVERT_EXPR:
1790 /* Just request to look at the subtrees, leaving val_only and lhs
1791 untouched. This might actually be for !val_only + lhs, in which
1792 case we don't want to force a replacement by a temporary. */
1793 *walk_subtrees = 1;
1794 break;
1796 default:
1797 if (!IS_TYPE_OR_DECL_P (t))
1799 *walk_subtrees = 1;
1800 wi->val_only = true;
1801 wi->is_lhs = false;
1803 break;
1806 return NULL_TREE;
1809 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1810 struct walk_stmt_info *);
1812 /* Helper for convert_local_reference. Convert all the references in
1813 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1815 static bool
1816 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1818 struct nesting_info *const info = (struct nesting_info *) wi->info;
1819 bool need_frame = false, need_stmts = false;
1820 tree clause, decl;
1821 int dummy;
1822 bitmap new_suppress;
1824 new_suppress = BITMAP_GGC_ALLOC ();
1825 bitmap_copy (new_suppress, info->suppress_expansion);
1827 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1829 switch (OMP_CLAUSE_CODE (clause))
1831 case OMP_CLAUSE_REDUCTION:
1832 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1833 need_stmts = true;
1834 goto do_decl_clause;
1836 case OMP_CLAUSE_LASTPRIVATE:
1837 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1838 need_stmts = true;
1839 goto do_decl_clause;
1841 case OMP_CLAUSE_LINEAR:
1842 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1843 need_stmts = true;
1844 wi->val_only = true;
1845 wi->is_lhs = false;
1846 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1847 wi);
1848 goto do_decl_clause;
1850 case OMP_CLAUSE_PRIVATE:
1851 case OMP_CLAUSE_FIRSTPRIVATE:
1852 case OMP_CLAUSE_COPYPRIVATE:
1853 case OMP_CLAUSE_SHARED:
1854 case OMP_CLAUSE_TO_DECLARE:
1855 case OMP_CLAUSE_LINK:
1856 case OMP_CLAUSE_USE_DEVICE_PTR:
1857 case OMP_CLAUSE_IS_DEVICE_PTR:
1858 do_decl_clause:
1859 decl = OMP_CLAUSE_DECL (clause);
1860 if (VAR_P (decl)
1861 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1862 break;
1863 if (decl_function_context (decl) == info->context
1864 && !use_pointer_in_frame (decl))
1866 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1867 if (field)
1869 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1870 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1871 bitmap_set_bit (new_suppress, DECL_UID (decl));
1872 OMP_CLAUSE_DECL (clause)
1873 = get_local_debug_decl (info, decl, field);
1874 need_frame = true;
1877 break;
1879 case OMP_CLAUSE_SCHEDULE:
1880 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1881 break;
1882 /* FALLTHRU */
1883 case OMP_CLAUSE_FINAL:
1884 case OMP_CLAUSE_IF:
1885 case OMP_CLAUSE_NUM_THREADS:
1886 case OMP_CLAUSE_DEPEND:
1887 case OMP_CLAUSE_DEVICE:
1888 case OMP_CLAUSE_NUM_TEAMS:
1889 case OMP_CLAUSE_THREAD_LIMIT:
1890 case OMP_CLAUSE_SAFELEN:
1891 case OMP_CLAUSE_SIMDLEN:
1892 case OMP_CLAUSE_PRIORITY:
1893 case OMP_CLAUSE_GRAINSIZE:
1894 case OMP_CLAUSE_NUM_TASKS:
1895 case OMP_CLAUSE_HINT:
1896 case OMP_CLAUSE__CILK_FOR_COUNT_:
1897 case OMP_CLAUSE_NUM_GANGS:
1898 case OMP_CLAUSE_NUM_WORKERS:
1899 case OMP_CLAUSE_VECTOR_LENGTH:
1900 case OMP_CLAUSE_GANG:
1901 case OMP_CLAUSE_WORKER:
1902 case OMP_CLAUSE_VECTOR:
1903 case OMP_CLAUSE_ASYNC:
1904 case OMP_CLAUSE_WAIT:
1905 /* Several OpenACC clauses have optional arguments. Check if they
1906 are present. */
1907 if (OMP_CLAUSE_OPERAND (clause, 0))
1909 wi->val_only = true;
1910 wi->is_lhs = false;
1911 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1912 &dummy, wi);
1915 /* The gang clause accepts two arguments. */
1916 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1917 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1919 wi->val_only = true;
1920 wi->is_lhs = false;
1921 convert_nonlocal_reference_op
1922 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1924 break;
1926 case OMP_CLAUSE_DIST_SCHEDULE:
1927 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1929 wi->val_only = true;
1930 wi->is_lhs = false;
1931 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1932 &dummy, wi);
1934 break;
1936 case OMP_CLAUSE_MAP:
1937 case OMP_CLAUSE_TO:
1938 case OMP_CLAUSE_FROM:
1939 if (OMP_CLAUSE_SIZE (clause))
1941 wi->val_only = true;
1942 wi->is_lhs = false;
1943 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1944 &dummy, wi);
1946 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1947 goto do_decl_clause;
1948 wi->val_only = true;
1949 wi->is_lhs = false;
1950 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1951 wi, NULL);
1952 break;
1954 case OMP_CLAUSE_ALIGNED:
1955 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1957 wi->val_only = true;
1958 wi->is_lhs = false;
1959 convert_local_reference_op
1960 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1962 /* Like do_decl_clause, but don't add any suppression. */
1963 decl = OMP_CLAUSE_DECL (clause);
1964 if (VAR_P (decl)
1965 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1966 break;
1967 if (decl_function_context (decl) == info->context
1968 && !use_pointer_in_frame (decl))
1970 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1971 if (field)
1973 OMP_CLAUSE_DECL (clause)
1974 = get_local_debug_decl (info, decl, field);
1975 need_frame = true;
1978 break;
1980 case OMP_CLAUSE_NOWAIT:
1981 case OMP_CLAUSE_ORDERED:
1982 case OMP_CLAUSE_DEFAULT:
1983 case OMP_CLAUSE_COPYIN:
1984 case OMP_CLAUSE_COLLAPSE:
1985 case OMP_CLAUSE_UNTIED:
1986 case OMP_CLAUSE_MERGEABLE:
1987 case OMP_CLAUSE_PROC_BIND:
1988 case OMP_CLAUSE_NOGROUP:
1989 case OMP_CLAUSE_THREADS:
1990 case OMP_CLAUSE_SIMD:
1991 case OMP_CLAUSE_DEFAULTMAP:
1992 case OMP_CLAUSE_SEQ:
1993 case OMP_CLAUSE_INDEPENDENT:
1994 case OMP_CLAUSE_AUTO:
1995 break;
1997 /* OpenACC tile clauses are discarded during gimplification. */
1998 case OMP_CLAUSE_TILE:
1999 /* The following clause belongs to the OpenACC cache directive, which
2000 is discarded during gimplification. */
2001 case OMP_CLAUSE__CACHE_:
2002 /* The following clauses are only allowed in the OpenMP declare simd
2003 directive, so not seen here. */
2004 case OMP_CLAUSE_UNIFORM:
2005 case OMP_CLAUSE_INBRANCH:
2006 case OMP_CLAUSE_NOTINBRANCH:
2007 /* The following clauses are only allowed on OpenMP cancel and
2008 cancellation point directives, which at this point have already
2009 been lowered into a function call. */
2010 case OMP_CLAUSE_FOR:
2011 case OMP_CLAUSE_PARALLEL:
2012 case OMP_CLAUSE_SECTIONS:
2013 case OMP_CLAUSE_TASKGROUP:
2014 /* The following clauses are only added during OMP lowering; nested
2015 function decomposition happens before that. */
2016 case OMP_CLAUSE__LOOPTEMP_:
2017 case OMP_CLAUSE__SIMDUID_:
2018 case OMP_CLAUSE__GRIDDIM_:
2019 /* Anything else. */
2020 default:
2021 gcc_unreachable ();
2025 info->suppress_expansion = new_suppress;
2027 if (need_stmts)
2028 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2029 switch (OMP_CLAUSE_CODE (clause))
2031 case OMP_CLAUSE_REDUCTION:
2032 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2034 tree old_context
2035 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2036 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2037 = info->context;
2038 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2039 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2040 = info->context;
2041 walk_body (convert_local_reference_stmt,
2042 convert_local_reference_op, info,
2043 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2044 walk_body (convert_local_reference_stmt,
2045 convert_local_reference_op, info,
2046 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2047 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2048 = old_context;
2049 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2050 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2051 = old_context;
2053 break;
2055 case OMP_CLAUSE_LASTPRIVATE:
2056 walk_body (convert_local_reference_stmt,
2057 convert_local_reference_op, info,
2058 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2059 break;
2061 case OMP_CLAUSE_LINEAR:
2062 walk_body (convert_local_reference_stmt,
2063 convert_local_reference_op, info,
2064 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2065 break;
2067 default:
2068 break;
2071 return need_frame;
2075 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2076 and PARM_DECLs that were referenced by inner nested functions.
2077 The rewrite will be a structure reference to the local frame variable. */
2079 static tree
2080 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2081 struct walk_stmt_info *wi)
2083 struct nesting_info *info = (struct nesting_info *) wi->info;
2084 tree save_local_var_chain;
2085 bitmap save_suppress;
2086 char save_static_chain_added;
2087 bool frame_decl_added;
2088 gimple *stmt = gsi_stmt (*gsi);
2090 switch (gimple_code (stmt))
2092 case GIMPLE_OMP_PARALLEL:
2093 case GIMPLE_OMP_TASK:
2094 save_suppress = info->suppress_expansion;
2095 frame_decl_added = false;
2096 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
2097 wi))
2099 tree c = build_omp_clause (gimple_location (stmt),
2100 OMP_CLAUSE_SHARED);
2101 (void) get_frame_type (info);
2102 OMP_CLAUSE_DECL (c) = info->frame_decl;
2103 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2104 gimple_omp_taskreg_set_clauses (stmt, c);
2105 info->static_chain_added |= 4;
2106 frame_decl_added = true;
2109 save_local_var_chain = info->new_local_var_chain;
2110 save_static_chain_added = info->static_chain_added;
2111 info->new_local_var_chain = NULL;
2112 info->static_chain_added = 0;
2114 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2115 gimple_omp_body_ptr (stmt));
2117 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2119 tree c = build_omp_clause (gimple_location (stmt),
2120 OMP_CLAUSE_SHARED);
2121 (void) get_frame_type (info);
2122 OMP_CLAUSE_DECL (c) = info->frame_decl;
2123 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2124 info->static_chain_added |= 4;
2125 gimple_omp_taskreg_set_clauses (stmt, c);
2127 if (info->new_local_var_chain)
2128 declare_vars (info->new_local_var_chain,
2129 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2130 info->new_local_var_chain = save_local_var_chain;
2131 info->suppress_expansion = save_suppress;
2132 info->static_chain_added |= save_static_chain_added;
2133 break;
2135 case GIMPLE_OMP_FOR:
2136 save_suppress = info->suppress_expansion;
2137 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
2138 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
2139 convert_local_reference_stmt,
2140 convert_local_reference_op, info);
2141 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2142 info, gimple_omp_body_ptr (stmt));
2143 info->suppress_expansion = save_suppress;
2144 break;
2146 case GIMPLE_OMP_SECTIONS:
2147 save_suppress = info->suppress_expansion;
2148 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
2149 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2150 info, gimple_omp_body_ptr (stmt));
2151 info->suppress_expansion = save_suppress;
2152 break;
2154 case GIMPLE_OMP_SINGLE:
2155 save_suppress = info->suppress_expansion;
2156 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
2157 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2158 info, gimple_omp_body_ptr (stmt));
2159 info->suppress_expansion = save_suppress;
2160 break;
2162 case GIMPLE_OMP_TARGET:
2163 if (!is_gimple_omp_offloaded (stmt))
2165 save_suppress = info->suppress_expansion;
2166 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
2167 info->suppress_expansion = save_suppress;
2168 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2169 info, gimple_omp_body_ptr (stmt));
2170 break;
2172 save_suppress = info->suppress_expansion;
2173 frame_decl_added = false;
2174 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
2176 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2177 (void) get_frame_type (info);
2178 OMP_CLAUSE_DECL (c) = info->frame_decl;
2179 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2180 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2181 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2182 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2183 info->static_chain_added |= 4;
2184 frame_decl_added = true;
2187 save_local_var_chain = info->new_local_var_chain;
2188 save_static_chain_added = info->static_chain_added;
2189 info->new_local_var_chain = NULL;
2190 info->static_chain_added = 0;
2192 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2193 gimple_omp_body_ptr (stmt));
2195 if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2197 tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2198 (void) get_frame_type (info);
2199 OMP_CLAUSE_DECL (c) = info->frame_decl;
2200 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2201 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2202 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2203 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2204 info->static_chain_added |= 4;
2207 if (info->new_local_var_chain)
2208 declare_vars (info->new_local_var_chain,
2209 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2210 info->new_local_var_chain = save_local_var_chain;
2211 info->suppress_expansion = save_suppress;
2212 info->static_chain_added |= save_static_chain_added;
2213 break;
2215 case GIMPLE_OMP_TEAMS:
2216 save_suppress = info->suppress_expansion;
2217 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2218 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2219 info, gimple_omp_body_ptr (stmt));
2220 info->suppress_expansion = save_suppress;
2221 break;
2223 case GIMPLE_OMP_SECTION:
2224 case GIMPLE_OMP_MASTER:
2225 case GIMPLE_OMP_TASKGROUP:
2226 case GIMPLE_OMP_ORDERED:
2227 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2228 info, gimple_omp_body_ptr (stmt));
2229 break;
2231 case GIMPLE_COND:
2232 wi->val_only = true;
2233 wi->is_lhs = false;
2234 *handled_ops_p = false;
2235 return NULL_TREE;
2237 case GIMPLE_ASSIGN:
2238 if (gimple_clobber_p (stmt))
2240 tree lhs = gimple_assign_lhs (stmt);
2241 if (!use_pointer_in_frame (lhs)
2242 && lookup_field_for_decl (info, lhs, NO_INSERT))
2244 gsi_replace (gsi, gimple_build_nop (), true);
2245 break;
2248 *handled_ops_p = false;
2249 return NULL_TREE;
2251 case GIMPLE_BIND:
2252 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2253 var;
2254 var = DECL_CHAIN (var))
2255 if (TREE_CODE (var) == NAMELIST_DECL)
2257 /* Adjust decls mentioned in NAMELIST_DECL. */
2258 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2259 tree decl;
2260 unsigned int i;
2262 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2264 if (VAR_P (decl)
2265 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2266 continue;
2267 if (decl_function_context (decl) == info->context
2268 && !use_pointer_in_frame (decl))
2270 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2271 if (field)
2273 CONSTRUCTOR_ELT (decls, i)->value
2274 = get_local_debug_decl (info, decl, field);
2280 *handled_ops_p = false;
2281 return NULL_TREE;
2283 default:
2284 /* For every other statement that we are not interested in
2285 handling here, let the walker traverse the operands. */
2286 *handled_ops_p = false;
2287 return NULL_TREE;
2290 /* Indicate that we have handled all the operands ourselves. */
2291 *handled_ops_p = true;
2292 return NULL_TREE;
2296 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2297 that reference labels from outer functions. The rewrite will be a
2298 call to __builtin_nonlocal_goto. */
2300 static tree
2301 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2302 struct walk_stmt_info *wi)
2304 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2305 tree label, new_label, target_context, x, field;
2306 gcall *call;
2307 gimple *stmt = gsi_stmt (*gsi);
2309 if (gimple_code (stmt) != GIMPLE_GOTO)
2311 *handled_ops_p = false;
2312 return NULL_TREE;
2315 label = gimple_goto_dest (stmt);
2316 if (TREE_CODE (label) != LABEL_DECL)
2318 *handled_ops_p = false;
2319 return NULL_TREE;
2322 target_context = decl_function_context (label);
2323 if (target_context == info->context)
2325 *handled_ops_p = false;
2326 return NULL_TREE;
2329 for (i = info->outer; target_context != i->context; i = i->outer)
2330 continue;
2332 /* The original user label may also be use for a normal goto, therefore
2333 we must create a new label that will actually receive the abnormal
2334 control transfer. This new label will be marked LABEL_NONLOCAL; this
2335 mark will trigger proper behavior in the cfg, as well as cause the
2336 (hairy target-specific) non-local goto receiver code to be generated
2337 when we expand rtl. Enter this association into var_map so that we
2338 can insert the new label into the IL during a second pass. */
2339 tree *slot = &i->var_map->get_or_insert (label);
2340 if (*slot == NULL)
2342 new_label = create_artificial_label (UNKNOWN_LOCATION);
2343 DECL_NONLOCAL (new_label) = 1;
2344 *slot = new_label;
2346 else
2347 new_label = *slot;
2349 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2350 field = get_nl_goto_field (i);
2351 x = get_frame_field (info, target_context, field, gsi);
2352 x = build_addr (x);
2353 x = gsi_gimplify_val (info, x, gsi);
2354 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2355 2, build_addr (new_label), x);
2356 gsi_replace (gsi, call, false);
2358 /* We have handled all of STMT's operands, no need to keep going. */
2359 *handled_ops_p = true;
2360 return NULL_TREE;
2364 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2365 are referenced via nonlocal goto from a nested function. The rewrite
2366 will involve installing a newly generated DECL_NONLOCAL label, and
2367 (potentially) a branch around the rtl gunk that is assumed to be
2368 attached to such a label. */
2370 static tree
2371 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2372 struct walk_stmt_info *wi)
2374 struct nesting_info *const info = (struct nesting_info *) wi->info;
2375 tree label, new_label;
2376 gimple_stmt_iterator tmp_gsi;
2377 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2379 if (!stmt)
2381 *handled_ops_p = false;
2382 return NULL_TREE;
2385 label = gimple_label_label (stmt);
2387 tree *slot = info->var_map->get (label);
2388 if (!slot)
2390 *handled_ops_p = false;
2391 return NULL_TREE;
2394 /* If there's any possibility that the previous statement falls through,
2395 then we must branch around the new non-local label. */
2396 tmp_gsi = wi->gsi;
2397 gsi_prev (&tmp_gsi);
2398 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2400 gimple *stmt = gimple_build_goto (label);
2401 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2404 new_label = (tree) *slot;
2405 stmt = gimple_build_label (new_label);
2406 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2408 *handled_ops_p = true;
2409 return NULL_TREE;
2413 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2414 of nested functions that require the use of trampolines. The rewrite
2415 will involve a reference a trampoline generated for the occasion. */
2417 static tree
2418 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2420 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2421 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2422 tree t = *tp, decl, target_context, x, builtin;
2423 bool descr;
2424 gcall *call;
2426 *walk_subtrees = 0;
2427 switch (TREE_CODE (t))
2429 case ADDR_EXPR:
2430 /* Build
2431 T.1 = &CHAIN->tramp;
2432 T.2 = __builtin_adjust_trampoline (T.1);
2433 T.3 = (func_type)T.2;
2436 decl = TREE_OPERAND (t, 0);
2437 if (TREE_CODE (decl) != FUNCTION_DECL)
2438 break;
2440 /* Only need to process nested functions. */
2441 target_context = decl_function_context (decl);
2442 if (!target_context)
2443 break;
2445 /* If the nested function doesn't use a static chain, then
2446 it doesn't need a trampoline. */
2447 if (!DECL_STATIC_CHAIN (decl))
2448 break;
2450 /* If we don't want a trampoline, then don't build one. */
2451 if (TREE_NO_TRAMPOLINE (t))
2452 break;
2454 /* Lookup the immediate parent of the callee, as that's where
2455 we need to insert the trampoline. */
2456 for (i = info; i->context != target_context; i = i->outer)
2457 continue;
2459 /* Decide whether to generate a descriptor or a trampoline. */
2460 descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;
2462 if (descr)
2463 x = lookup_descr_for_decl (i, decl, INSERT);
2464 else
2465 x = lookup_tramp_for_decl (i, decl, INSERT);
2467 /* Compute the address of the field holding the trampoline. */
2468 x = get_frame_field (info, target_context, x, &wi->gsi);
2469 x = build_addr (x);
2470 x = gsi_gimplify_val (info, x, &wi->gsi);
2472 /* Do machine-specific ugliness. Normally this will involve
2473 computing extra alignment, but it can really be anything. */
2474 if (descr)
2475 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
2476 else
2477 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2478 call = gimple_build_call (builtin, 1, x);
2479 x = init_tmp_var_with_call (info, &wi->gsi, call);
2481 /* Cast back to the proper function type. */
2482 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2483 x = init_tmp_var (info, x, &wi->gsi);
2485 *tp = x;
2486 break;
2488 default:
2489 if (!IS_TYPE_OR_DECL_P (t))
2490 *walk_subtrees = 1;
2491 break;
2494 return NULL_TREE;
2498 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2499 to addresses of nested functions that require the use of
2500 trampolines. The rewrite will involve a reference a trampoline
2501 generated for the occasion. */
2503 static tree
2504 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2505 struct walk_stmt_info *wi)
2507 struct nesting_info *info = (struct nesting_info *) wi->info;
2508 gimple *stmt = gsi_stmt (*gsi);
2510 switch (gimple_code (stmt))
2512 case GIMPLE_CALL:
2514 /* Only walk call arguments, lest we generate trampolines for
2515 direct calls. */
2516 unsigned long i, nargs = gimple_call_num_args (stmt);
2517 for (i = 0; i < nargs; i++)
2518 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2519 wi, NULL);
2520 break;
2523 case GIMPLE_OMP_TARGET:
2524 if (!is_gimple_omp_offloaded (stmt))
2526 *handled_ops_p = false;
2527 return NULL_TREE;
2529 /* FALLTHRU */
2530 case GIMPLE_OMP_PARALLEL:
2531 case GIMPLE_OMP_TASK:
2533 tree save_local_var_chain = info->new_local_var_chain;
2534 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2535 info->new_local_var_chain = NULL;
2536 char save_static_chain_added = info->static_chain_added;
2537 info->static_chain_added = 0;
2538 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2539 info, gimple_omp_body_ptr (stmt));
2540 if (info->new_local_var_chain)
2541 declare_vars (info->new_local_var_chain,
2542 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2543 false);
2544 for (int i = 0; i < 2; i++)
2546 tree c, decl;
2547 if ((info->static_chain_added & (1 << i)) == 0)
2548 continue;
2549 decl = i ? get_chain_decl (info) : info->frame_decl;
2550 /* Don't add CHAIN.* or FRAME.* twice. */
2551 for (c = gimple_omp_taskreg_clauses (stmt);
2553 c = OMP_CLAUSE_CHAIN (c))
2554 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2555 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2556 && OMP_CLAUSE_DECL (c) == decl)
2557 break;
2558 if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2560 c = build_omp_clause (gimple_location (stmt),
2561 i ? OMP_CLAUSE_FIRSTPRIVATE
2562 : OMP_CLAUSE_SHARED);
2563 OMP_CLAUSE_DECL (c) = decl;
2564 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2565 gimple_omp_taskreg_set_clauses (stmt, c);
2567 else if (c == NULL)
2569 c = build_omp_clause (gimple_location (stmt),
2570 OMP_CLAUSE_MAP);
2571 OMP_CLAUSE_DECL (c) = decl;
2572 OMP_CLAUSE_SET_MAP_KIND (c,
2573 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2574 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2575 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2576 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2580 info->new_local_var_chain = save_local_var_chain;
2581 info->static_chain_added |= save_static_chain_added;
2583 break;
2585 default:
2586 *handled_ops_p = false;
2587 return NULL_TREE;
2590 *handled_ops_p = true;
2591 return NULL_TREE;
2596 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2597 that reference nested functions to make sure that the static chain
2598 is set up properly for the call. */
2600 static tree
2601 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2602 struct walk_stmt_info *wi)
2604 struct nesting_info *const info = (struct nesting_info *) wi->info;
2605 tree decl, target_context;
2606 char save_static_chain_added;
2607 int i;
2608 gimple *stmt = gsi_stmt (*gsi);
2610 switch (gimple_code (stmt))
2612 case GIMPLE_CALL:
2613 if (gimple_call_chain (stmt))
2614 break;
2615 decl = gimple_call_fndecl (stmt);
2616 if (!decl)
2617 break;
2618 target_context = decl_function_context (decl);
2619 if (target_context && DECL_STATIC_CHAIN (decl))
2621 gimple_call_set_chain (as_a <gcall *> (stmt),
2622 get_static_chain (info, target_context,
2623 &wi->gsi));
2624 info->static_chain_added |= (1 << (info->context != target_context));
2626 break;
2628 case GIMPLE_OMP_PARALLEL:
2629 case GIMPLE_OMP_TASK:
2630 save_static_chain_added = info->static_chain_added;
2631 info->static_chain_added = 0;
2632 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2633 for (i = 0; i < 2; i++)
2635 tree c, decl;
2636 if ((info->static_chain_added & (1 << i)) == 0)
2637 continue;
2638 decl = i ? get_chain_decl (info) : info->frame_decl;
2639 /* Don't add CHAIN.* or FRAME.* twice. */
2640 for (c = gimple_omp_taskreg_clauses (stmt);
2642 c = OMP_CLAUSE_CHAIN (c))
2643 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2644 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2645 && OMP_CLAUSE_DECL (c) == decl)
2646 break;
2647 if (c == NULL)
2649 c = build_omp_clause (gimple_location (stmt),
2650 i ? OMP_CLAUSE_FIRSTPRIVATE
2651 : OMP_CLAUSE_SHARED);
2652 OMP_CLAUSE_DECL (c) = decl;
2653 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2654 gimple_omp_taskreg_set_clauses (stmt, c);
2657 info->static_chain_added |= save_static_chain_added;
2658 break;
2660 case GIMPLE_OMP_TARGET:
2661 if (!is_gimple_omp_offloaded (stmt))
2663 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2664 break;
2666 save_static_chain_added = info->static_chain_added;
2667 info->static_chain_added = 0;
2668 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2669 for (i = 0; i < 2; i++)
2671 tree c, decl;
2672 if ((info->static_chain_added & (1 << i)) == 0)
2673 continue;
2674 decl = i ? get_chain_decl (info) : info->frame_decl;
2675 /* Don't add CHAIN.* or FRAME.* twice. */
2676 for (c = gimple_omp_target_clauses (stmt);
2678 c = OMP_CLAUSE_CHAIN (c))
2679 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2680 && OMP_CLAUSE_DECL (c) == decl)
2681 break;
2682 if (c == NULL)
2684 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2685 OMP_CLAUSE_DECL (c) = decl;
2686 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2687 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2688 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2689 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2693 info->static_chain_added |= save_static_chain_added;
2694 break;
2696 case GIMPLE_OMP_FOR:
2697 walk_body (convert_gimple_call, NULL, info,
2698 gimple_omp_for_pre_body_ptr (stmt));
2699 /* FALLTHRU */
2700 case GIMPLE_OMP_SECTIONS:
2701 case GIMPLE_OMP_SECTION:
2702 case GIMPLE_OMP_SINGLE:
2703 case GIMPLE_OMP_TEAMS:
2704 case GIMPLE_OMP_MASTER:
2705 case GIMPLE_OMP_TASKGROUP:
2706 case GIMPLE_OMP_ORDERED:
2707 case GIMPLE_OMP_CRITICAL:
2708 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2709 break;
2711 default:
2712 /* Keep looking for other operands. */
2713 *handled_ops_p = false;
2714 return NULL_TREE;
2717 *handled_ops_p = true;
2718 return NULL_TREE;
2721 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2722 call expressions. At the same time, determine if a nested function
2723 actually uses its static chain; if not, remember that. */
2725 static void
2726 convert_all_function_calls (struct nesting_info *root)
2728 unsigned int chain_count = 0, old_chain_count, iter_count;
2729 struct nesting_info *n;
2731 /* First, optimistically clear static_chain for all decls that haven't
2732 used the static chain already for variable access. But always create
2733 it if not optimizing. This makes it possible to reconstruct the static
2734 nesting tree at run time and thus to resolve up-level references from
2735 within the debugger. */
2736 FOR_EACH_NEST_INFO (n, root)
2738 tree decl = n->context;
2739 if (!optimize)
2741 if (n->inner)
2742 (void) get_frame_type (n);
2743 if (n->outer)
2744 (void) get_chain_decl (n);
2746 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2748 DECL_STATIC_CHAIN (decl) = 0;
2749 if (dump_file && (dump_flags & TDF_DETAILS))
2750 fprintf (dump_file, "Guessing no static-chain for %s\n",
2751 lang_hooks.decl_printable_name (decl, 2));
2753 else
2754 DECL_STATIC_CHAIN (decl) = 1;
2755 chain_count += DECL_STATIC_CHAIN (decl);
2758 /* Walk the functions and perform transformations. Note that these
2759 transformations can induce new uses of the static chain, which in turn
2760 require re-examining all users of the decl. */
2761 /* ??? It would make sense to try to use the call graph to speed this up,
2762 but the call graph hasn't really been built yet. Even if it did, we
2763 would still need to iterate in this loop since address-of references
2764 wouldn't show up in the callgraph anyway. */
2765 iter_count = 0;
2768 old_chain_count = chain_count;
2769 chain_count = 0;
2770 iter_count++;
2772 if (dump_file && (dump_flags & TDF_DETAILS))
2773 fputc ('\n', dump_file);
2775 FOR_EACH_NEST_INFO (n, root)
2777 tree decl = n->context;
2778 walk_function (convert_tramp_reference_stmt,
2779 convert_tramp_reference_op, n);
2780 walk_function (convert_gimple_call, NULL, n);
2781 chain_count += DECL_STATIC_CHAIN (decl);
2784 while (chain_count != old_chain_count);
2786 if (dump_file && (dump_flags & TDF_DETAILS))
2787 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2788 iter_count);
2791 struct nesting_copy_body_data
2793 copy_body_data cb;
2794 struct nesting_info *root;
2797 /* A helper subroutine for debug_var_chain type remapping. */
2799 static tree
2800 nesting_copy_decl (tree decl, copy_body_data *id)
2802 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2803 tree *slot = nid->root->var_map->get (decl);
2805 if (slot)
2806 return (tree) *slot;
2808 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2810 tree new_decl = copy_decl_no_change (decl, id);
2811 DECL_ORIGINAL_TYPE (new_decl)
2812 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2813 return new_decl;
2816 if (VAR_P (decl)
2817 || TREE_CODE (decl) == PARM_DECL
2818 || TREE_CODE (decl) == RESULT_DECL)
2819 return decl;
2821 return copy_decl_no_change (decl, id);
2824 /* A helper function for remap_vla_decls. See if *TP contains
2825 some remapped variables. */
2827 static tree
2828 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2830 struct nesting_info *root = (struct nesting_info *) data;
2831 tree t = *tp;
2833 if (DECL_P (t))
2835 *walk_subtrees = 0;
2836 tree *slot = root->var_map->get (t);
2838 if (slot)
2839 return *slot;
2841 return NULL;
2844 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2845 involved. */
2847 static void
2848 remap_vla_decls (tree block, struct nesting_info *root)
2850 tree var, subblock, val, type;
2851 struct nesting_copy_body_data id;
2853 for (subblock = BLOCK_SUBBLOCKS (block);
2854 subblock;
2855 subblock = BLOCK_CHAIN (subblock))
2856 remap_vla_decls (subblock, root);
2858 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2859 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
2861 val = DECL_VALUE_EXPR (var);
2862 type = TREE_TYPE (var);
2864 if (!(TREE_CODE (val) == INDIRECT_REF
2865 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2866 && variably_modified_type_p (type, NULL)))
2867 continue;
2869 if (root->var_map->get (TREE_OPERAND (val, 0))
2870 || walk_tree (&type, contains_remapped_vars, root, NULL))
2871 break;
2874 if (var == NULL_TREE)
2875 return;
2877 memset (&id, 0, sizeof (id));
2878 id.cb.copy_decl = nesting_copy_decl;
2879 id.cb.decl_map = new hash_map<tree, tree>;
2880 id.root = root;
2882 for (; var; var = DECL_CHAIN (var))
2883 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
2885 struct nesting_info *i;
2886 tree newt, context;
2888 val = DECL_VALUE_EXPR (var);
2889 type = TREE_TYPE (var);
2891 if (!(TREE_CODE (val) == INDIRECT_REF
2892 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2893 && variably_modified_type_p (type, NULL)))
2894 continue;
2896 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2897 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2898 continue;
2900 context = decl_function_context (var);
2901 for (i = root; i; i = i->outer)
2902 if (i->context == context)
2903 break;
2905 if (i == NULL)
2906 continue;
2908 /* Fully expand value expressions. This avoids having debug variables
2909 only referenced from them and that can be swept during GC. */
2910 if (slot)
2912 tree t = (tree) *slot;
2913 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2914 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2917 id.cb.src_fn = i->context;
2918 id.cb.dst_fn = i->context;
2919 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2921 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2922 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2924 newt = TREE_TYPE (newt);
2925 type = TREE_TYPE (type);
2927 if (TYPE_NAME (newt)
2928 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2929 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2930 && newt != type
2931 && TYPE_NAME (newt) == TYPE_NAME (type))
2932 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2934 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2935 if (val != DECL_VALUE_EXPR (var))
2936 SET_DECL_VALUE_EXPR (var, val);
2939 delete id.cb.decl_map;
2942 /* Fold the MEM_REF *E. */
2943 bool
2944 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2946 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2947 *ref_p = fold (*ref_p);
2948 return true;
2951 /* Given DECL, a nested function, build an initialization call for FIELD,
2952 the trampoline or descriptor for DECL, using FUNC as the function. */
2954 static gcall *
2955 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
2956 tree func)
2958 tree arg1, arg2, arg3, x;
2960 gcc_assert (DECL_STATIC_CHAIN (decl));
2961 arg3 = build_addr (info->frame_decl);
2963 arg2 = build_addr (decl);
2965 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2966 info->frame_decl, field, NULL_TREE);
2967 arg1 = build_addr (x);
2969 return gimple_build_call (func, 3, arg1, arg2, arg3);
2972 /* Do "everything else" to clean up or complete state collected by the various
2973 walking passes -- create a field to hold the frame base address, lay out the
2974 types and decls, generate code to initialize the frame decl, store critical
2975 expressions in the struct function for rtl to find. */
2977 static void
2978 finalize_nesting_tree_1 (struct nesting_info *root)
2980 gimple_seq stmt_list;
2981 gimple *stmt;
2982 tree context = root->context;
2983 struct function *sf;
2985 stmt_list = NULL;
2987 /* If we created a non-local frame type or decl, we need to lay them
2988 out at this time. */
2989 if (root->frame_type)
2991 /* Debugging information needs to compute the frame base address of the
2992 parent frame out of the static chain from the nested frame.
2994 The static chain is the address of the FRAME record, so one could
2995 imagine it would be possible to compute the frame base address just
2996 adding a constant offset to this address. Unfortunately, this is not
2997 possible: if the FRAME object has alignment constraints that are
2998 stronger than the stack, then the offset between the frame base and
2999 the FRAME object will be dynamic.
3001 What we do instead is to append a field to the FRAME object that holds
3002 the frame base address: then debug info just has to fetch this
3003 field. */
3005 /* Debugging information will refer to the CFA as the frame base
3006 address: we will do the same here. */
3007 const tree frame_addr_fndecl
3008 = builtin_decl_explicit (BUILT_IN_DWARF_CFA);
3010 /* Create a field in the FRAME record to hold the frame base address for
3011 this stack frame. Since it will be used only by the debugger, put it
3012 at the end of the record in order not to shift all other offsets. */
3013 tree fb_decl = make_node (FIELD_DECL);
3015 DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
3016 TREE_TYPE (fb_decl) = ptr_type_node;
3017 TREE_ADDRESSABLE (fb_decl) = 1;
3018 DECL_CONTEXT (fb_decl) = root->frame_type;
3019 TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
3020 fb_decl);
3022 /* In some cases the frame type will trigger the -Wpadded warning.
3023 This is not helpful; suppress it. */
3024 int save_warn_padded = warn_padded;
3025 warn_padded = 0;
3026 layout_type (root->frame_type);
3027 warn_padded = save_warn_padded;
3028 layout_decl (root->frame_decl, 0);
3030 /* Initialize the frame base address field. If the builtin we need is
3031 not available, set it to NULL so that debugging information does not
3032 reference junk. */
3033 tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
3034 root->frame_decl, fb_decl, NULL_TREE);
3035 tree fb_tmp;
3037 if (frame_addr_fndecl != NULL_TREE)
3039 gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
3040 integer_zero_node);
3041 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3043 fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
3045 else
3046 fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
3047 gimple_seq_add_stmt (&stmt_list,
3048 gimple_build_assign (fb_ref, fb_tmp));
3050 /* Remove root->frame_decl from root->new_local_var_chain, so
3051 that we can declare it also in the lexical blocks, which
3052 helps ensure virtual regs that end up appearing in its RTL
3053 expression get substituted in instantiate_virtual_regs(). */
3054 tree *adjust;
3055 for (adjust = &root->new_local_var_chain;
3056 *adjust != root->frame_decl;
3057 adjust = &DECL_CHAIN (*adjust))
3058 gcc_assert (DECL_CHAIN (*adjust));
3059 *adjust = DECL_CHAIN (*adjust);
3061 DECL_CHAIN (root->frame_decl) = NULL_TREE;
3062 declare_vars (root->frame_decl,
3063 gimple_seq_first_stmt (gimple_body (context)), true);
3066 /* If any parameters were referenced non-locally, then we need to
3067 insert a copy. Likewise, if any variables were referenced by
3068 pointer, we need to initialize the address. */
3069 if (root->any_parm_remapped)
3071 tree p;
3072 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
3074 tree field, x, y;
3076 field = lookup_field_for_decl (root, p, NO_INSERT);
3077 if (!field)
3078 continue;
3080 if (use_pointer_in_frame (p))
3081 x = build_addr (p);
3082 else
3083 x = p;
3085 /* If the assignment is from a non-register the stmt is
3086 not valid gimple. Make it so by using a temporary instead. */
3087 if (!is_gimple_reg (x)
3088 && is_gimple_reg_type (TREE_TYPE (x)))
3090 gimple_stmt_iterator gsi = gsi_last (stmt_list);
3091 x = init_tmp_var (root, x, &gsi);
3094 y = build3 (COMPONENT_REF, TREE_TYPE (field),
3095 root->frame_decl, field, NULL_TREE);
3096 stmt = gimple_build_assign (y, x);
3097 gimple_seq_add_stmt (&stmt_list, stmt);
3101 /* If a chain_field was created, then it needs to be initialized
3102 from chain_decl. */
3103 if (root->chain_field)
3105 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
3106 root->frame_decl, root->chain_field, NULL_TREE);
3107 stmt = gimple_build_assign (x, get_chain_decl (root));
3108 gimple_seq_add_stmt (&stmt_list, stmt);
3111 /* If trampolines were created, then we need to initialize them. */
3112 if (root->any_tramp_created)
3114 struct nesting_info *i;
3115 for (i = root->inner; i ; i = i->next)
3117 tree field, x;
3119 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
3120 if (!field)
3121 continue;
3123 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
3124 stmt = build_init_call_stmt (root, i->context, field, x);
3125 gimple_seq_add_stmt (&stmt_list, stmt);
3129 /* If descriptors were created, then we need to initialize them. */
3130 if (root->any_descr_created)
3132 struct nesting_info *i;
3133 for (i = root->inner; i ; i = i->next)
3135 tree field, x;
3137 field = lookup_descr_for_decl (root, i->context, NO_INSERT);
3138 if (!field)
3139 continue;
3141 x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
3142 stmt = build_init_call_stmt (root, i->context, field, x);
3143 gimple_seq_add_stmt (&stmt_list, stmt);
3147 /* If we created initialization statements, insert them. */
3148 if (stmt_list)
3150 gbind *bind;
3151 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
3152 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
3153 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
3154 gimple_bind_set_body (bind, stmt_list);
3157 /* If a chain_decl was created, then it needs to be registered with
3158 struct function so that it gets initialized from the static chain
3159 register at the beginning of the function. */
3160 sf = DECL_STRUCT_FUNCTION (root->context);
3161 sf->static_chain_decl = root->chain_decl;
3163 /* Similarly for the non-local goto save area. */
3164 if (root->nl_goto_field)
3166 sf->nonlocal_goto_save_area
3167 = get_frame_field (root, context, root->nl_goto_field, NULL);
3168 sf->has_nonlocal_label = 1;
3171 /* Make sure all new local variables get inserted into the
3172 proper BIND_EXPR. */
3173 if (root->new_local_var_chain)
3174 declare_vars (root->new_local_var_chain,
3175 gimple_seq_first_stmt (gimple_body (root->context)),
3176 false);
3178 if (root->debug_var_chain)
3180 tree debug_var;
3181 gbind *scope;
3183 remap_vla_decls (DECL_INITIAL (root->context), root);
3185 for (debug_var = root->debug_var_chain; debug_var;
3186 debug_var = DECL_CHAIN (debug_var))
3187 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3188 break;
3190 /* If there are any debug decls with variable length types,
3191 remap those types using other debug_var_chain variables. */
3192 if (debug_var)
3194 struct nesting_copy_body_data id;
3196 memset (&id, 0, sizeof (id));
3197 id.cb.copy_decl = nesting_copy_decl;
3198 id.cb.decl_map = new hash_map<tree, tree>;
3199 id.root = root;
3201 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
3202 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3204 tree type = TREE_TYPE (debug_var);
3205 tree newt, t = type;
3206 struct nesting_info *i;
3208 for (i = root; i; i = i->outer)
3209 if (variably_modified_type_p (type, i->context))
3210 break;
3212 if (i == NULL)
3213 continue;
3215 id.cb.src_fn = i->context;
3216 id.cb.dst_fn = i->context;
3217 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3219 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
3220 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3222 newt = TREE_TYPE (newt);
3223 t = TREE_TYPE (t);
3225 if (TYPE_NAME (newt)
3226 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3227 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3228 && newt != t
3229 && TYPE_NAME (newt) == TYPE_NAME (t))
3230 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3233 delete id.cb.decl_map;
3236 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
3237 if (gimple_bind_block (scope))
3238 declare_vars (root->debug_var_chain, scope, true);
3239 else
3240 BLOCK_VARS (DECL_INITIAL (root->context))
3241 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
3242 root->debug_var_chain);
3245 /* Fold the rewritten MEM_REF trees. */
3246 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
3248 /* Dump the translated tree function. */
3249 if (dump_file)
3251 fputs ("\n\n", dump_file);
3252 dump_function_to_file (root->context, dump_file, dump_flags);
3256 static void
3257 finalize_nesting_tree (struct nesting_info *root)
3259 struct nesting_info *n;
3260 FOR_EACH_NEST_INFO (n, root)
3261 finalize_nesting_tree_1 (n);
3264 /* Unnest the nodes and pass them to cgraph. */
3266 static void
3267 unnest_nesting_tree_1 (struct nesting_info *root)
3269 struct cgraph_node *node = cgraph_node::get (root->context);
3271 /* For nested functions update the cgraph to reflect unnesting.
3272 We also delay finalizing of these functions up to this point. */
3273 if (node->origin)
3275 node->unnest ();
3276 cgraph_node::finalize_function (root->context, true);
3280 static void
3281 unnest_nesting_tree (struct nesting_info *root)
3283 struct nesting_info *n;
3284 FOR_EACH_NEST_INFO (n, root)
3285 unnest_nesting_tree_1 (n);
3288 /* Free the data structures allocated during this pass. */
3290 static void
3291 free_nesting_tree (struct nesting_info *root)
3293 struct nesting_info *node, *next;
3295 node = iter_nestinfo_start (root);
3298 next = iter_nestinfo_next (node);
3299 delete node->var_map;
3300 delete node->field_map;
3301 delete node->mem_refs;
3302 free (node);
3303 node = next;
3305 while (node);
3308 /* Gimplify a function and all its nested functions. */
3309 static void
3310 gimplify_all_functions (struct cgraph_node *root)
3312 struct cgraph_node *iter;
3313 if (!gimple_body (root->decl))
3314 gimplify_function_tree (root->decl);
3315 for (iter = root->nested; iter; iter = iter->next_nested)
3316 gimplify_all_functions (iter);
3319 /* Main entry point for this pass. Process FNDECL and all of its nested
3320 subroutines and turn them into something less tightly bound. */
3322 void
3323 lower_nested_functions (tree fndecl)
3325 struct cgraph_node *cgn;
3326 struct nesting_info *root;
3328 /* If there are no nested functions, there's nothing to do. */
3329 cgn = cgraph_node::get (fndecl);
3330 if (!cgn->nested)
3331 return;
3333 gimplify_all_functions (cgn);
3335 dump_file = dump_begin (TDI_nested, &dump_flags);
3336 if (dump_file)
3337 fprintf (dump_file, "\n;; Function %s\n\n",
3338 lang_hooks.decl_printable_name (fndecl, 2));
3340 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3341 root = create_nesting_tree (cgn);
3343 walk_all_functions (convert_nonlocal_reference_stmt,
3344 convert_nonlocal_reference_op,
3345 root);
3346 walk_all_functions (convert_local_reference_stmt,
3347 convert_local_reference_op,
3348 root);
3349 walk_all_functions (convert_nl_goto_reference, NULL, root);
3350 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3352 convert_all_function_calls (root);
3353 finalize_nesting_tree (root);
3354 unnest_nesting_tree (root);
3356 free_nesting_tree (root);
3357 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3359 if (dump_file)
3361 dump_end (TDI_nested, dump_file);
3362 dump_file = NULL;
3366 #include "gt-tree-nested.h"