1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 #include "stringpool.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
35 #include "tree-inline.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
47 /* The object of this pass is to lower the representation of a set of nested
48 functions in order to expose all of the gory details of the various
49 nonlocal references. We want to do this sooner rather than later, in
50 order to give us more freedom in emitting all of the functions in question.
52 Back in olden times, when gcc was young, we developed an insanely
53 complicated scheme whereby variables which were referenced nonlocally
54 were forced to live in the stack of the declaring function, and then
55 the nested functions magically discovered where these variables were
56 placed. In order for this scheme to function properly, it required
57 that the outer function be partially expanded, then we switch to
58 compiling the inner function, and once done with those we switch back
59 to compiling the outer function. Such delicate ordering requirements
60 makes it difficult to do whole translation unit optimizations
61 involving such functions.
63 The implementation here is much more direct. Everything that can be
64 referenced by an inner function is a member of an explicitly created
65 structure herein called the "nonlocal frame struct". The incoming
66 static chain for a nested function is a pointer to this struct in
67 the parent. In this way, we settle on known offsets from a known
68 base, and so are decoupled from the logic that places objects in the
69 function's stack frame. More importantly, we don't have to wait for
70 that to happen -- since the compilation of the inner function is no
71 longer tied to a real stack frame, the nonlocal frame struct can be
72 allocated anywhere. Which means that the outer function is now
75 Theory of operation here is very simple. Iterate over all the
76 statements in all the functions (depth first) several times,
77 allocating structures and fields on demand. In general we want to
78 examine inner functions first, so that we can avoid making changes
79 to outer functions which are unnecessary.
81 The order of the passes matters a bit, in that later passes will be
82 skipped if it is discovered that the functions don't actually interact
83 at all. That is, they're nested in the lexical sense but could have
84 been written as independent functions without change. */
89 struct nesting_info
*outer
;
90 struct nesting_info
*inner
;
91 struct nesting_info
*next
;
93 hash_map
<tree
, tree
> *field_map
;
94 hash_map
<tree
, tree
> *var_map
;
95 hash_set
<tree
*> *mem_refs
;
96 bitmap suppress_expansion
;
99 tree new_local_var_chain
;
100 tree debug_var_chain
;
108 bool any_parm_remapped
;
109 bool any_tramp_created
;
110 bool any_descr_created
;
111 char static_chain_added
;
115 /* Iterate over the nesting tree, starting with ROOT, depth first. */
117 static inline struct nesting_info
*
118 iter_nestinfo_start (struct nesting_info
*root
)
125 static inline struct nesting_info
*
126 iter_nestinfo_next (struct nesting_info
*node
)
129 return iter_nestinfo_start (node
->next
);
133 #define FOR_EACH_NEST_INFO(I, ROOT) \
134 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
136 /* Obstack used for the bitmaps in the struct above. */
137 static struct bitmap_obstack nesting_info_bitmap_obstack
;
140 /* We're working in so many different function contexts simultaneously,
141 that create_tmp_var is dangerous. Prevent mishap. */
142 #define create_tmp_var cant_use_create_tmp_var_here_dummy
144 /* Like create_tmp_var, except record the variable for registration at
145 the given nesting level. */
148 create_tmp_var_for (struct nesting_info
*info
, tree type
, const char *prefix
)
152 /* If the type is of variable size or a type which must be created by the
153 frontend, something is wrong. Note that we explicitly allow
154 incomplete types here, since we create them ourselves here. */
155 gcc_assert (!TREE_ADDRESSABLE (type
));
156 gcc_assert (!TYPE_SIZE_UNIT (type
)
157 || TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
);
159 tmp_var
= create_tmp_var_raw (type
, prefix
);
160 DECL_CONTEXT (tmp_var
) = info
->context
;
161 DECL_CHAIN (tmp_var
) = info
->new_local_var_chain
;
162 DECL_SEEN_IN_BIND_EXPR_P (tmp_var
) = 1;
163 if (TREE_CODE (type
) == COMPLEX_TYPE
164 || TREE_CODE (type
) == VECTOR_TYPE
)
165 DECL_GIMPLE_REG_P (tmp_var
) = 1;
167 info
->new_local_var_chain
= tmp_var
;
172 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
175 build_simple_mem_ref_notrap (tree ptr
)
177 tree t
= build_simple_mem_ref (ptr
);
178 TREE_THIS_NOTRAP (t
) = 1;
182 /* Take the address of EXP to be used within function CONTEXT.
183 Mark it for addressability as necessary. */
186 build_addr (tree exp
)
188 mark_addressable (exp
);
189 return build_fold_addr_expr (exp
);
192 /* Insert FIELD into TYPE, sorted by alignment requirements. */
195 insert_field_into_struct (tree type
, tree field
)
199 DECL_CONTEXT (field
) = type
;
201 for (p
= &TYPE_FIELDS (type
); *p
; p
= &DECL_CHAIN (*p
))
202 if (DECL_ALIGN (field
) >= DECL_ALIGN (*p
))
205 DECL_CHAIN (field
) = *p
;
208 /* Set correct alignment for frame struct type. */
209 if (TYPE_ALIGN (type
) < DECL_ALIGN (field
))
210 SET_TYPE_ALIGN (type
, DECL_ALIGN (field
));
213 /* Build or return the RECORD_TYPE that describes the frame state that is
214 shared between INFO->CONTEXT and its nested functions. This record will
215 not be complete until finalize_nesting_tree; up until that point we'll
216 be adding fields as necessary.
218 We also build the DECL that represents this frame in the function. */
221 get_frame_type (struct nesting_info
*info
)
223 tree type
= info
->frame_type
;
228 type
= make_node (RECORD_TYPE
);
230 name
= concat ("FRAME.",
231 IDENTIFIER_POINTER (DECL_NAME (info
->context
)),
233 TYPE_NAME (type
) = get_identifier (name
);
236 info
->frame_type
= type
;
238 /* Do not put info->frame_decl on info->new_local_var_chain,
239 so that we can declare it in the lexical blocks, which
240 makes sure virtual regs that end up appearing in its RTL
241 expression get substituted in instantiate_virtual_regs. */
242 info
->frame_decl
= create_tmp_var_raw (type
, "FRAME");
243 DECL_CONTEXT (info
->frame_decl
) = info
->context
;
244 DECL_NONLOCAL_FRAME (info
->frame_decl
) = 1;
245 DECL_SEEN_IN_BIND_EXPR_P (info
->frame_decl
) = 1;
247 /* ??? Always make it addressable for now, since it is meant to
248 be pointed to by the static chain pointer. This pessimizes
249 when it turns out that no static chains are needed because
250 the nested functions referencing non-local variables are not
251 reachable, but the true pessimization is to create the non-
252 local frame structure in the first place. */
253 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
259 /* Return true if DECL should be referenced by pointer in the non-local frame
263 use_pointer_in_frame (tree decl
)
265 if (TREE_CODE (decl
) == PARM_DECL
)
267 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
268 sized DECLs, and inefficient to copy large aggregates. Don't bother
269 moving anything but scalar parameters. */
270 return AGGREGATE_TYPE_P (TREE_TYPE (decl
));
274 /* Variable-sized DECLs can only come from OMP clauses at this point
275 since the gimplifier has already turned the regular variables into
276 pointers. Do the same as the gimplifier. */
277 return !DECL_SIZE (decl
) || TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
;
281 /* Given DECL, a non-locally accessed variable, find or create a field
282 in the non-local frame structure for the given nesting context. */
285 lookup_field_for_decl (struct nesting_info
*info
, tree decl
,
286 enum insert_option insert
)
288 gcc_checking_assert (decl_function_context (decl
) == info
->context
);
290 if (insert
== NO_INSERT
)
292 tree
*slot
= info
->field_map
->get (decl
);
293 return slot
? *slot
: NULL_TREE
;
296 tree
*slot
= &info
->field_map
->get_or_insert (decl
);
299 tree type
= get_frame_type (info
);
300 tree field
= make_node (FIELD_DECL
);
301 DECL_NAME (field
) = DECL_NAME (decl
);
303 if (use_pointer_in_frame (decl
))
305 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
306 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
307 DECL_NONADDRESSABLE_P (field
) = 1;
311 TREE_TYPE (field
) = TREE_TYPE (decl
);
312 DECL_SOURCE_LOCATION (field
) = DECL_SOURCE_LOCATION (decl
);
313 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
314 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
315 TREE_ADDRESSABLE (field
) = TREE_ADDRESSABLE (decl
);
316 DECL_NONADDRESSABLE_P (field
) = !TREE_ADDRESSABLE (decl
);
317 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
319 /* Declare the transformation and adjust the original DECL. For a
320 variable or for a parameter when not optimizing, we make it point
321 to the field in the frame directly. For a parameter, we don't do
322 it when optimizing because the variable tracking pass will already
324 if (VAR_P (decl
) || !optimize
)
327 = build3 (COMPONENT_REF
, TREE_TYPE (field
), info
->frame_decl
,
330 /* If the next declaration is a PARM_DECL pointing to the DECL,
331 we need to adjust its VALUE_EXPR directly, since chains of
332 VALUE_EXPRs run afoul of garbage collection. This occurs
333 in Ada for Out parameters that aren't copied in. */
334 tree next
= DECL_CHAIN (decl
);
336 && TREE_CODE (next
) == PARM_DECL
337 && DECL_HAS_VALUE_EXPR_P (next
)
338 && DECL_VALUE_EXPR (next
) == decl
)
339 SET_DECL_VALUE_EXPR (next
, x
);
341 SET_DECL_VALUE_EXPR (decl
, x
);
342 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
346 insert_field_into_struct (type
, field
);
349 if (TREE_CODE (decl
) == PARM_DECL
)
350 info
->any_parm_remapped
= true;
356 /* Build or return the variable that holds the static chain within
357 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
360 get_chain_decl (struct nesting_info
*info
)
362 tree decl
= info
->chain_decl
;
368 type
= get_frame_type (info
->outer
);
369 type
= build_pointer_type (type
);
371 /* Note that this variable is *not* entered into any BIND_EXPR;
372 the construction of this variable is handled specially in
373 expand_function_start and initialize_inlined_parameters.
374 Note also that it's represented as a parameter. This is more
375 close to the truth, since the initial value does come from
377 decl
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
378 PARM_DECL
, create_tmp_var_name ("CHAIN"), type
);
379 DECL_ARTIFICIAL (decl
) = 1;
380 DECL_IGNORED_P (decl
) = 1;
381 TREE_USED (decl
) = 1;
382 DECL_CONTEXT (decl
) = info
->context
;
383 DECL_ARG_TYPE (decl
) = type
;
385 /* Tell tree-inline.c that we never write to this variable, so
386 it can copy-prop the replacement value immediately. */
387 TREE_READONLY (decl
) = 1;
389 info
->chain_decl
= decl
;
392 && (dump_flags
& TDF_DETAILS
)
393 && !DECL_STATIC_CHAIN (info
->context
))
394 fprintf (dump_file
, "Setting static-chain for %s\n",
395 lang_hooks
.decl_printable_name (info
->context
, 2));
397 DECL_STATIC_CHAIN (info
->context
) = 1;
402 /* Build or return the field within the non-local frame state that holds
403 the static chain for INFO->CONTEXT. This is the way to walk back up
404 multiple nesting levels. */
407 get_chain_field (struct nesting_info
*info
)
409 tree field
= info
->chain_field
;
413 tree type
= build_pointer_type (get_frame_type (info
->outer
));
415 field
= make_node (FIELD_DECL
);
416 DECL_NAME (field
) = get_identifier ("__chain");
417 TREE_TYPE (field
) = type
;
418 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
419 DECL_NONADDRESSABLE_P (field
) = 1;
421 insert_field_into_struct (get_frame_type (info
), field
);
423 info
->chain_field
= field
;
426 && (dump_flags
& TDF_DETAILS
)
427 && !DECL_STATIC_CHAIN (info
->context
))
428 fprintf (dump_file
, "Setting static-chain for %s\n",
429 lang_hooks
.decl_printable_name (info
->context
, 2));
431 DECL_STATIC_CHAIN (info
->context
) = 1;
436 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
439 init_tmp_var_with_call (struct nesting_info
*info
, gimple_stmt_iterator
*gsi
,
444 t
= create_tmp_var_for (info
, gimple_call_return_type (call
), NULL
);
445 gimple_call_set_lhs (call
, t
);
446 if (! gsi_end_p (*gsi
))
447 gimple_set_location (call
, gimple_location (gsi_stmt (*gsi
)));
448 gsi_insert_before (gsi
, call
, GSI_SAME_STMT
);
454 /* Copy EXP into a temporary. Allocate the temporary in the context of
455 INFO and insert the initialization statement before GSI. */
458 init_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
463 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
464 stmt
= gimple_build_assign (t
, exp
);
465 if (! gsi_end_p (*gsi
))
466 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
467 gsi_insert_before_without_update (gsi
, stmt
, GSI_SAME_STMT
);
473 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
476 gsi_gimplify_val (struct nesting_info
*info
, tree exp
,
477 gimple_stmt_iterator
*gsi
)
479 if (is_gimple_val (exp
))
482 return init_tmp_var (info
, exp
, gsi
);
485 /* Similarly, but copy from the temporary and insert the statement
486 after the iterator. */
489 save_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
494 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
495 stmt
= gimple_build_assign (exp
, t
);
496 if (! gsi_end_p (*gsi
))
497 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
498 gsi_insert_after_without_update (gsi
, stmt
, GSI_SAME_STMT
);
503 /* Build or return the type used to represent a nested function trampoline. */
505 static GTY(()) tree trampoline_type
;
508 get_trampoline_type (struct nesting_info
*info
)
510 unsigned align
, size
;
514 return trampoline_type
;
516 align
= TRAMPOLINE_ALIGNMENT
;
517 size
= TRAMPOLINE_SIZE
;
519 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
520 then allocate extra space so that we can do dynamic alignment. */
521 if (align
> STACK_BOUNDARY
)
523 size
+= ((align
/BITS_PER_UNIT
) - 1) & -(STACK_BOUNDARY
/BITS_PER_UNIT
);
524 align
= STACK_BOUNDARY
;
527 t
= build_index_type (size_int (size
- 1));
528 t
= build_array_type (char_type_node
, t
);
529 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
530 FIELD_DECL
, get_identifier ("__data"), t
);
531 SET_DECL_ALIGN (t
, align
);
532 DECL_USER_ALIGN (t
) = 1;
534 trampoline_type
= make_node (RECORD_TYPE
);
535 TYPE_NAME (trampoline_type
) = get_identifier ("__builtin_trampoline");
536 TYPE_FIELDS (trampoline_type
) = t
;
537 layout_type (trampoline_type
);
538 DECL_CONTEXT (t
) = trampoline_type
;
540 return trampoline_type
;
543 /* Build or return the type used to represent a nested function descriptor. */
545 static GTY(()) tree descriptor_type
;
548 get_descriptor_type (struct nesting_info
*info
)
550 /* The base alignment is that of a function. */
551 const unsigned align
= FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY
);
555 return descriptor_type
;
557 t
= build_index_type (integer_one_node
);
558 t
= build_array_type (ptr_type_node
, t
);
559 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
560 FIELD_DECL
, get_identifier ("__data"), t
);
561 SET_DECL_ALIGN (t
, MAX (TYPE_ALIGN (ptr_type_node
), align
));
562 DECL_USER_ALIGN (t
) = 1;
564 descriptor_type
= make_node (RECORD_TYPE
);
565 TYPE_NAME (descriptor_type
) = get_identifier ("__builtin_descriptor");
566 TYPE_FIELDS (descriptor_type
) = t
;
567 layout_type (descriptor_type
);
568 DECL_CONTEXT (t
) = descriptor_type
;
570 return descriptor_type
;
573 /* Given DECL, a nested function, find or create an element in the
574 var map for this function. */
577 lookup_element_for_decl (struct nesting_info
*info
, tree decl
,
578 enum insert_option insert
)
580 if (insert
== NO_INSERT
)
582 tree
*slot
= info
->var_map
->get (decl
);
583 return slot
? *slot
: NULL_TREE
;
586 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
588 *slot
= build_tree_list (NULL_TREE
, NULL_TREE
);
593 /* Given DECL, a nested function, create a field in the non-local
594 frame structure for this function. */
597 create_field_for_decl (struct nesting_info
*info
, tree decl
, tree type
)
599 tree field
= make_node (FIELD_DECL
);
600 DECL_NAME (field
) = DECL_NAME (decl
);
601 TREE_TYPE (field
) = type
;
602 TREE_ADDRESSABLE (field
) = 1;
603 insert_field_into_struct (get_frame_type (info
), field
);
607 /* Given DECL, a nested function, find or create a field in the non-local
608 frame structure for a trampoline for this function. */
611 lookup_tramp_for_decl (struct nesting_info
*info
, tree decl
,
612 enum insert_option insert
)
616 elt
= lookup_element_for_decl (info
, decl
, insert
);
620 field
= TREE_PURPOSE (elt
);
622 if (!field
&& insert
== INSERT
)
624 field
= create_field_for_decl (info
, decl
, get_trampoline_type (info
));
625 TREE_PURPOSE (elt
) = field
;
626 info
->any_tramp_created
= true;
632 /* Given DECL, a nested function, find or create a field in the non-local
633 frame structure for a descriptor for this function. */
636 lookup_descr_for_decl (struct nesting_info
*info
, tree decl
,
637 enum insert_option insert
)
641 elt
= lookup_element_for_decl (info
, decl
, insert
);
645 field
= TREE_VALUE (elt
);
647 if (!field
&& insert
== INSERT
)
649 field
= create_field_for_decl (info
, decl
, get_descriptor_type (info
));
650 TREE_VALUE (elt
) = field
;
651 info
->any_descr_created
= true;
657 /* Build or return the field within the non-local frame state that holds
658 the non-local goto "jmp_buf". The buffer itself is maintained by the
659 rtl middle-end as dynamic stack space is allocated. */
662 get_nl_goto_field (struct nesting_info
*info
)
664 tree field
= info
->nl_goto_field
;
670 /* For __builtin_nonlocal_goto, we need N words. The first is the
671 frame pointer, the rest is for the target's stack pointer save
672 area. The number of words is controlled by STACK_SAVEAREA_MODE;
673 not the best interface, but it'll do for now. */
674 if (Pmode
== ptr_mode
)
675 type
= ptr_type_node
;
677 type
= lang_hooks
.types
.type_for_mode (Pmode
, 1);
680 = as_a
<scalar_int_mode
> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
));
681 size
= GET_MODE_SIZE (mode
);
682 size
= size
/ GET_MODE_SIZE (Pmode
);
685 type
= build_array_type
686 (type
, build_index_type (size_int (size
)));
688 field
= make_node (FIELD_DECL
);
689 DECL_NAME (field
) = get_identifier ("__nl_goto_buf");
690 TREE_TYPE (field
) = type
;
691 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
692 TREE_ADDRESSABLE (field
) = 1;
694 insert_field_into_struct (get_frame_type (info
), field
);
696 info
->nl_goto_field
= field
;
702 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
705 walk_body (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
706 struct nesting_info
*info
, gimple_seq
*pseq
)
708 struct walk_stmt_info wi
;
710 memset (&wi
, 0, sizeof (wi
));
713 walk_gimple_seq_mod (pseq
, callback_stmt
, callback_op
, &wi
);
717 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
720 walk_function (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
721 struct nesting_info
*info
)
723 gimple_seq body
= gimple_body (info
->context
);
724 walk_body (callback_stmt
, callback_op
, info
, &body
);
725 gimple_set_body (info
->context
, body
);
728 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
731 walk_gimple_omp_for (gomp_for
*for_stmt
,
732 walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
733 struct nesting_info
*info
)
735 struct walk_stmt_info wi
;
740 walk_body (callback_stmt
, callback_op
, info
, gimple_omp_for_pre_body_ptr (for_stmt
));
743 memset (&wi
, 0, sizeof (wi
));
745 wi
.gsi
= gsi_last (seq
);
747 for (i
= 0; i
< gimple_omp_for_collapse (for_stmt
); i
++)
750 walk_tree (gimple_omp_for_index_ptr (for_stmt
, i
), callback_op
,
754 walk_tree (gimple_omp_for_initial_ptr (for_stmt
, i
), callback_op
,
759 walk_tree (gimple_omp_for_final_ptr (for_stmt
, i
), callback_op
,
762 t
= gimple_omp_for_incr (for_stmt
, i
);
763 gcc_assert (BINARY_CLASS_P (t
));
765 walk_tree (&TREE_OPERAND (t
, 0), callback_op
, &wi
, NULL
);
768 walk_tree (&TREE_OPERAND (t
, 1), callback_op
, &wi
, NULL
);
771 seq
= gsi_seq (wi
.gsi
);
772 if (!gimple_seq_empty_p (seq
))
774 gimple_seq pre_body
= gimple_omp_for_pre_body (for_stmt
);
775 annotate_all_with_location (seq
, gimple_location (for_stmt
));
776 gimple_seq_add_seq (&pre_body
, seq
);
777 gimple_omp_for_set_pre_body (for_stmt
, pre_body
);
781 /* Similarly for ROOT and all functions nested underneath, depth first. */
784 walk_all_functions (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
785 struct nesting_info
*root
)
787 struct nesting_info
*n
;
788 FOR_EACH_NEST_INFO (n
, root
)
789 walk_function (callback_stmt
, callback_op
, n
);
793 /* We have to check for a fairly pathological case. The operands of function
794 nested function are to be interpreted in the context of the enclosing
795 function. So if any are variably-sized, they will get remapped when the
796 enclosing function is inlined. But that remapping would also have to be
797 done in the types of the PARM_DECLs of the nested function, meaning the
798 argument types of that function will disagree with the arguments in the
799 calls to that function. So we'd either have to make a copy of the nested
800 function corresponding to each time the enclosing function was inlined or
801 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
802 function. The former is not practical. The latter would still require
803 detecting this case to know when to add the conversions. So, for now at
804 least, we don't inline such an enclosing function.
806 We have to do that check recursively, so here return indicating whether
807 FNDECL has such a nested function. ORIG_FN is the function we were
808 trying to inline to use for checking whether any argument is variably
809 modified by anything in it.
811 It would be better to do this in tree-inline.c so that we could give
812 the appropriate warning for why a function can't be inlined, but that's
813 too late since the nesting structure has already been flattened and
814 adding a flag just to record this fact seems a waste of a flag. */
817 check_for_nested_with_variably_modified (tree fndecl
, tree orig_fndecl
)
819 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
822 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
824 for (arg
= DECL_ARGUMENTS (cgn
->decl
); arg
; arg
= DECL_CHAIN (arg
))
825 if (variably_modified_type_p (TREE_TYPE (arg
), orig_fndecl
))
828 if (check_for_nested_with_variably_modified (cgn
->decl
,
836 /* Construct our local datastructure describing the function nesting
837 tree rooted by CGN. */
839 static struct nesting_info
*
840 create_nesting_tree (struct cgraph_node
*cgn
)
842 struct nesting_info
*info
= XCNEW (struct nesting_info
);
843 info
->field_map
= new hash_map
<tree
, tree
>;
844 info
->var_map
= new hash_map
<tree
, tree
>;
845 info
->mem_refs
= new hash_set
<tree
*>;
846 info
->suppress_expansion
= BITMAP_ALLOC (&nesting_info_bitmap_obstack
);
847 info
->context
= cgn
->decl
;
848 info
->thunk_p
= cgn
->thunk
.thunk_p
;
850 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
852 struct nesting_info
*sub
= create_nesting_tree (cgn
);
854 sub
->next
= info
->inner
;
858 /* See discussion at check_for_nested_with_variably_modified for a
859 discussion of why this has to be here. */
860 if (check_for_nested_with_variably_modified (info
->context
, info
->context
))
861 DECL_UNINLINABLE (info
->context
) = true;
866 /* Return an expression computing the static chain for TARGET_CONTEXT
867 from INFO->CONTEXT. Insert any necessary computations before TSI. */
870 get_static_chain (struct nesting_info
*info
, tree target_context
,
871 gimple_stmt_iterator
*gsi
)
873 struct nesting_info
*i
;
876 if (info
->context
== target_context
)
878 x
= build_addr (info
->frame_decl
);
879 info
->static_chain_added
|= 1;
883 x
= get_chain_decl (info
);
884 info
->static_chain_added
|= 2;
886 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
888 tree field
= get_chain_field (i
);
890 x
= build_simple_mem_ref_notrap (x
);
891 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
892 x
= init_tmp_var (info
, x
, gsi
);
900 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
901 frame as seen from INFO->CONTEXT. Insert any necessary computations
905 get_frame_field (struct nesting_info
*info
, tree target_context
,
906 tree field
, gimple_stmt_iterator
*gsi
)
908 struct nesting_info
*i
;
911 if (info
->context
== target_context
)
913 /* Make sure frame_decl gets created. */
914 (void) get_frame_type (info
);
915 x
= info
->frame_decl
;
916 info
->static_chain_added
|= 1;
920 x
= get_chain_decl (info
);
921 info
->static_chain_added
|= 2;
923 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
925 tree field
= get_chain_field (i
);
927 x
= build_simple_mem_ref_notrap (x
);
928 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
929 x
= init_tmp_var (info
, x
, gsi
);
932 x
= build_simple_mem_ref_notrap (x
);
935 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
939 static void note_nonlocal_vla_type (struct nesting_info
*info
, tree type
);
941 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
942 in the nested function with DECL_VALUE_EXPR set to reference the true
943 variable in the parent function. This is used both for debug info
944 and in OMP lowering. */
947 get_nonlocal_debug_decl (struct nesting_info
*info
, tree decl
)
950 struct nesting_info
*i
;
951 tree x
, field
, new_decl
;
953 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
958 target_context
= decl_function_context (decl
);
960 /* A copy of the code in get_frame_field, but without the temporaries. */
961 if (info
->context
== target_context
)
963 /* Make sure frame_decl gets created. */
964 (void) get_frame_type (info
);
965 x
= info
->frame_decl
;
967 info
->static_chain_added
|= 1;
971 x
= get_chain_decl (info
);
972 info
->static_chain_added
|= 2;
973 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
975 field
= get_chain_field (i
);
976 x
= build_simple_mem_ref_notrap (x
);
977 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
979 x
= build_simple_mem_ref_notrap (x
);
982 field
= lookup_field_for_decl (i
, decl
, INSERT
);
983 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
984 if (use_pointer_in_frame (decl
))
985 x
= build_simple_mem_ref_notrap (x
);
987 /* ??? We should be remapping types as well, surely. */
988 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
989 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
990 DECL_CONTEXT (new_decl
) = info
->context
;
991 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
992 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
993 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
994 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
995 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
996 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
997 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
998 if ((TREE_CODE (decl
) == PARM_DECL
999 || TREE_CODE (decl
) == RESULT_DECL
1001 && DECL_BY_REFERENCE (decl
))
1002 DECL_BY_REFERENCE (new_decl
) = 1;
1004 SET_DECL_VALUE_EXPR (new_decl
, x
);
1005 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1008 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1009 info
->debug_var_chain
= new_decl
;
1012 && info
->context
!= target_context
1013 && variably_modified_type_p (TREE_TYPE (decl
), NULL
))
1014 note_nonlocal_vla_type (info
, TREE_TYPE (decl
));
1020 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1021 and PARM_DECLs that belong to outer functions.
1023 The rewrite will involve some number of structure accesses back up
1024 the static chain. E.g. for a variable FOO up one nesting level it'll
1025 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
1026 indirections apply to decls for which use_pointer_in_frame is true. */
1029 convert_nonlocal_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1031 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1032 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1036 switch (TREE_CODE (t
))
1039 /* Non-automatic variables are never processed. */
1040 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1046 tree x
, target_context
= decl_function_context (t
);
1048 if (info
->context
== target_context
)
1053 if (bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1054 x
= get_nonlocal_debug_decl (info
, t
);
1057 struct nesting_info
*i
= info
;
1058 while (i
&& i
->context
!= target_context
)
1060 /* If none of the outer contexts is the target context, this means
1061 that the VAR or PARM_DECL is referenced in a wrong context. */
1063 internal_error ("%s from %s referenced in %s",
1064 IDENTIFIER_POINTER (DECL_NAME (t
)),
1065 IDENTIFIER_POINTER (DECL_NAME (target_context
)),
1066 IDENTIFIER_POINTER (DECL_NAME (info
->context
)));
1068 x
= lookup_field_for_decl (i
, t
, INSERT
);
1069 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
1070 if (use_pointer_in_frame (t
))
1072 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1073 x
= build_simple_mem_ref_notrap (x
);
1080 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1082 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1090 /* We're taking the address of a label from a parent function, but
1091 this is not itself a non-local goto. Mark the label such that it
1092 will not be deleted, much as we would with a label address in
1094 if (decl_function_context (t
) != info
->context
)
1095 FORCED_LABEL (t
) = 1;
1100 bool save_val_only
= wi
->val_only
;
1102 wi
->val_only
= false;
1104 wi
->changed
= false;
1105 walk_tree (&TREE_OPERAND (t
, 0), convert_nonlocal_reference_op
, wi
, 0);
1106 wi
->val_only
= true;
1112 /* If we changed anything, we might no longer be directly
1113 referencing a decl. */
1114 save_context
= current_function_decl
;
1115 current_function_decl
= info
->context
;
1116 recompute_tree_invariant_for_addr_expr (t
);
1117 current_function_decl
= save_context
;
1119 /* If the callback converted the address argument in a context
1120 where we only accept variables (and min_invariant, presumably),
1121 then compute the address into a temporary. */
1123 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1133 case ARRAY_RANGE_REF
:
1135 /* Go down this entire nest and just look at the final prefix and
1136 anything that describes the references. Otherwise, we lose track
1137 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1138 wi
->val_only
= true;
1140 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1142 if (TREE_CODE (t
) == COMPONENT_REF
)
1143 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
, wi
,
1145 else if (TREE_CODE (t
) == ARRAY_REF
1146 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1148 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1150 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1152 walk_tree (&TREE_OPERAND (t
, 3), convert_nonlocal_reference_op
,
1156 wi
->val_only
= false;
1157 walk_tree (tp
, convert_nonlocal_reference_op
, wi
, NULL
);
1160 case VIEW_CONVERT_EXPR
:
1161 /* Just request to look at the subtrees, leaving val_only and lhs
1162 untouched. This might actually be for !val_only + lhs, in which
1163 case we don't want to force a replacement by a temporary. */
1168 if (!IS_TYPE_OR_DECL_P (t
))
1171 wi
->val_only
= true;
/* Forward declaration of the statement-walk callback: it is mutually
   recursive with convert_nonlocal_omp_clauses below, which re-enters it
   through walk_body on reduction/lastprivate/linear GIMPLE sequences.  */
1180 static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator
*, bool *,
1181 struct walk_stmt_info
*);
1183 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1184 and PARM_DECLs that belong to outer functions. */
1187 convert_nonlocal_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1189 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1190 bool need_chain
= false, need_stmts
= false;
1193 bitmap new_suppress
;
1195 new_suppress
= BITMAP_GGC_ALLOC ();
1196 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1198 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1200 switch (OMP_CLAUSE_CODE (clause
))
1202 case OMP_CLAUSE_REDUCTION
:
1203 case OMP_CLAUSE_IN_REDUCTION
:
1204 case OMP_CLAUSE_TASK_REDUCTION
:
1205 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1207 goto do_decl_clause
;
1209 case OMP_CLAUSE_LASTPRIVATE
:
1210 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1212 goto do_decl_clause
;
1214 case OMP_CLAUSE_LINEAR
:
1215 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1217 wi
->val_only
= true;
1219 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
),
1221 goto do_decl_clause
;
1223 case OMP_CLAUSE_PRIVATE
:
1224 case OMP_CLAUSE_FIRSTPRIVATE
:
1225 case OMP_CLAUSE_COPYPRIVATE
:
1226 case OMP_CLAUSE_SHARED
:
1227 case OMP_CLAUSE_TO_DECLARE
:
1228 case OMP_CLAUSE_LINK
:
1229 case OMP_CLAUSE_USE_DEVICE_PTR
:
1230 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1231 case OMP_CLAUSE_IS_DEVICE_PTR
:
1233 decl
= OMP_CLAUSE_DECL (clause
);
1235 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1237 if (decl_function_context (decl
) != info
->context
)
1239 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1240 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1241 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1242 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1243 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1248 case OMP_CLAUSE_SCHEDULE
:
1249 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1252 case OMP_CLAUSE_FINAL
:
1254 case OMP_CLAUSE_NUM_THREADS
:
1255 case OMP_CLAUSE_DEPEND
:
1256 case OMP_CLAUSE_DEVICE
:
1257 case OMP_CLAUSE_NUM_TEAMS
:
1258 case OMP_CLAUSE_THREAD_LIMIT
:
1259 case OMP_CLAUSE_SAFELEN
:
1260 case OMP_CLAUSE_SIMDLEN
:
1261 case OMP_CLAUSE_PRIORITY
:
1262 case OMP_CLAUSE_GRAINSIZE
:
1263 case OMP_CLAUSE_NUM_TASKS
:
1264 case OMP_CLAUSE_HINT
:
1265 case OMP_CLAUSE_NUM_GANGS
:
1266 case OMP_CLAUSE_NUM_WORKERS
:
1267 case OMP_CLAUSE_VECTOR_LENGTH
:
1268 case OMP_CLAUSE_GANG
:
1269 case OMP_CLAUSE_WORKER
:
1270 case OMP_CLAUSE_VECTOR
:
1271 case OMP_CLAUSE_ASYNC
:
1272 case OMP_CLAUSE_WAIT
:
1273 /* Several OpenACC clauses have optional arguments. Check if they
1275 if (OMP_CLAUSE_OPERAND (clause
, 0))
1277 wi
->val_only
= true;
1279 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1283 /* The gang clause accepts two arguments. */
1284 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
1285 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
1287 wi
->val_only
= true;
1289 convert_nonlocal_reference_op
1290 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
1294 case OMP_CLAUSE_DIST_SCHEDULE
:
1295 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1297 wi
->val_only
= true;
1299 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1304 case OMP_CLAUSE_MAP
:
1306 case OMP_CLAUSE_FROM
:
1307 if (OMP_CLAUSE_SIZE (clause
))
1309 wi
->val_only
= true;
1311 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause
),
1314 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1315 goto do_decl_clause
;
1316 wi
->val_only
= true;
1318 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_nonlocal_reference_op
,
1322 case OMP_CLAUSE_ALIGNED
:
1323 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1325 wi
->val_only
= true;
1327 convert_nonlocal_reference_op
1328 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1331 case OMP_CLAUSE_NONTEMPORAL
:
1332 /* Like do_decl_clause, but don't add any suppression. */
1333 decl
= OMP_CLAUSE_DECL (clause
);
1335 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1337 if (decl_function_context (decl
) != info
->context
)
1339 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1344 case OMP_CLAUSE_NOWAIT
:
1345 case OMP_CLAUSE_ORDERED
:
1346 case OMP_CLAUSE_DEFAULT
:
1347 case OMP_CLAUSE_COPYIN
:
1348 case OMP_CLAUSE_COLLAPSE
:
1349 case OMP_CLAUSE_TILE
:
1350 case OMP_CLAUSE_UNTIED
:
1351 case OMP_CLAUSE_MERGEABLE
:
1352 case OMP_CLAUSE_PROC_BIND
:
1353 case OMP_CLAUSE_NOGROUP
:
1354 case OMP_CLAUSE_THREADS
:
1355 case OMP_CLAUSE_SIMD
:
1356 case OMP_CLAUSE_DEFAULTMAP
:
1357 case OMP_CLAUSE_ORDER
:
1358 case OMP_CLAUSE_SEQ
:
1359 case OMP_CLAUSE_INDEPENDENT
:
1360 case OMP_CLAUSE_AUTO
:
1361 case OMP_CLAUSE_IF_PRESENT
:
1362 case OMP_CLAUSE_FINALIZE
:
1363 case OMP_CLAUSE__CONDTEMP_
:
1364 case OMP_CLAUSE__SCANTEMP_
:
1367 /* The following clause belongs to the OpenACC cache directive, which
1368 is discarded during gimplification. */
1369 case OMP_CLAUSE__CACHE_
:
1370 /* The following clauses are only allowed in the OpenMP declare simd
1371 directive, so not seen here. */
1372 case OMP_CLAUSE_UNIFORM
:
1373 case OMP_CLAUSE_INBRANCH
:
1374 case OMP_CLAUSE_NOTINBRANCH
:
1375 /* The following clauses are only allowed on OpenMP cancel and
1376 cancellation point directives, which at this point have already
1377 been lowered into a function call. */
1378 case OMP_CLAUSE_FOR
:
1379 case OMP_CLAUSE_PARALLEL
:
1380 case OMP_CLAUSE_SECTIONS
:
1381 case OMP_CLAUSE_TASKGROUP
:
1382 /* The following clauses are only added during OMP lowering; nested
1383 function decomposition happens before that. */
1384 case OMP_CLAUSE__LOOPTEMP_
:
1385 case OMP_CLAUSE__REDUCTEMP_
:
1386 case OMP_CLAUSE__SIMDUID_
:
1387 case OMP_CLAUSE__GRIDDIM_
:
1388 case OMP_CLAUSE__SIMT_
:
1389 /* Anything else. */
1395 info
->suppress_expansion
= new_suppress
;
1398 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1399 switch (OMP_CLAUSE_CODE (clause
))
1401 case OMP_CLAUSE_REDUCTION
:
1402 case OMP_CLAUSE_IN_REDUCTION
:
1403 case OMP_CLAUSE_TASK_REDUCTION
:
1404 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1407 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1408 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1410 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1411 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1413 walk_body (convert_nonlocal_reference_stmt
,
1414 convert_nonlocal_reference_op
, info
,
1415 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1416 walk_body (convert_nonlocal_reference_stmt
,
1417 convert_nonlocal_reference_op
, info
,
1418 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1419 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1421 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1422 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1427 case OMP_CLAUSE_LASTPRIVATE
:
1428 walk_body (convert_nonlocal_reference_stmt
,
1429 convert_nonlocal_reference_op
, info
,
1430 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1433 case OMP_CLAUSE_LINEAR
:
1434 walk_body (convert_nonlocal_reference_stmt
,
1435 convert_nonlocal_reference_op
, info
,
1436 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
1446 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1449 note_nonlocal_vla_type (struct nesting_info
*info
, tree type
)
1451 while (POINTER_TYPE_P (type
) && !TYPE_NAME (type
))
1452 type
= TREE_TYPE (type
);
1454 if (TYPE_NAME (type
)
1455 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1456 && DECL_ORIGINAL_TYPE (TYPE_NAME (type
)))
1457 type
= DECL_ORIGINAL_TYPE (TYPE_NAME (type
));
1459 while (POINTER_TYPE_P (type
)
1460 || TREE_CODE (type
) == VECTOR_TYPE
1461 || TREE_CODE (type
) == FUNCTION_TYPE
1462 || TREE_CODE (type
) == METHOD_TYPE
)
1463 type
= TREE_TYPE (type
);
1465 if (TREE_CODE (type
) == ARRAY_TYPE
)
1469 note_nonlocal_vla_type (info
, TREE_TYPE (type
));
1470 domain
= TYPE_DOMAIN (type
);
1473 t
= TYPE_MIN_VALUE (domain
);
1474 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1475 && decl_function_context (t
) != info
->context
)
1476 get_nonlocal_debug_decl (info
, t
);
1477 t
= TYPE_MAX_VALUE (domain
);
1478 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1479 && decl_function_context (t
) != info
->context
)
1480 get_nonlocal_debug_decl (info
, t
);
1485 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1486 PARM_DECLs that belong to outer functions. This handles statements
1487 that are not handled via the standard recursion done in
1488 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1489 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1490 operands of STMT have been handled by this function. */
1493 convert_nonlocal_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1494 struct walk_stmt_info
*wi
)
1496 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1497 tree save_local_var_chain
;
1498 bitmap save_suppress
;
1499 gimple
*stmt
= gsi_stmt (*gsi
);
1501 switch (gimple_code (stmt
))
1504 /* Don't walk non-local gotos for now. */
1505 if (TREE_CODE (gimple_goto_dest (stmt
)) != LABEL_DECL
)
1507 wi
->val_only
= true;
1509 *handled_ops_p
= false;
1514 case GIMPLE_OMP_TEAMS
:
1515 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
1517 save_suppress
= info
->suppress_expansion
;
1518 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
),
1520 walk_body (convert_nonlocal_reference_stmt
,
1521 convert_nonlocal_reference_op
, info
,
1522 gimple_omp_body_ptr (stmt
));
1523 info
->suppress_expansion
= save_suppress
;
1528 case GIMPLE_OMP_PARALLEL
:
1529 case GIMPLE_OMP_TASK
:
1530 save_suppress
= info
->suppress_expansion
;
1531 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1535 decl
= get_chain_decl (info
);
1536 c
= build_omp_clause (gimple_location (stmt
),
1537 OMP_CLAUSE_FIRSTPRIVATE
);
1538 OMP_CLAUSE_DECL (c
) = decl
;
1539 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1540 gimple_omp_taskreg_set_clauses (stmt
, c
);
1543 save_local_var_chain
= info
->new_local_var_chain
;
1544 info
->new_local_var_chain
= NULL
;
1546 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1547 info
, gimple_omp_body_ptr (stmt
));
1549 if (info
->new_local_var_chain
)
1550 declare_vars (info
->new_local_var_chain
,
1551 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1553 info
->new_local_var_chain
= save_local_var_chain
;
1554 info
->suppress_expansion
= save_suppress
;
1557 case GIMPLE_OMP_FOR
:
1558 save_suppress
= info
->suppress_expansion
;
1559 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1560 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
1561 convert_nonlocal_reference_stmt
,
1562 convert_nonlocal_reference_op
, info
);
1563 walk_body (convert_nonlocal_reference_stmt
,
1564 convert_nonlocal_reference_op
, info
, gimple_omp_body_ptr (stmt
));
1565 info
->suppress_expansion
= save_suppress
;
1568 case GIMPLE_OMP_SECTIONS
:
1569 save_suppress
= info
->suppress_expansion
;
1570 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1571 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1572 info
, gimple_omp_body_ptr (stmt
));
1573 info
->suppress_expansion
= save_suppress
;
1576 case GIMPLE_OMP_SINGLE
:
1577 save_suppress
= info
->suppress_expansion
;
1578 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1579 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1580 info
, gimple_omp_body_ptr (stmt
));
1581 info
->suppress_expansion
= save_suppress
;
1584 case GIMPLE_OMP_TASKGROUP
:
1585 save_suppress
= info
->suppress_expansion
;
1586 convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt
), wi
);
1587 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1588 info
, gimple_omp_body_ptr (stmt
));
1589 info
->suppress_expansion
= save_suppress
;
1592 case GIMPLE_OMP_TARGET
:
1593 if (!is_gimple_omp_offloaded (stmt
))
1595 save_suppress
= info
->suppress_expansion
;
1596 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1598 info
->suppress_expansion
= save_suppress
;
1599 walk_body (convert_nonlocal_reference_stmt
,
1600 convert_nonlocal_reference_op
, info
,
1601 gimple_omp_body_ptr (stmt
));
1604 save_suppress
= info
->suppress_expansion
;
1605 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1609 decl
= get_chain_decl (info
);
1610 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
1611 OMP_CLAUSE_DECL (c
) = decl
;
1612 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
1613 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
1614 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
1615 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
1618 save_local_var_chain
= info
->new_local_var_chain
;
1619 info
->new_local_var_chain
= NULL
;
1621 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1622 info
, gimple_omp_body_ptr (stmt
));
1624 if (info
->new_local_var_chain
)
1625 declare_vars (info
->new_local_var_chain
,
1626 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1628 info
->new_local_var_chain
= save_local_var_chain
;
1629 info
->suppress_expansion
= save_suppress
;
1632 case GIMPLE_OMP_SECTION
:
1633 case GIMPLE_OMP_MASTER
:
1634 case GIMPLE_OMP_ORDERED
:
1635 case GIMPLE_OMP_SCAN
:
1636 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1637 info
, gimple_omp_body_ptr (stmt
));
1642 gbind
*bind_stmt
= as_a
<gbind
*> (stmt
);
1644 for (tree var
= gimple_bind_vars (bind_stmt
); var
; var
= DECL_CHAIN (var
))
1645 if (TREE_CODE (var
) == NAMELIST_DECL
)
1647 /* Adjust decls mentioned in NAMELIST_DECL. */
1648 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
1652 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
1655 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1657 if (decl_function_context (decl
) != info
->context
)
1658 CONSTRUCTOR_ELT (decls
, i
)->value
1659 = get_nonlocal_debug_decl (info
, decl
);
1663 *handled_ops_p
= false;
1667 wi
->val_only
= true;
1669 *handled_ops_p
= false;
1673 if (gimple_clobber_p (stmt
))
1675 tree lhs
= gimple_assign_lhs (stmt
);
1677 && !(TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
1678 && decl_function_context (lhs
) != info
->context
)
1680 gsi_replace (gsi
, gimple_build_nop (), true);
1684 *handled_ops_p
= false;
1688 /* For every other statement that we are not interested in
1689 handling here, let the walker traverse the operands. */
1690 *handled_ops_p
= false;
1694 /* We have handled all of STMT operands, no need to traverse the operands. */
1695 *handled_ops_p
= true;
1700 /* A subroutine of convert_local_reference. Create a local variable
1701 in the parent function with DECL_VALUE_EXPR set to reference the
1702 field in FRAME. This is used both for debug info and in OMP
1706 get_local_debug_decl (struct nesting_info
*info
, tree decl
, tree field
)
1710 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
1714 /* Make sure frame_decl gets created. */
1715 (void) get_frame_type (info
);
1716 x
= info
->frame_decl
;
1717 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1719 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
1720 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1721 DECL_CONTEXT (new_decl
) = info
->context
;
1722 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1723 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1724 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1725 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1726 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1727 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1728 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1729 if ((TREE_CODE (decl
) == PARM_DECL
1730 || TREE_CODE (decl
) == RESULT_DECL
1732 && DECL_BY_REFERENCE (decl
))
1733 DECL_BY_REFERENCE (new_decl
) = 1;
1735 SET_DECL_VALUE_EXPR (new_decl
, x
);
1736 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1739 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1740 info
->debug_var_chain
= new_decl
;
1742 /* Do not emit debug info twice. */
1743 DECL_IGNORED_P (decl
) = 1;
1749 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1750 and PARM_DECLs that were referenced by inner nested functions.
1751 The rewrite will be a structure reference to the local frame variable. */
/* Forward declaration: convert_local_reference_stmt (defined further
   below) invokes this clause rewriter before its definition appears.  */
1753 static bool convert_local_omp_clauses (tree
*, struct walk_stmt_info
*);
1756 convert_local_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1758 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1759 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1760 tree t
= *tp
, field
, x
;
1764 switch (TREE_CODE (t
))
1767 /* Non-automatic variables are never processed. */
1768 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1773 if (t
!= info
->frame_decl
&& decl_function_context (t
) == info
->context
)
1775 /* If we copied a pointer to the frame, then the original decl
1776 is used unchanged in the parent function. */
1777 if (use_pointer_in_frame (t
))
1780 /* No need to transform anything if no child references the
1782 field
= lookup_field_for_decl (info
, t
, NO_INSERT
);
1787 if (bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1788 x
= get_local_debug_decl (info
, t
, field
);
1790 x
= get_frame_field (info
, info
->context
, field
, &wi
->gsi
);
1795 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1797 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1805 save_val_only
= wi
->val_only
;
1806 wi
->val_only
= false;
1808 wi
->changed
= false;
1809 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
, wi
, NULL
);
1810 wi
->val_only
= save_val_only
;
1812 /* If we converted anything ... */
1817 /* Then the frame decl is now addressable. */
1818 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
1820 save_context
= current_function_decl
;
1821 current_function_decl
= info
->context
;
1822 recompute_tree_invariant_for_addr_expr (t
);
1823 current_function_decl
= save_context
;
1825 /* If we are in a context where we only accept values, then
1826 compute the address into a temporary. */
1828 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1837 case ARRAY_RANGE_REF
:
1839 /* Go down this entire nest and just look at the final prefix and
1840 anything that describes the references. Otherwise, we lose track
1841 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1842 save_val_only
= wi
->val_only
;
1843 wi
->val_only
= true;
1845 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1847 if (TREE_CODE (t
) == COMPONENT_REF
)
1848 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1850 else if (TREE_CODE (t
) == ARRAY_REF
1851 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1853 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1855 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1857 walk_tree (&TREE_OPERAND (t
, 3), convert_local_reference_op
, wi
,
1861 wi
->val_only
= false;
1862 walk_tree (tp
, convert_local_reference_op
, wi
, NULL
);
1863 wi
->val_only
= save_val_only
;
1867 save_val_only
= wi
->val_only
;
1868 wi
->val_only
= true;
1870 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
,
1872 /* We need to re-fold the MEM_REF as component references as
1873 part of a ADDR_EXPR address are not allowed. But we cannot
1874 fold here, as the chain record type is not yet finalized. */
1875 if (TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
1876 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)))
1877 info
->mem_refs
->add (tp
);
1878 wi
->val_only
= save_val_only
;
1881 case VIEW_CONVERT_EXPR
:
1882 /* Just request to look at the subtrees, leaving val_only and lhs
1883 untouched. This might actually be for !val_only + lhs, in which
1884 case we don't want to force a replacement by a temporary. */
1889 if (!IS_TYPE_OR_DECL_P (t
))
1892 wi
->val_only
= true;
/* Forward declaration of the local-reference statement walker: it is
   mutually recursive with convert_local_omp_clauses, which walks
   reduction/lastprivate/linear GIMPLE sequences through walk_body.  */
1901 static tree
convert_local_reference_stmt (gimple_stmt_iterator
*, bool *,
1902 struct walk_stmt_info
*);
1904 /* Helper for convert_local_reference. Convert all the references in
1905 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1908 convert_local_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1910 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1911 bool need_frame
= false, need_stmts
= false;
1914 bitmap new_suppress
;
1916 new_suppress
= BITMAP_GGC_ALLOC ();
1917 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1919 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1921 switch (OMP_CLAUSE_CODE (clause
))
1923 case OMP_CLAUSE_REDUCTION
:
1924 case OMP_CLAUSE_IN_REDUCTION
:
1925 case OMP_CLAUSE_TASK_REDUCTION
:
1926 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1928 goto do_decl_clause
;
1930 case OMP_CLAUSE_LASTPRIVATE
:
1931 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1933 goto do_decl_clause
;
1935 case OMP_CLAUSE_LINEAR
:
1936 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1938 wi
->val_only
= true;
1940 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
), &dummy
,
1942 goto do_decl_clause
;
1944 case OMP_CLAUSE_PRIVATE
:
1945 case OMP_CLAUSE_FIRSTPRIVATE
:
1946 case OMP_CLAUSE_COPYPRIVATE
:
1947 case OMP_CLAUSE_SHARED
:
1948 case OMP_CLAUSE_TO_DECLARE
:
1949 case OMP_CLAUSE_LINK
:
1950 case OMP_CLAUSE_USE_DEVICE_PTR
:
1951 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1952 case OMP_CLAUSE_IS_DEVICE_PTR
:
1954 decl
= OMP_CLAUSE_DECL (clause
);
1956 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1958 if (decl_function_context (decl
) == info
->context
1959 && !use_pointer_in_frame (decl
))
1961 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1964 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1965 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1966 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1967 OMP_CLAUSE_DECL (clause
)
1968 = get_local_debug_decl (info
, decl
, field
);
1974 case OMP_CLAUSE_SCHEDULE
:
1975 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1978 case OMP_CLAUSE_FINAL
:
1980 case OMP_CLAUSE_NUM_THREADS
:
1981 case OMP_CLAUSE_DEPEND
:
1982 case OMP_CLAUSE_DEVICE
:
1983 case OMP_CLAUSE_NUM_TEAMS
:
1984 case OMP_CLAUSE_THREAD_LIMIT
:
1985 case OMP_CLAUSE_SAFELEN
:
1986 case OMP_CLAUSE_SIMDLEN
:
1987 case OMP_CLAUSE_PRIORITY
:
1988 case OMP_CLAUSE_GRAINSIZE
:
1989 case OMP_CLAUSE_NUM_TASKS
:
1990 case OMP_CLAUSE_HINT
:
1991 case OMP_CLAUSE_NUM_GANGS
:
1992 case OMP_CLAUSE_NUM_WORKERS
:
1993 case OMP_CLAUSE_VECTOR_LENGTH
:
1994 case OMP_CLAUSE_GANG
:
1995 case OMP_CLAUSE_WORKER
:
1996 case OMP_CLAUSE_VECTOR
:
1997 case OMP_CLAUSE_ASYNC
:
1998 case OMP_CLAUSE_WAIT
:
1999 /* Several OpenACC clauses have optional arguments. Check if they
2001 if (OMP_CLAUSE_OPERAND (clause
, 0))
2003 wi
->val_only
= true;
2005 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
2009 /* The gang clause accepts two arguments. */
2010 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
2011 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
2013 wi
->val_only
= true;
2015 convert_nonlocal_reference_op
2016 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
2020 case OMP_CLAUSE_DIST_SCHEDULE
:
2021 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
2023 wi
->val_only
= true;
2025 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
2030 case OMP_CLAUSE_MAP
:
2032 case OMP_CLAUSE_FROM
:
2033 if (OMP_CLAUSE_SIZE (clause
))
2035 wi
->val_only
= true;
2037 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause
),
2040 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
2041 goto do_decl_clause
;
2042 wi
->val_only
= true;
2044 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_local_reference_op
,
2048 case OMP_CLAUSE_ALIGNED
:
2049 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
2051 wi
->val_only
= true;
2053 convert_local_reference_op
2054 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
2057 case OMP_CLAUSE_NONTEMPORAL
:
2058 /* Like do_decl_clause, but don't add any suppression. */
2059 decl
= OMP_CLAUSE_DECL (clause
);
2061 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2063 if (decl_function_context (decl
) == info
->context
2064 && !use_pointer_in_frame (decl
))
2066 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2069 OMP_CLAUSE_DECL (clause
)
2070 = get_local_debug_decl (info
, decl
, field
);
2076 case OMP_CLAUSE_NOWAIT
:
2077 case OMP_CLAUSE_ORDERED
:
2078 case OMP_CLAUSE_DEFAULT
:
2079 case OMP_CLAUSE_COPYIN
:
2080 case OMP_CLAUSE_COLLAPSE
:
2081 case OMP_CLAUSE_TILE
:
2082 case OMP_CLAUSE_UNTIED
:
2083 case OMP_CLAUSE_MERGEABLE
:
2084 case OMP_CLAUSE_PROC_BIND
:
2085 case OMP_CLAUSE_NOGROUP
:
2086 case OMP_CLAUSE_THREADS
:
2087 case OMP_CLAUSE_SIMD
:
2088 case OMP_CLAUSE_DEFAULTMAP
:
2089 case OMP_CLAUSE_ORDER
:
2090 case OMP_CLAUSE_SEQ
:
2091 case OMP_CLAUSE_INDEPENDENT
:
2092 case OMP_CLAUSE_AUTO
:
2093 case OMP_CLAUSE_IF_PRESENT
:
2094 case OMP_CLAUSE_FINALIZE
:
2095 case OMP_CLAUSE__CONDTEMP_
:
2096 case OMP_CLAUSE__SCANTEMP_
:
2099 /* The following clause belongs to the OpenACC cache directive, which
2100 is discarded during gimplification. */
2101 case OMP_CLAUSE__CACHE_
:
2102 /* The following clauses are only allowed in the OpenMP declare simd
2103 directive, so not seen here. */
2104 case OMP_CLAUSE_UNIFORM
:
2105 case OMP_CLAUSE_INBRANCH
:
2106 case OMP_CLAUSE_NOTINBRANCH
:
2107 /* The following clauses are only allowed on OpenMP cancel and
2108 cancellation point directives, which at this point have already
2109 been lowered into a function call. */
2110 case OMP_CLAUSE_FOR
:
2111 case OMP_CLAUSE_PARALLEL
:
2112 case OMP_CLAUSE_SECTIONS
:
2113 case OMP_CLAUSE_TASKGROUP
:
2114 /* The following clauses are only added during OMP lowering; nested
2115 function decomposition happens before that. */
2116 case OMP_CLAUSE__LOOPTEMP_
:
2117 case OMP_CLAUSE__REDUCTEMP_
:
2118 case OMP_CLAUSE__SIMDUID_
:
2119 case OMP_CLAUSE__GRIDDIM_
:
2120 case OMP_CLAUSE__SIMT_
:
2121 /* Anything else. */
2127 info
->suppress_expansion
= new_suppress
;
2130 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
2131 switch (OMP_CLAUSE_CODE (clause
))
2133 case OMP_CLAUSE_REDUCTION
:
2134 case OMP_CLAUSE_IN_REDUCTION
:
2135 case OMP_CLAUSE_TASK_REDUCTION
:
2136 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2139 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
2140 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2142 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2143 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2145 walk_body (convert_local_reference_stmt
,
2146 convert_local_reference_op
, info
,
2147 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
2148 walk_body (convert_local_reference_stmt
,
2149 convert_local_reference_op
, info
,
2150 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
2151 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2153 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2154 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2159 case OMP_CLAUSE_LASTPRIVATE
:
2160 walk_body (convert_local_reference_stmt
,
2161 convert_local_reference_op
, info
,
2162 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
2165 case OMP_CLAUSE_LINEAR
:
2166 walk_body (convert_local_reference_stmt
,
2167 convert_local_reference_op
, info
,
2168 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
2179 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2180 and PARM_DECLs that were referenced by inner nested functions.
2181 The rewrite will be a structure reference to the local frame variable. */
2184 convert_local_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2185 struct walk_stmt_info
*wi
)
2187 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2188 tree save_local_var_chain
;
2189 bitmap save_suppress
;
2190 char save_static_chain_added
;
2191 bool frame_decl_added
;
2192 gimple
*stmt
= gsi_stmt (*gsi
);
2194 switch (gimple_code (stmt
))
2196 case GIMPLE_OMP_TEAMS
:
2197 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
2199 save_suppress
= info
->suppress_expansion
;
2200 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
2201 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2202 info
, gimple_omp_body_ptr (stmt
));
2203 info
->suppress_expansion
= save_suppress
;
2208 case GIMPLE_OMP_PARALLEL
:
2209 case GIMPLE_OMP_TASK
:
2210 save_suppress
= info
->suppress_expansion
;
2211 frame_decl_added
= false;
2212 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
2215 tree c
= build_omp_clause (gimple_location (stmt
),
2217 (void) get_frame_type (info
);
2218 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2219 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2220 gimple_omp_taskreg_set_clauses (stmt
, c
);
2221 info
->static_chain_added
|= 4;
2222 frame_decl_added
= true;
2225 save_local_var_chain
= info
->new_local_var_chain
;
2226 save_static_chain_added
= info
->static_chain_added
;
2227 info
->new_local_var_chain
= NULL
;
2228 info
->static_chain_added
= 0;
2230 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2231 gimple_omp_body_ptr (stmt
));
2233 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2235 tree c
= build_omp_clause (gimple_location (stmt
),
2237 (void) get_frame_type (info
);
2238 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2239 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2240 info
->static_chain_added
|= 4;
2241 gimple_omp_taskreg_set_clauses (stmt
, c
);
2243 if (info
->new_local_var_chain
)
2244 declare_vars (info
->new_local_var_chain
,
2245 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2246 info
->new_local_var_chain
= save_local_var_chain
;
2247 info
->suppress_expansion
= save_suppress
;
2248 info
->static_chain_added
|= save_static_chain_added
;
2251 case GIMPLE_OMP_FOR
:
2252 save_suppress
= info
->suppress_expansion
;
2253 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
2254 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
2255 convert_local_reference_stmt
,
2256 convert_local_reference_op
, info
);
2257 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2258 info
, gimple_omp_body_ptr (stmt
));
2259 info
->suppress_expansion
= save_suppress
;
2262 case GIMPLE_OMP_SECTIONS
:
2263 save_suppress
= info
->suppress_expansion
;
2264 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
2265 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2266 info
, gimple_omp_body_ptr (stmt
));
2267 info
->suppress_expansion
= save_suppress
;
2270 case GIMPLE_OMP_SINGLE
:
2271 save_suppress
= info
->suppress_expansion
;
2272 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
2273 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2274 info
, gimple_omp_body_ptr (stmt
));
2275 info
->suppress_expansion
= save_suppress
;
2278 case GIMPLE_OMP_TASKGROUP
:
2279 save_suppress
= info
->suppress_expansion
;
2280 convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt
), wi
);
2281 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2282 info
, gimple_omp_body_ptr (stmt
));
2283 info
->suppress_expansion
= save_suppress
;
2286 case GIMPLE_OMP_TARGET
:
2287 if (!is_gimple_omp_offloaded (stmt
))
2289 save_suppress
= info
->suppress_expansion
;
2290 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
);
2291 info
->suppress_expansion
= save_suppress
;
2292 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2293 info
, gimple_omp_body_ptr (stmt
));
2296 save_suppress
= info
->suppress_expansion
;
2297 frame_decl_added
= false;
2298 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
))
2300 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2301 (void) get_frame_type (info
);
2302 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2303 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2304 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2305 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2306 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2307 info
->static_chain_added
|= 4;
2308 frame_decl_added
= true;
2311 save_local_var_chain
= info
->new_local_var_chain
;
2312 save_static_chain_added
= info
->static_chain_added
;
2313 info
->new_local_var_chain
= NULL
;
2314 info
->static_chain_added
= 0;
2316 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2317 gimple_omp_body_ptr (stmt
));
2319 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2321 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2322 (void) get_frame_type (info
);
2323 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2324 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2325 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2326 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2327 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2328 info
->static_chain_added
|= 4;
2331 if (info
->new_local_var_chain
)
2332 declare_vars (info
->new_local_var_chain
,
2333 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2334 info
->new_local_var_chain
= save_local_var_chain
;
2335 info
->suppress_expansion
= save_suppress
;
2336 info
->static_chain_added
|= save_static_chain_added
;
2339 case GIMPLE_OMP_SECTION
:
2340 case GIMPLE_OMP_MASTER
:
2341 case GIMPLE_OMP_ORDERED
:
2342 case GIMPLE_OMP_SCAN
:
2343 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2344 info
, gimple_omp_body_ptr (stmt
));
2348 wi
->val_only
= true;
2350 *handled_ops_p
= false;
2354 if (gimple_clobber_p (stmt
))
2356 tree lhs
= gimple_assign_lhs (stmt
);
2358 && !use_pointer_in_frame (lhs
)
2359 && lookup_field_for_decl (info
, lhs
, NO_INSERT
))
2361 gsi_replace (gsi
, gimple_build_nop (), true);
2365 *handled_ops_p
= false;
2369 for (tree var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
2371 var
= DECL_CHAIN (var
))
2372 if (TREE_CODE (var
) == NAMELIST_DECL
)
2374 /* Adjust decls mentioned in NAMELIST_DECL. */
2375 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
2379 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
2382 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2384 if (decl_function_context (decl
) == info
->context
2385 && !use_pointer_in_frame (decl
))
2387 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2390 CONSTRUCTOR_ELT (decls
, i
)->value
2391 = get_local_debug_decl (info
, decl
, field
);
2397 *handled_ops_p
= false;
2401 /* For every other statement that we are not interested in
2402 handling here, let the walker traverse the operands. */
2403 *handled_ops_p
= false;
2407 /* Indicate that we have handled all the operands ourselves. */
2408 *handled_ops_p
= true;
2413 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2414 that reference labels from outer functions. The rewrite will be a
2415 call to __builtin_nonlocal_goto. */
2418 convert_nl_goto_reference (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2419 struct walk_stmt_info
*wi
)
2421 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2422 tree label
, new_label
, target_context
, x
, field
;
2424 gimple
*stmt
= gsi_stmt (*gsi
);
2426 if (gimple_code (stmt
) != GIMPLE_GOTO
)
2428 *handled_ops_p
= false;
2432 label
= gimple_goto_dest (stmt
);
2433 if (TREE_CODE (label
) != LABEL_DECL
)
2435 *handled_ops_p
= false;
2439 target_context
= decl_function_context (label
);
2440 if (target_context
== info
->context
)
2442 *handled_ops_p
= false;
2446 for (i
= info
->outer
; target_context
!= i
->context
; i
= i
->outer
)
2449 /* The original user label may also be use for a normal goto, therefore
2450 we must create a new label that will actually receive the abnormal
2451 control transfer. This new label will be marked LABEL_NONLOCAL; this
2452 mark will trigger proper behavior in the cfg, as well as cause the
2453 (hairy target-specific) non-local goto receiver code to be generated
2454 when we expand rtl. Enter this association into var_map so that we
2455 can insert the new label into the IL during a second pass. */
2456 tree
*slot
= &i
->var_map
->get_or_insert (label
);
2459 new_label
= create_artificial_label (UNKNOWN_LOCATION
);
2460 DECL_NONLOCAL (new_label
) = 1;
2466 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2467 field
= get_nl_goto_field (i
);
2468 x
= get_frame_field (info
, target_context
, field
, gsi
);
2470 x
= gsi_gimplify_val (info
, x
, gsi
);
2471 call
= gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO
),
2472 2, build_addr (new_label
), x
);
2473 gsi_replace (gsi
, call
, false);
2475 /* We have handled all of STMT's operands, no need to keep going. */
2476 *handled_ops_p
= true;
2481 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2482 are referenced via nonlocal goto from a nested function. The rewrite
2483 will involve installing a newly generated DECL_NONLOCAL label, and
2484 (potentially) a branch around the rtl gunk that is assumed to be
2485 attached to such a label. */
2488 convert_nl_goto_receiver (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2489 struct walk_stmt_info
*wi
)
2491 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2492 tree label
, new_label
;
2493 gimple_stmt_iterator tmp_gsi
;
2494 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (*gsi
));
2498 *handled_ops_p
= false;
2502 label
= gimple_label_label (stmt
);
2504 tree
*slot
= info
->var_map
->get (label
);
2507 *handled_ops_p
= false;
2511 /* If there's any possibility that the previous statement falls through,
2512 then we must branch around the new non-local label. */
2514 gsi_prev (&tmp_gsi
);
2515 if (gsi_end_p (tmp_gsi
) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi
)))
2517 gimple
*stmt
= gimple_build_goto (label
);
2518 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2521 new_label
= (tree
) *slot
;
2522 stmt
= gimple_build_label (new_label
);
2523 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2525 *handled_ops_p
= true;
2530 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2531 of nested functions that require the use of trampolines. The rewrite
2532 will involve a reference a trampoline generated for the occasion. */
2535 convert_tramp_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
2537 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
2538 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2539 tree t
= *tp
, decl
, target_context
, x
, builtin
;
2544 switch (TREE_CODE (t
))
2548 T.1 = &CHAIN->tramp;
2549 T.2 = __builtin_adjust_trampoline (T.1);
2550 T.3 = (func_type)T.2;
2553 decl
= TREE_OPERAND (t
, 0);
2554 if (TREE_CODE (decl
) != FUNCTION_DECL
)
2557 /* Only need to process nested functions. */
2558 target_context
= decl_function_context (decl
);
2559 if (!target_context
)
2562 /* If the nested function doesn't use a static chain, then
2563 it doesn't need a trampoline. */
2564 if (!DECL_STATIC_CHAIN (decl
))
2567 /* If we don't want a trampoline, then don't build one. */
2568 if (TREE_NO_TRAMPOLINE (t
))
2571 /* Lookup the immediate parent of the callee, as that's where
2572 we need to insert the trampoline. */
2573 for (i
= info
; i
->context
!= target_context
; i
= i
->outer
)
2576 /* Decide whether to generate a descriptor or a trampoline. */
2577 descr
= FUNC_ADDR_BY_DESCRIPTOR (t
) && !flag_trampolines
;
2580 x
= lookup_descr_for_decl (i
, decl
, INSERT
);
2582 x
= lookup_tramp_for_decl (i
, decl
, INSERT
);
2584 /* Compute the address of the field holding the trampoline. */
2585 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
2587 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
2589 /* Do machine-specific ugliness. Normally this will involve
2590 computing extra alignment, but it can really be anything. */
2592 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR
);
2594 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE
);
2595 call
= gimple_build_call (builtin
, 1, x
);
2596 x
= init_tmp_var_with_call (info
, &wi
->gsi
, call
);
2598 /* Cast back to the proper function type. */
2599 x
= build1 (NOP_EXPR
, TREE_TYPE (t
), x
);
2600 x
= init_tmp_var (info
, x
, &wi
->gsi
);
2606 if (!IS_TYPE_OR_DECL_P (t
))
2615 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2616 to addresses of nested functions that require the use of
2617 trampolines. The rewrite will involve a reference a trampoline
2618 generated for the occasion. */
2621 convert_tramp_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2622 struct walk_stmt_info
*wi
)
2624 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2625 gimple
*stmt
= gsi_stmt (*gsi
);
2627 switch (gimple_code (stmt
))
2631 /* Only walk call arguments, lest we generate trampolines for
2633 unsigned long i
, nargs
= gimple_call_num_args (stmt
);
2634 for (i
= 0; i
< nargs
; i
++)
2635 walk_tree (gimple_call_arg_ptr (stmt
, i
), convert_tramp_reference_op
,
2640 case GIMPLE_OMP_TEAMS
:
2641 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
2643 *handled_ops_p
= false;
2648 case GIMPLE_OMP_TARGET
:
2649 if (!is_gimple_omp_offloaded (stmt
))
2651 *handled_ops_p
= false;
2655 case GIMPLE_OMP_PARALLEL
:
2656 case GIMPLE_OMP_TASK
:
2659 tree save_local_var_chain
= info
->new_local_var_chain
;
2660 walk_gimple_op (stmt
, convert_tramp_reference_op
, wi
);
2661 info
->new_local_var_chain
= NULL
;
2662 char save_static_chain_added
= info
->static_chain_added
;
2663 info
->static_chain_added
= 0;
2664 walk_body (convert_tramp_reference_stmt
, convert_tramp_reference_op
,
2665 info
, gimple_omp_body_ptr (stmt
));
2666 if (info
->new_local_var_chain
)
2667 declare_vars (info
->new_local_var_chain
,
2668 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
2670 for (int i
= 0; i
< 2; i
++)
2673 if ((info
->static_chain_added
& (1 << i
)) == 0)
2675 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2676 /* Don't add CHAIN.* or FRAME.* twice. */
2677 for (c
= gimple_omp_taskreg_clauses (stmt
);
2679 c
= OMP_CLAUSE_CHAIN (c
))
2680 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2681 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2682 && OMP_CLAUSE_DECL (c
) == decl
)
2684 if (c
== NULL
&& gimple_code (stmt
) != GIMPLE_OMP_TARGET
)
2686 c
= build_omp_clause (gimple_location (stmt
),
2687 i
? OMP_CLAUSE_FIRSTPRIVATE
2688 : OMP_CLAUSE_SHARED
);
2689 OMP_CLAUSE_DECL (c
) = decl
;
2690 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2691 gimple_omp_taskreg_set_clauses (stmt
, c
);
2695 c
= build_omp_clause (gimple_location (stmt
),
2697 OMP_CLAUSE_DECL (c
) = decl
;
2698 OMP_CLAUSE_SET_MAP_KIND (c
,
2699 i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2700 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2701 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2702 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2706 info
->new_local_var_chain
= save_local_var_chain
;
2707 info
->static_chain_added
|= save_static_chain_added
;
2712 *handled_ops_p
= false;
2716 *handled_ops_p
= true;
2722 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2723 that reference nested functions to make sure that the static chain
2724 is set up properly for the call. */
2727 convert_gimple_call (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2728 struct walk_stmt_info
*wi
)
2730 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2731 tree decl
, target_context
;
2732 char save_static_chain_added
;
2734 gimple
*stmt
= gsi_stmt (*gsi
);
2736 switch (gimple_code (stmt
))
2739 if (gimple_call_chain (stmt
))
2741 decl
= gimple_call_fndecl (stmt
);
2744 target_context
= decl_function_context (decl
);
2745 if (target_context
&& DECL_STATIC_CHAIN (decl
))
2747 struct nesting_info
*i
= info
;
2748 while (i
&& i
->context
!= target_context
)
2750 /* If none of the outer contexts is the target context, this means
2751 that the function is called in a wrong context. */
2753 internal_error ("%s from %s called in %s",
2754 IDENTIFIER_POINTER (DECL_NAME (decl
)),
2755 IDENTIFIER_POINTER (DECL_NAME (target_context
)),
2756 IDENTIFIER_POINTER (DECL_NAME (info
->context
)));
2758 gimple_call_set_chain (as_a
<gcall
*> (stmt
),
2759 get_static_chain (info
, target_context
,
2761 info
->static_chain_added
|= (1 << (info
->context
!= target_context
));
2765 case GIMPLE_OMP_TEAMS
:
2766 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
2768 walk_body (convert_gimple_call
, NULL
, info
,
2769 gimple_omp_body_ptr (stmt
));
2774 case GIMPLE_OMP_PARALLEL
:
2775 case GIMPLE_OMP_TASK
:
2776 save_static_chain_added
= info
->static_chain_added
;
2777 info
->static_chain_added
= 0;
2778 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2779 for (i
= 0; i
< 2; i
++)
2782 if ((info
->static_chain_added
& (1 << i
)) == 0)
2784 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2785 /* Don't add CHAIN.* or FRAME.* twice. */
2786 for (c
= gimple_omp_taskreg_clauses (stmt
);
2788 c
= OMP_CLAUSE_CHAIN (c
))
2789 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2790 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2791 && OMP_CLAUSE_DECL (c
) == decl
)
2795 c
= build_omp_clause (gimple_location (stmt
),
2796 i
? OMP_CLAUSE_FIRSTPRIVATE
2797 : OMP_CLAUSE_SHARED
);
2798 OMP_CLAUSE_DECL (c
) = decl
;
2799 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2800 gimple_omp_taskreg_set_clauses (stmt
, c
);
2803 info
->static_chain_added
|= save_static_chain_added
;
2806 case GIMPLE_OMP_TARGET
:
2807 if (!is_gimple_omp_offloaded (stmt
))
2809 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2812 save_static_chain_added
= info
->static_chain_added
;
2813 info
->static_chain_added
= 0;
2814 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2815 for (i
= 0; i
< 2; i
++)
2818 if ((info
->static_chain_added
& (1 << i
)) == 0)
2820 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2821 /* Don't add CHAIN.* or FRAME.* twice. */
2822 for (c
= gimple_omp_target_clauses (stmt
);
2824 c
= OMP_CLAUSE_CHAIN (c
))
2825 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
2826 && OMP_CLAUSE_DECL (c
) == decl
)
2830 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2831 OMP_CLAUSE_DECL (c
) = decl
;
2832 OMP_CLAUSE_SET_MAP_KIND (c
, i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2833 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2834 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2835 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2839 info
->static_chain_added
|= save_static_chain_added
;
2842 case GIMPLE_OMP_FOR
:
2843 walk_body (convert_gimple_call
, NULL
, info
,
2844 gimple_omp_for_pre_body_ptr (stmt
));
2846 case GIMPLE_OMP_SECTIONS
:
2847 case GIMPLE_OMP_SECTION
:
2848 case GIMPLE_OMP_SINGLE
:
2849 case GIMPLE_OMP_MASTER
:
2850 case GIMPLE_OMP_TASKGROUP
:
2851 case GIMPLE_OMP_ORDERED
:
2852 case GIMPLE_OMP_SCAN
:
2853 case GIMPLE_OMP_CRITICAL
:
2854 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2858 /* Keep looking for other operands. */
2859 *handled_ops_p
= false;
2863 *handled_ops_p
= true;
2867 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2868 call expressions. At the same time, determine if a nested function
2869 actually uses its static chain; if not, remember that. */
2872 convert_all_function_calls (struct nesting_info
*root
)
2874 unsigned int chain_count
= 0, old_chain_count
, iter_count
;
2875 struct nesting_info
*n
;
2877 /* First, optimistically clear static_chain for all decls that haven't
2878 used the static chain already for variable access. But always create
2879 it if not optimizing. This makes it possible to reconstruct the static
2880 nesting tree at run time and thus to resolve up-level references from
2881 within the debugger. */
2882 FOR_EACH_NEST_INFO (n
, root
)
2886 tree decl
= n
->context
;
2890 (void) get_frame_type (n
);
2892 (void) get_chain_decl (n
);
2894 else if (!n
->outer
|| (!n
->chain_decl
&& !n
->chain_field
))
2896 DECL_STATIC_CHAIN (decl
) = 0;
2897 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2898 fprintf (dump_file
, "Guessing no static-chain for %s\n",
2899 lang_hooks
.decl_printable_name (decl
, 2));
2902 DECL_STATIC_CHAIN (decl
) = 1;
2903 chain_count
+= DECL_STATIC_CHAIN (decl
);
2906 FOR_EACH_NEST_INFO (n
, root
)
2909 tree decl
= n
->context
;
2910 tree alias
= cgraph_node::get (decl
)->thunk
.alias
;
2911 DECL_STATIC_CHAIN (decl
) = DECL_STATIC_CHAIN (alias
);
2914 /* Walk the functions and perform transformations. Note that these
2915 transformations can induce new uses of the static chain, which in turn
2916 require re-examining all users of the decl. */
2917 /* ??? It would make sense to try to use the call graph to speed this up,
2918 but the call graph hasn't really been built yet. Even if it did, we
2919 would still need to iterate in this loop since address-of references
2920 wouldn't show up in the callgraph anyway. */
2924 old_chain_count
= chain_count
;
2928 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2929 fputc ('\n', dump_file
);
2931 FOR_EACH_NEST_INFO (n
, root
)
2935 tree decl
= n
->context
;
2936 walk_function (convert_tramp_reference_stmt
,
2937 convert_tramp_reference_op
, n
);
2938 walk_function (convert_gimple_call
, NULL
, n
);
2939 chain_count
+= DECL_STATIC_CHAIN (decl
);
2942 FOR_EACH_NEST_INFO (n
, root
)
2945 tree decl
= n
->context
;
2946 tree alias
= cgraph_node::get (decl
)->thunk
.alias
;
2947 DECL_STATIC_CHAIN (decl
) = DECL_STATIC_CHAIN (alias
);
2950 while (chain_count
!= old_chain_count
);
2952 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2953 fprintf (dump_file
, "convert_all_function_calls iterations: %u\n\n",
2957 struct nesting_copy_body_data
2960 struct nesting_info
*root
;
2963 /* A helper subroutine for debug_var_chain type remapping. */
2966 nesting_copy_decl (tree decl
, copy_body_data
*id
)
2968 struct nesting_copy_body_data
*nid
= (struct nesting_copy_body_data
*) id
;
2969 tree
*slot
= nid
->root
->var_map
->get (decl
);
2972 return (tree
) *slot
;
2974 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (decl
))
2976 tree new_decl
= copy_decl_no_change (decl
, id
);
2977 DECL_ORIGINAL_TYPE (new_decl
)
2978 = remap_type (DECL_ORIGINAL_TYPE (decl
), id
);
2983 || TREE_CODE (decl
) == PARM_DECL
2984 || TREE_CODE (decl
) == RESULT_DECL
)
2987 return copy_decl_no_change (decl
, id
);
2990 /* A helper function for remap_vla_decls. See if *TP contains
2991 some remapped variables. */
2994 contains_remapped_vars (tree
*tp
, int *walk_subtrees
, void *data
)
2996 struct nesting_info
*root
= (struct nesting_info
*) data
;
3002 tree
*slot
= root
->var_map
->get (t
);
3010 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
3014 remap_vla_decls (tree block
, struct nesting_info
*root
)
3016 tree var
, subblock
, val
, type
;
3017 struct nesting_copy_body_data id
;
3019 for (subblock
= BLOCK_SUBBLOCKS (block
);
3021 subblock
= BLOCK_CHAIN (subblock
))
3022 remap_vla_decls (subblock
, root
);
3024 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
3025 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3027 val
= DECL_VALUE_EXPR (var
);
3028 type
= TREE_TYPE (var
);
3030 if (!(TREE_CODE (val
) == INDIRECT_REF
3031 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
3032 && variably_modified_type_p (type
, NULL
)))
3035 if (root
->var_map
->get (TREE_OPERAND (val
, 0))
3036 || walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
3040 if (var
== NULL_TREE
)
3043 memset (&id
, 0, sizeof (id
));
3044 id
.cb
.copy_decl
= nesting_copy_decl
;
3045 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
3048 for (; var
; var
= DECL_CHAIN (var
))
3049 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3051 struct nesting_info
*i
;
3054 val
= DECL_VALUE_EXPR (var
);
3055 type
= TREE_TYPE (var
);
3057 if (!(TREE_CODE (val
) == INDIRECT_REF
3058 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
3059 && variably_modified_type_p (type
, NULL
)))
3062 tree
*slot
= root
->var_map
->get (TREE_OPERAND (val
, 0));
3063 if (!slot
&& !walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
3066 context
= decl_function_context (var
);
3067 for (i
= root
; i
; i
= i
->outer
)
3068 if (i
->context
== context
)
3074 /* Fully expand value expressions. This avoids having debug variables
3075 only referenced from them and that can be swept during GC. */
3078 tree t
= (tree
) *slot
;
3079 gcc_assert (DECL_P (t
) && DECL_HAS_VALUE_EXPR_P (t
));
3080 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
), DECL_VALUE_EXPR (t
));
3083 id
.cb
.src_fn
= i
->context
;
3084 id
.cb
.dst_fn
= i
->context
;
3085 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
3087 TREE_TYPE (var
) = newt
= remap_type (type
, &id
.cb
);
3088 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
3090 newt
= TREE_TYPE (newt
);
3091 type
= TREE_TYPE (type
);
3093 if (TYPE_NAME (newt
)
3094 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
3095 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
3097 && TYPE_NAME (newt
) == TYPE_NAME (type
))
3098 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
3100 walk_tree (&val
, copy_tree_body_r
, &id
.cb
, NULL
);
3101 if (val
!= DECL_VALUE_EXPR (var
))
3102 SET_DECL_VALUE_EXPR (var
, val
);
3105 delete id
.cb
.decl_map
;
3108 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3112 fixup_vla_decls (tree block
)
3114 for (tree var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
3115 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3117 tree val
= DECL_VALUE_EXPR (var
);
3119 if (!(TREE_CODE (val
) == INDIRECT_REF
3120 && VAR_P (TREE_OPERAND (val
, 0))
3121 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val
, 0))))
3124 /* Fully expand value expressions. This avoids having debug variables
3125 only referenced from them and that can be swept during GC. */
3126 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
),
3127 DECL_VALUE_EXPR (TREE_OPERAND (val
, 0)));
3128 SET_DECL_VALUE_EXPR (var
, val
);
3131 for (tree sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= BLOCK_CHAIN (sub
))
3132 fixup_vla_decls (sub
);
3135 /* Fold the MEM_REF *E. */
3137 fold_mem_refs (tree
*const &e
, void *data ATTRIBUTE_UNUSED
)
3139 tree
*ref_p
= CONST_CAST2 (tree
*, const tree
*, (const tree
*)e
);
3140 *ref_p
= fold (*ref_p
);
3144 /* Given DECL, a nested function, build an initialization call for FIELD,
3145 the trampoline or descriptor for DECL, using FUNC as the function. */
3148 build_init_call_stmt (struct nesting_info
*info
, tree decl
, tree field
,
3151 tree arg1
, arg2
, arg3
, x
;
3153 gcc_assert (DECL_STATIC_CHAIN (decl
));
3154 arg3
= build_addr (info
->frame_decl
);
3156 arg2
= build_addr (decl
);
3158 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
3159 info
->frame_decl
, field
, NULL_TREE
);
3160 arg1
= build_addr (x
);
3162 return gimple_build_call (func
, 3, arg1
, arg2
, arg3
);
3165 /* Do "everything else" to clean up or complete state collected by the various
3166 walking passes -- create a field to hold the frame base address, lay out the
3167 types and decls, generate code to initialize the frame decl, store critical
3168 expressions in the struct function for rtl to find. */
3171 finalize_nesting_tree_1 (struct nesting_info
*root
)
3173 gimple_seq stmt_list
= NULL
;
3175 tree context
= root
->context
;
3176 struct function
*sf
;
3181 /* If we created a non-local frame type or decl, we need to lay them
3182 out at this time. */
3183 if (root
->frame_type
)
3185 /* Debugging information needs to compute the frame base address of the
3186 parent frame out of the static chain from the nested frame.
3188 The static chain is the address of the FRAME record, so one could
3189 imagine it would be possible to compute the frame base address just
3190 adding a constant offset to this address. Unfortunately, this is not
3191 possible: if the FRAME object has alignment constraints that are
3192 stronger than the stack, then the offset between the frame base and
3193 the FRAME object will be dynamic.
3195 What we do instead is to append a field to the FRAME object that holds
3196 the frame base address: then debug info just has to fetch this
3199 /* Debugging information will refer to the CFA as the frame base
3200 address: we will do the same here. */
3201 const tree frame_addr_fndecl
3202 = builtin_decl_explicit (BUILT_IN_DWARF_CFA
);
3204 /* Create a field in the FRAME record to hold the frame base address for
3205 this stack frame. Since it will be used only by the debugger, put it
3206 at the end of the record in order not to shift all other offsets. */
3207 tree fb_decl
= make_node (FIELD_DECL
);
3209 DECL_NAME (fb_decl
) = get_identifier ("FRAME_BASE.PARENT");
3210 TREE_TYPE (fb_decl
) = ptr_type_node
;
3211 TREE_ADDRESSABLE (fb_decl
) = 1;
3212 DECL_CONTEXT (fb_decl
) = root
->frame_type
;
3213 TYPE_FIELDS (root
->frame_type
) = chainon (TYPE_FIELDS (root
->frame_type
),
3216 /* In some cases the frame type will trigger the -Wpadded warning.
3217 This is not helpful; suppress it. */
3218 int save_warn_padded
= warn_padded
;
3220 layout_type (root
->frame_type
);
3221 warn_padded
= save_warn_padded
;
3222 layout_decl (root
->frame_decl
, 0);
3224 /* Initialize the frame base address field. If the builtin we need is
3225 not available, set it to NULL so that debugging information does not
3227 tree fb_ref
= build3 (COMPONENT_REF
, TREE_TYPE (fb_decl
),
3228 root
->frame_decl
, fb_decl
, NULL_TREE
);
3231 if (frame_addr_fndecl
!= NULL_TREE
)
3233 gcall
*fb_gimple
= gimple_build_call (frame_addr_fndecl
, 1,
3235 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
3237 fb_tmp
= init_tmp_var_with_call (root
, &gsi
, fb_gimple
);
3240 fb_tmp
= build_int_cst (TREE_TYPE (fb_ref
), 0);
3241 gimple_seq_add_stmt (&stmt_list
,
3242 gimple_build_assign (fb_ref
, fb_tmp
));
3244 declare_vars (root
->frame_decl
,
3245 gimple_seq_first_stmt (gimple_body (context
)), true);
3248 /* If any parameters were referenced non-locally, then we need to insert
3249 a copy or a pointer. */
3250 if (root
->any_parm_remapped
)
3253 for (p
= DECL_ARGUMENTS (context
); p
; p
= DECL_CHAIN (p
))
3257 field
= lookup_field_for_decl (root
, p
, NO_INSERT
);
3261 if (use_pointer_in_frame (p
))
3266 /* If the assignment is from a non-register the stmt is
3267 not valid gimple. Make it so by using a temporary instead. */
3268 if (!is_gimple_reg (x
)
3269 && is_gimple_reg_type (TREE_TYPE (x
)))
3271 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
3272 x
= init_tmp_var (root
, x
, &gsi
);
3275 y
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
3276 root
->frame_decl
, field
, NULL_TREE
);
3277 stmt
= gimple_build_assign (y
, x
);
3278 gimple_seq_add_stmt (&stmt_list
, stmt
);
3282 /* If a chain_field was created, then it needs to be initialized
3284 if (root
->chain_field
)
3286 tree x
= build3 (COMPONENT_REF
, TREE_TYPE (root
->chain_field
),
3287 root
->frame_decl
, root
->chain_field
, NULL_TREE
);
3288 stmt
= gimple_build_assign (x
, get_chain_decl (root
));
3289 gimple_seq_add_stmt (&stmt_list
, stmt
);
3292 /* If trampolines were created, then we need to initialize them. */
3293 if (root
->any_tramp_created
)
3295 struct nesting_info
*i
;
3296 for (i
= root
->inner
; i
; i
= i
->next
)
3300 field
= lookup_tramp_for_decl (root
, i
->context
, NO_INSERT
);
3304 x
= builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE
);
3305 stmt
= build_init_call_stmt (root
, i
->context
, field
, x
);
3306 gimple_seq_add_stmt (&stmt_list
, stmt
);
3310 /* If descriptors were created, then we need to initialize them. */
3311 if (root
->any_descr_created
)
3313 struct nesting_info
*i
;
3314 for (i
= root
->inner
; i
; i
= i
->next
)
3318 field
= lookup_descr_for_decl (root
, i
->context
, NO_INSERT
);
3322 x
= builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR
);
3323 stmt
= build_init_call_stmt (root
, i
->context
, field
, x
);
3324 gimple_seq_add_stmt (&stmt_list
, stmt
);
3328 /* If we created initialization statements, insert them. */
3332 annotate_all_with_location (stmt_list
, DECL_SOURCE_LOCATION (context
));
3333 bind
= gimple_seq_first_stmt_as_a_bind (gimple_body (context
));
3334 gimple_seq_add_seq (&stmt_list
, gimple_bind_body (bind
));
3335 gimple_bind_set_body (bind
, stmt_list
);
3338 /* If a chain_decl was created, then it needs to be registered with
3339 struct function so that it gets initialized from the static chain
3340 register at the beginning of the function. */
3341 sf
= DECL_STRUCT_FUNCTION (root
->context
);
3342 sf
->static_chain_decl
= root
->chain_decl
;
3344 /* Similarly for the non-local goto save area. */
3345 if (root
->nl_goto_field
)
3347 sf
->nonlocal_goto_save_area
3348 = get_frame_field (root
, context
, root
->nl_goto_field
, NULL
);
3349 sf
->has_nonlocal_label
= 1;
3352 /* Make sure all new local variables get inserted into the
3353 proper BIND_EXPR. */
3354 if (root
->new_local_var_chain
)
3355 declare_vars (root
->new_local_var_chain
,
3356 gimple_seq_first_stmt (gimple_body (root
->context
)),
3359 if (root
->debug_var_chain
)
3364 remap_vla_decls (DECL_INITIAL (root
->context
), root
);
3366 for (debug_var
= root
->debug_var_chain
; debug_var
;
3367 debug_var
= DECL_CHAIN (debug_var
))
3368 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3371 /* If there are any debug decls with variable length types,
3372 remap those types using other debug_var_chain variables. */
3375 struct nesting_copy_body_data id
;
3377 memset (&id
, 0, sizeof (id
));
3378 id
.cb
.copy_decl
= nesting_copy_decl
;
3379 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
3382 for (; debug_var
; debug_var
= DECL_CHAIN (debug_var
))
3383 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3385 tree type
= TREE_TYPE (debug_var
);
3386 tree newt
, t
= type
;
3387 struct nesting_info
*i
;
3389 for (i
= root
; i
; i
= i
->outer
)
3390 if (variably_modified_type_p (type
, i
->context
))
3396 id
.cb
.src_fn
= i
->context
;
3397 id
.cb
.dst_fn
= i
->context
;
3398 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
3400 TREE_TYPE (debug_var
) = newt
= remap_type (type
, &id
.cb
);
3401 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
3403 newt
= TREE_TYPE (newt
);
3406 if (TYPE_NAME (newt
)
3407 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
3408 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
3410 && TYPE_NAME (newt
) == TYPE_NAME (t
))
3411 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
3414 delete id
.cb
.decl_map
;
3417 scope
= gimple_seq_first_stmt_as_a_bind (gimple_body (root
->context
));
3418 if (gimple_bind_block (scope
))
3419 declare_vars (root
->debug_var_chain
, scope
, true);
3421 BLOCK_VARS (DECL_INITIAL (root
->context
))
3422 = chainon (BLOCK_VARS (DECL_INITIAL (root
->context
)),
3423 root
->debug_var_chain
);
3426 fixup_vla_decls (DECL_INITIAL (root
->context
));
3428 /* Fold the rewritten MEM_REF trees. */
3429 root
->mem_refs
->traverse
<void *, fold_mem_refs
> (NULL
);
3431 /* Dump the translated tree function. */
3434 fputs ("\n\n", dump_file
);
3435 dump_function_to_file (root
->context
, dump_file
, dump_flags
);
/* Run finalize_nesting_tree_1 on every nesting_info node in the tree
   rooted at ROOT, using the pass's FOR_EACH_NEST_INFO iterator.  */
3440 finalize_nesting_tree (struct nesting_info
*root
)
3442 struct nesting_info
*n
;
/* Visit each nest-info node reachable from ROOT and finalize it.  */
3443 FOR_EACH_NEST_INFO (n
, root
)
3444 finalize_nesting_tree_1 (n
);
3447 /* Unnest the nodes and pass them to cgraph. */
/* Update the call graph for the single function ROOT->context so it is
   no longer treated as nested, and hand it to cgraph for finalization.  */
3450 unnest_nesting_tree_1 (struct nesting_info
*root
)
/* Look up the cgraph node for the function this nesting_info describes.  */
3452 struct cgraph_node
*node
= cgraph_node::get (root
->context
);
3454 /* For nested functions update the cgraph to reflect unnesting.
3455 We also delay finalizing of these functions up to this point. */
/* NOTE(review): the statements between original lines 3455 and 3460 (the
   nested-function guard and the actual unnest of NODE) are not visible in
   this extraction; NODE appears unused here only because of that gap --
   confirm against the full source before editing.  */
3460 cgraph_node::finalize_function (root
->context
, true);
/* Apply unnest_nesting_tree_1 to ROOT and to every other nesting_info
   node in ROOT's nesting tree.  */
3465 unnest_nesting_tree (struct nesting_info
*root
)
3467 struct nesting_info
*n
;
/* Same traversal as finalize_nesting_tree, one node at a time.  */
3468 FOR_EACH_NEST_INFO (n
, root
)
3469 unnest_nesting_tree_1 (n
);
3472 /* Free the data structures allocated during this pass. */
/* Release the per-function data structures built by this pass: walk the
   nesting tree with iter_nestinfo_start/iter_nestinfo_next and delete
   each node's hash maps.  */
3475 free_nesting_tree (struct nesting_info
*root
)
3477 struct nesting_info
*node
, *next
;
3479 node
= iter_nestinfo_start (root
);
/* Capture the successor before tearing NODE down; the remaining loop
   and node-freeing lines (orig. 3486-3489) are not visible in this
   extraction -- confirm against the full source.  */
3482 next
= iter_nestinfo_next (node
);
/* Free the maps owned by NODE; delete of a hash_map releases its
   entries' table storage.  */
3483 delete node
->var_map
;
3484 delete node
->field_map
;
3485 delete node
->mem_refs
;
3492 /* Gimplify a function and all its nested functions. */
/* Gimplify ROOT's own body if it has no GIMPLE body yet, then recurse
   into every function nested directly within ROOT, skipping thunks.  */
3494 gimplify_all_functions (struct cgraph_node
*root
)
3496 struct cgraph_node
*iter
;
/* Gimplify at most once: only when no GIMPLE body exists yet.  */
3497 if (!gimple_body (root
->decl
))
3498 gimplify_function_tree (root
->decl
);
/* Walk the chain of functions nested directly inside ROOT.  */
3499 for (iter
= root
->nested
; iter
; iter
= iter
->next_nested
)
/* Skip thunks (thunk_p); recurse only into real nested bodies.  */
3500 if (!iter
->thunk
.thunk_p
)
3501 gimplify_all_functions (iter
);
3504 /* Main entry point for this pass. Process FNDECL and all of its nested
3505 subroutines and turn them into something less tightly bound. */
/* Main entry point of the pass: lower FNDECL and all of its nested
   subroutines.  Gimplifies the whole nest, builds the nesting_info
   tree, rewrites nonlocal and local references and nonlocal gotos,
   converts call sites, then finalizes, unnests and frees the tree.  */
3508 lower_nested_functions (tree fndecl
)
3510 struct cgraph_node
*cgn
;
3511 struct nesting_info
*root
;
3513 /* If there are no nested functions, there's nothing to do. */
3514 cgn
= cgraph_node::get (fndecl
);
/* NOTE(review): the early-return guard after the lookup (orig. lines
   3515-3517) is not visible in this extraction.  */
/* Every function in the nest needs a GIMPLE body before rewriting.  */
3518 gimplify_all_functions (cgn
);
/* Open the TDI_nested dump stream; subsequent dump calls presumably sit
   under a dump_file guard (orig. line 3521 not visible here).  */
3520 set_dump_file (dump_begin (TDI_nested
, &dump_flags
))
3522 fprintf (dump_file
, "\n;; Function %s\n\n",
3523 lang_hooks
.decl_printable_name (fndecl
, 2));
/* Build the nesting_info tree describing FNDECL and its nest.  */
3525 bitmap_obstack_initialize (&nesting_info_bitmap_obstack
);
3526 root
= create_nesting_tree (cgn
);
/* Rewrite uses of an outer function's variables from inner functions.  */
3528 walk_all_functions (convert_nonlocal_reference_stmt
,
3529 convert_nonlocal_reference_op
,
/* Rewrite a function's own references to variables that escaped into
   its frame.  (The trailing ROOT arguments, orig. 3530/3533, are not
   visible in this extraction.)  */
3531 walk_all_functions (convert_local_reference_stmt
,
3532 convert_local_reference_op
,
/* Rewrite gotos that leave a nested function, and their receivers.  */
3534 walk_all_functions (convert_nl_goto_reference
, NULL
, root
);
3535 walk_all_functions (convert_nl_goto_receiver
, NULL
, root
);
/* Fix up every call site now that frames and static chains are known.  */
3537 convert_all_function_calls (root
);
3538 finalize_nesting_tree (root
);
3539 unnest_nesting_tree (root
);
/* Release pass-local data structures and the bitmap obstack.  */
3541 free_nesting_tree (root
);
3542 bitmap_obstack_release (&nesting_info_bitmap_obstack
);
/* Close the dump stream if one was opened above.  */
3546 dump_end (TDI_nested
, dump_file
);
3547 set_dump_file (NULL
);
3551 #include "gt-tree-nested.h"