/* Nested function decomposition for GIMPLE.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "langhooks.h"
#include "pointer-set.h"
/* The object of this pass is to lower the representation of a set of nested
   functions in order to expose all of the gory details of the various
   nonlocal references.  We want to do this sooner rather than later, in
   order to give us more freedom in emitting all of the functions in question.

   Back in olden times, when gcc was young, we developed an insanely
   complicated scheme whereby variables which were referenced nonlocally
   were forced to live in the stack of the declaring function, and then
   the nested functions magically discovered where these variables were
   placed.  In order for this scheme to function properly, it required
   that the outer function be partially expanded, then we switch to
   compiling the inner function, and once done with those we switch back
   to compiling the outer function.  Such delicate ordering requirements
   make it difficult to do whole translation unit optimizations
   involving such functions.

   The implementation here is much more direct.  Everything that can be
   referenced by an inner function is a member of an explicitly created
   structure herein called the "nonlocal frame struct".  The incoming
   static chain for a nested function is a pointer to this struct in
   the parent.  In this way, we settle on known offsets from a known
   base, and so are decoupled from the logic that places objects in the
   function's stack frame.  More importantly, we don't have to wait for
   that to happen -- since the compilation of the inner function is no
   longer tied to a real stack frame, the nonlocal frame struct can be
   allocated anywhere, which means that the outer function is now
   inlinable.

   Theory of operation here is very simple.  Iterate over all the
   statements in all the functions (depth first) several times,
   allocating structures and fields on demand.  In general we want to
   examine inner functions first, so that we can avoid making changes
   to outer functions which are unnecessary.

   The order of the passes matters a bit, in that later passes will be
   skipped if it is discovered that the functions don't actually interact
   at all.  That is, they're nested in the lexical sense but could have
   been written as independent functions without change.  */
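
/* An illustrative sketch (not part of the pass itself) of what the lowering
   described above amounts to.  The names FRAME_outer, FRAME and CHAIN below
   are only illustrative; the real record type, field decls and chain decl
   are built on demand by get_frame_type, lookup_field_for_decl and
   get_chain_decl further down.

       // Source:
       int outer (int n)
       {
         int x = n;
         int inner (void) { return x + 1; }   // reads X nonlocally
         return inner ();
       }

       // Conceptual lowering:
       struct FRAME_outer { int x; };         // the "nonlocal frame struct"
       // in outer:  FRAME.x = n;  INNER receives &FRAME as its static chain
       // in inner:  return CHAIN->x + 1;
   */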
struct nesting_info
{
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  struct pointer_map_t *field_map;
  struct pointer_map_t *var_map;
  bitmap suppress_expansion;

  tree new_local_var_chain;

  bool any_parm_remapped;
  bool any_tramp_created;
  char static_chain_added;
};
/* Iterate over the nesting tree, starting with ROOT, depth first.  */

static inline struct nesting_info *
iter_nestinfo_start (struct nesting_info *root)

static inline struct nesting_info *
iter_nestinfo_next (struct nesting_info *node)
  return iter_nestinfo_start (node->next);

#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;
/* We're working in so many different function contexts simultaneously,
   that create_tmp_var is dangerous.  Prevent mishap.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy

/* Like create_tmp_var, except record the variable for registration at
   the given nesting level.  */

create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
              || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  TREE_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;
/* Take the address of EXP to be used within function CONTEXT.
   Mark it for addressability as necessary.  */

build_addr (tree exp, tree context)
  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);
  TREE_ADDRESSABLE (base) = 1;

  /* Building the ADDR_EXPR will compute a set of properties for
     that ADDR_EXPR.  Those properties are unfortunately context
     specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.

     Temporarily set CURRENT_FUNCTION_DECL to the desired context,
     build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
     way the properties of the ADDR_EXPR are computed properly.  */
  save_context = current_function_decl;
  current_function_decl = context;
  retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
  current_function_decl = save_context;
/* Insert FIELD into TYPE, sorted by alignment requirements.  */

insert_field_into_struct (tree type, tree field)
  DECL_CONTEXT (field) = type;

  for (p = &TYPE_FIELDS (type); *p; p = &TREE_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))

  TREE_CHAIN (field) = *p;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    TYPE_ALIGN (type) = DECL_ALIGN (field);
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

get_frame_type (struct nesting_info *info)
  tree type = info->frame_type;
      type = make_node (RECORD_TYPE);

      name = concat ("FRAME.",
                     IDENTIFIER_POINTER (DECL_NAME (info->context)),
      TYPE_NAME (type) = get_identifier (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");

      /* ??? Always make it addressable for now, since it is meant to
         be pointed to by the static chain pointer.  This pessimizes
         when it turns out that no static chains are needed because
         the nested functions referencing non-local variables are not
         reachable, but the true pessimization is to create the non-
         local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
/* Return true if DECL should be referenced by pointer in the non-local
   frame structure.  */

use_pointer_in_frame (tree decl)
  if (TREE_CODE (decl) == PARM_DECL)
      /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
         sized decls, and inefficient to copy large aggregates.  Don't bother
         moving anything but scalar variables.  */
      return AGGREGATE_TYPE_P (TREE_TYPE (decl));

      /* Variable sized types make things "interesting" in the frame.  */
      return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
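
/* For example (an illustrative sketch only, with made-up decls): given

       void f (struct big s, int n) { char buf[n]; int i; ... }

   a nested function referencing S or BUF sees a pointer field in the frame
   struct (S because copying an aggregate PARM_DECL is wasteful, BUF because
   its size is not constant), while I would be placed in the frame struct
   directly.  */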
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.  */

lookup_field_for_decl (struct nesting_info *info, tree decl,
                       enum insert_option insert)
  if (insert == NO_INSERT)
      slot = pointer_map_contains (info->field_map, decl);
      return slot ? (tree) *slot : NULL_TREE;

  slot = pointer_map_insert (info->field_map, decl);
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
          TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
          DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
          DECL_NONADDRESSABLE_P (field) = 1;

          TREE_TYPE (field) = TREE_TYPE (decl);
          DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
          DECL_ALIGN (field) = DECL_ALIGN (decl);
          DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
          TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
          DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
          TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);

      insert_field_into_struct (get_frame_type (info), field);

      if (TREE_CODE (decl) == PARM_DECL)
        info->any_parm_remapped = true;
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

get_chain_decl (struct nesting_info *info)
  tree decl = info->chain_decl;
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
         the construction of this variable is handled specially in
         expand_function_start and initialize_inlined_parameters.
         Note also that it's represented as a parameter.  This is
         closer to the truth, since the initial value does come from
         the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
                         PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
         it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

          && (dump_flags & TDF_DETAILS)
          && !DECL_STATIC_CHAIN (info->context))
        fprintf (dump_file, "Setting static-chain for %s\n",
                 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

get_chain_field (struct nesting_info *info)
  tree field = info->chain_field;
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

          && (dump_flags & TDF_DETAILS)
          && !DECL_STATIC_CHAIN (info->context))
        fprintf (dump_file, "Setting static-chain for %s\n",
                 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
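
/* A sketch of how the chain decl and the "__chain" field combine (names are
   illustrative only): with three nesting levels A -> B -> C, the frame
   record built for B gets a "__chain" field pointing at A's frame, so a
   reference from C to a variable of A conceptually becomes
   CHAIN->__chain->a_var, where CHAIN is C's incoming static chain (a pointer
   to B's frame).  */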
/* Initialize a new temporary with the GIMPLE_CALL STMT.  */

init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);
/* Copy EXP into a temporary.  Allocate the temporary in the context of
   INFO and insert the initialization statement before GSI.  */

init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (t, exp);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
/* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */

gsi_gimplify_val (struct nesting_info *info, tree exp,
                  gimple_stmt_iterator *gsi)
  if (is_gimple_val (exp))
    return exp;
  else
    return init_tmp_var (info, exp, gsi);
/* Similarly, but copy from the temporary and insert the statement
   after the iterator.  */

save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (exp, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
/* Build or return the type used to represent a nested function trampoline.  */

static GTY(()) tree trampoline_type;

get_trampoline_type (struct nesting_info *info)
  unsigned align, size;
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;

  t = build_index_type (build_int_cst (NULL_TREE, size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
                  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  */

lookup_tramp_for_decl (struct nesting_info *info, tree decl,
                       enum insert_option insert)
  if (insert == NO_INSERT)
      slot = pointer_map_contains (info->var_map, decl);
      return slot ? (tree) *slot : NULL_TREE;

  slot = pointer_map_insert (info->var_map, decl);
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);
      TREE_TYPE (field) = get_trampoline_type (info);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->any_tramp_created = true;
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

get_nl_goto_field (struct nesting_info *info)
  tree field = info->nl_goto_field;
      /* For __builtin_nonlocal_goto, we need N words.  The first is the
         frame pointer, the rest is for the target's stack pointer save
         area.  The number of words is controlled by STACK_SAVEAREA_MODE;
         not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
        type = ptr_type_node;
        type = lang_hooks.types.type_for_mode (Pmode, 1);

      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);

      type = build_array_type
        (type, build_index_type (build_int_cst (NULL_TREE, size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
/* Invoke CALLBACK on all statements of GIMPLE sequence SEQ.  */

walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
           struct nesting_info *info, gimple_seq seq)
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, callback_stmt, callback_op, &wi);
/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */

walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
               struct nesting_info *info)
  walk_body (callback_stmt, callback_op, info, gimple_body (info->context));
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

walk_gimple_omp_for (gimple for_stmt,
                     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
                     struct nesting_info *info)
  struct walk_stmt_info wi;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body (for_stmt));

  seq = gimple_seq_alloc ();
  memset (&wi, 0, sizeof (wi));
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);

  if (gimple_seq_empty_p (seq))
    gimple_seq_free (seq);
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
/* Similarly for ROOT and all functions nested underneath, depth first.  */

walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
                    struct nesting_info *root)
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    walk_function (callback_stmt, callback_op, n);
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here return indicating whether
   FNDECL has such a nested function.  ORIG_FN is the function we were
   trying to inline; use it for checking whether any argument is variably
   modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
  struct cgraph_node *cgn = cgraph_node (fndecl);

  for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
        if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))

      if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
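
/* An illustrative sketch of the case the check above guards against (the
   function names are made up): in

       void enclosing (int n)
       {
         void nested (int (*a)[n]) { ... }   // parameter type depends on N
         ...
       }

   the type of A is variably modified by N, which lives in ENCLOSING.  If
   ENCLOSING were inlined, N would be remapped but the PARM_DECL type of
   NESTED would not, so DECL_UNINLINABLE is set on ENCLOSING instead.  */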
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = pointer_map_create ();
  info->var_map = pointer_map_create ();
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->next = info->inner;

  /* See the discussion at check_for_nested_with_variably_modified for
     why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before TSI.  */

get_static_chain (struct nesting_info *info, tree target_context,
                  gimple_stmt_iterator *gsi)
  struct nesting_info *i;

  if (info->context == target_context)
      x = build_addr (info->frame_decl, target_context);

      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
          tree field = get_chain_field (i);

          x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
          x = init_tmp_var (info, x, gsi);
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

get_frame_field (struct nesting_info *info, tree target_context,
                 tree field, gimple_stmt_iterator *gsi)
  struct nesting_info *i;

  if (info->context == target_context)
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;

      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
          tree field = get_chain_field (i);

          x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
          x = init_tmp_var (info, x, gsi);

      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OpenMP lowering.  */

get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
  struct nesting_info *i;
  tree x, field, new_decl;

  slot = pointer_map_insert (info->var_map, decl);

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;

      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
          field = get_chain_field (i);
          x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
                         VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  TREE_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */
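
/* As an illustrative sketch (the temporaries D.1 and D.2 are named here only
   for the example), a read of FOO two levels up is emitted roughly as

       D.1 = CHAIN->__chain;
       D.2 = D.1->FOO;

   and, when use_pointer_in_frame is true for FOO, one more load through the
   pointer field follows.  */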
899 convert_nonlocal_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
901 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
902 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
906 switch (TREE_CODE (t
))
909 /* Non-automatic variables are never processed. */
910 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
915 if (decl_function_context (t
) != info
->context
)
920 x
= get_nonlocal_debug_decl (info
, t
);
921 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
923 tree target_context
= decl_function_context (t
);
924 struct nesting_info
*i
;
925 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
927 x
= lookup_field_for_decl (i
, t
, INSERT
);
928 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
929 if (use_pointer_in_frame (t
))
931 x
= init_tmp_var (info
, x
, &wi
->gsi
);
932 x
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (x
)), x
);
939 x
= save_tmp_var (info
, x
, &wi
->gsi
);
941 x
= init_tmp_var (info
, x
, &wi
->gsi
);
      /* We're taking the address of a label from a parent function, but
         this is not itself a non-local goto.  Mark the label such that it
         will not be deleted, much as we would with a label address in
         static storage.  */
953 if (decl_function_context (t
) != info
->context
)
954 FORCED_LABEL (t
) = 1;
959 bool save_val_only
= wi
->val_only
;
961 wi
->val_only
= false;
964 walk_tree (&TREE_OPERAND (t
, 0), convert_nonlocal_reference_op
, wi
, 0);
971 /* If we changed anything, we might no longer be directly
972 referencing a decl. */
973 save_context
= current_function_decl
;
974 current_function_decl
= info
->context
;
975 recompute_tree_invariant_for_addr_expr (t
);
976 current_function_decl
= save_context
;
978 /* If the callback converted the address argument in a context
979 where we only accept variables (and min_invariant, presumably),
980 then compute the address into a temporary. */
982 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
992 case ARRAY_RANGE_REF
:
994 /* Go down this entire nest and just look at the final prefix and
995 anything that describes the references. Otherwise, we lose track
996 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
999 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1001 if (TREE_CODE (t
) == COMPONENT_REF
)
1002 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
, wi
,
1004 else if (TREE_CODE (t
) == ARRAY_REF
1005 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1007 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1009 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1011 walk_tree (&TREE_OPERAND (t
, 3), convert_nonlocal_reference_op
,
1014 else if (TREE_CODE (t
) == BIT_FIELD_REF
)
1016 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1018 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1022 wi
->val_only
= false;
1023 walk_tree (tp
, convert_nonlocal_reference_op
, wi
, NULL
);
1026 case VIEW_CONVERT_EXPR
:
1027 /* Just request to look at the subtrees, leaving val_only and lhs
1028 untouched. This might actually be for !val_only + lhs, in which
1029 case we don't want to force a replacement by a temporary. */
1034 if (!IS_TYPE_OR_DECL_P (t
))
1037 wi
->val_only
= true;
1046 static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator
*, bool *,
1047 struct walk_stmt_info
*);
1049 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1050 and PARM_DECLs that belong to outer functions. */
1053 convert_nonlocal_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1055 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1056 bool need_chain
= false, need_stmts
= false;
1059 bitmap new_suppress
;
1061 new_suppress
= BITMAP_GGC_ALLOC ();
1062 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1064 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1066 switch (OMP_CLAUSE_CODE (clause
))
1068 case OMP_CLAUSE_REDUCTION
:
1069 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1071 goto do_decl_clause
;
1073 case OMP_CLAUSE_LASTPRIVATE
:
1074 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1076 goto do_decl_clause
;
1078 case OMP_CLAUSE_PRIVATE
:
1079 case OMP_CLAUSE_FIRSTPRIVATE
:
1080 case OMP_CLAUSE_COPYPRIVATE
:
1081 case OMP_CLAUSE_SHARED
:
1083 decl
= OMP_CLAUSE_DECL (clause
);
1084 if (TREE_CODE (decl
) == VAR_DECL
1085 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1087 if (decl_function_context (decl
) != info
->context
)
1089 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1090 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1091 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1096 case OMP_CLAUSE_SCHEDULE
:
1097 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1101 case OMP_CLAUSE_NUM_THREADS
:
1102 wi
->val_only
= true;
1104 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1108 case OMP_CLAUSE_NOWAIT
:
1109 case OMP_CLAUSE_ORDERED
:
1110 case OMP_CLAUSE_DEFAULT
:
1111 case OMP_CLAUSE_COPYIN
:
1112 case OMP_CLAUSE_COLLAPSE
:
1113 case OMP_CLAUSE_UNTIED
:
1121 info
->suppress_expansion
= new_suppress
;
1124 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1125 switch (OMP_CLAUSE_CODE (clause
))
1127 case OMP_CLAUSE_REDUCTION
:
1128 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1131 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1132 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1134 walk_body (convert_nonlocal_reference_stmt
,
1135 convert_nonlocal_reference_op
, info
,
1136 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1137 walk_body (convert_nonlocal_reference_stmt
,
1138 convert_nonlocal_reference_op
, info
,
1139 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1140 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1145 case OMP_CLAUSE_LASTPRIVATE
:
1146 walk_body (convert_nonlocal_reference_stmt
,
1147 convert_nonlocal_reference_op
, info
,
1148 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1158 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1161 note_nonlocal_vla_type (struct nesting_info
*info
, tree type
)
1163 while (POINTER_TYPE_P (type
) && !TYPE_NAME (type
))
1164 type
= TREE_TYPE (type
);
1166 if (TYPE_NAME (type
)
1167 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1168 && DECL_ORIGINAL_TYPE (TYPE_NAME (type
)))
1169 type
= DECL_ORIGINAL_TYPE (TYPE_NAME (type
));
1171 while (POINTER_TYPE_P (type
)
1172 || TREE_CODE (type
) == VECTOR_TYPE
1173 || TREE_CODE (type
) == FUNCTION_TYPE
1174 || TREE_CODE (type
) == METHOD_TYPE
)
1175 type
= TREE_TYPE (type
);
1177 if (TREE_CODE (type
) == ARRAY_TYPE
)
1181 note_nonlocal_vla_type (info
, TREE_TYPE (type
));
1182 domain
= TYPE_DOMAIN (type
);
1185 t
= TYPE_MIN_VALUE (domain
);
1186 if (t
&& (TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
1187 && decl_function_context (t
) != info
->context
)
1188 get_nonlocal_debug_decl (info
, t
);
1189 t
= TYPE_MAX_VALUE (domain
);
1190 if (t
&& (TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
1191 && decl_function_context (t
) != info
->context
)
1192 get_nonlocal_debug_decl (info
, t
);
/* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
   in BLOCK.  */
1201 note_nonlocal_block_vlas (struct nesting_info
*info
, tree block
)
1205 for (var
= BLOCK_VARS (block
); var
; var
= TREE_CHAIN (var
))
1206 if (TREE_CODE (var
) == VAR_DECL
1207 && variably_modified_type_p (TREE_TYPE (var
), NULL
)
1208 && DECL_HAS_VALUE_EXPR_P (var
)
1209 && decl_function_context (var
) != info
->context
)
1210 note_nonlocal_vla_type (info
, TREE_TYPE (var
));
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */
1221 convert_nonlocal_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1222 struct walk_stmt_info
*wi
)
1224 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1225 tree save_local_var_chain
;
1226 bitmap save_suppress
;
1227 gimple stmt
= gsi_stmt (*gsi
);
1229 switch (gimple_code (stmt
))
1232 /* Don't walk non-local gotos for now. */
1233 if (TREE_CODE (gimple_goto_dest (stmt
)) != LABEL_DECL
)
1235 wi
->val_only
= true;
1237 *handled_ops_p
= true;
1242 case GIMPLE_OMP_PARALLEL
:
1243 case GIMPLE_OMP_TASK
:
1244 save_suppress
= info
->suppress_expansion
;
1245 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1249 decl
= get_chain_decl (info
);
1250 c
= build_omp_clause (gimple_location (stmt
),
1251 OMP_CLAUSE_FIRSTPRIVATE
);
1252 OMP_CLAUSE_DECL (c
) = decl
;
1253 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1254 gimple_omp_taskreg_set_clauses (stmt
, c
);
1257 save_local_var_chain
= info
->new_local_var_chain
;
1258 info
->new_local_var_chain
= NULL
;
1260 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1261 info
, gimple_omp_body (stmt
));
1263 if (info
->new_local_var_chain
)
1264 declare_vars (info
->new_local_var_chain
,
1265 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1267 info
->new_local_var_chain
= save_local_var_chain
;
1268 info
->suppress_expansion
= save_suppress
;
1271 case GIMPLE_OMP_FOR
:
1272 save_suppress
= info
->suppress_expansion
;
1273 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1274 walk_gimple_omp_for (stmt
, convert_nonlocal_reference_stmt
,
1275 convert_nonlocal_reference_op
, info
);
1276 walk_body (convert_nonlocal_reference_stmt
,
1277 convert_nonlocal_reference_op
, info
, gimple_omp_body (stmt
));
1278 info
->suppress_expansion
= save_suppress
;
1281 case GIMPLE_OMP_SECTIONS
:
1282 save_suppress
= info
->suppress_expansion
;
1283 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1284 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1285 info
, gimple_omp_body (stmt
));
1286 info
->suppress_expansion
= save_suppress
;
1289 case GIMPLE_OMP_SINGLE
:
1290 save_suppress
= info
->suppress_expansion
;
1291 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1292 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1293 info
, gimple_omp_body (stmt
));
1294 info
->suppress_expansion
= save_suppress
;
1297 case GIMPLE_OMP_SECTION
:
1298 case GIMPLE_OMP_MASTER
:
1299 case GIMPLE_OMP_ORDERED
:
1300 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1301 info
, gimple_omp_body (stmt
));
1305 if (!optimize
&& gimple_bind_block (stmt
))
1306 note_nonlocal_block_vlas (info
, gimple_bind_block (stmt
));
1308 *handled_ops_p
= false;
1312 wi
->val_only
= true;
1314 *handled_ops_p
= false;
1318 /* For every other statement that we are not interested in
1319 handling here, let the walker traverse the operands. */
1320 *handled_ops_p
= false;
1324 /* We have handled all of STMT operands, no need to traverse the operands. */
1325 *handled_ops_p
= true;
/* A subroutine of convert_local_reference.  Create a local variable
   in the parent function with DECL_VALUE_EXPR set to reference the
   field in FRAME.  This is used both for debug info and in OpenMP
   lowering.  */
1336 get_local_debug_decl (struct nesting_info
*info
, tree decl
, tree field
)
1341 slot
= pointer_map_insert (info
->var_map
, decl
);
1343 return (tree
) *slot
;
1345 /* Make sure frame_decl gets created. */
1346 (void) get_frame_type (info
);
1347 x
= info
->frame_decl
;
1348 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1350 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
1351 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1352 DECL_CONTEXT (new_decl
) = info
->context
;
1353 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1354 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1355 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1356 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1357 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1358 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1359 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1360 if ((TREE_CODE (decl
) == PARM_DECL
1361 || TREE_CODE (decl
) == RESULT_DECL
1362 || TREE_CODE (decl
) == VAR_DECL
)
1363 && DECL_BY_REFERENCE (decl
))
1364 DECL_BY_REFERENCE (new_decl
) = 1;
1366 SET_DECL_VALUE_EXPR (new_decl
, x
);
1367 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1370 TREE_CHAIN (new_decl
) = info
->debug_var_chain
;
1371 info
->debug_var_chain
= new_decl
;
1373 /* Do not emit debug info twice. */
1374 DECL_IGNORED_P (decl
) = 1;
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1387 convert_local_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1389 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1390 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1391 tree t
= *tp
, field
, x
;
1395 switch (TREE_CODE (t
))
1398 /* Non-automatic variables are never processed. */
1399 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1404 if (decl_function_context (t
) == info
->context
)
1406 /* If we copied a pointer to the frame, then the original decl
1407 is used unchanged in the parent function. */
1408 if (use_pointer_in_frame (t
))
      /* No need to transform anything if no child references the
         variable.  */
1413 field
= lookup_field_for_decl (info
, t
, NO_INSERT
);
1418 x
= get_local_debug_decl (info
, t
, field
);
1419 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1420 x
= get_frame_field (info
, info
->context
, field
, &wi
->gsi
);
1425 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1427 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1435 save_val_only
= wi
->val_only
;
1436 wi
->val_only
= false;
1438 wi
->changed
= false;
1439 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
, wi
, NULL
);
1440 wi
->val_only
= save_val_only
;
1442 /* If we converted anything ... */
1447 /* Then the frame decl is now addressable. */
1448 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
1450 save_context
= current_function_decl
;
1451 current_function_decl
= info
->context
;
1452 recompute_tree_invariant_for_addr_expr (t
);
1453 current_function_decl
= save_context
;
1455 /* If we are in a context where we only accept values, then
1456 compute the address into a temporary. */
1458 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1467 case ARRAY_RANGE_REF
:
1469 /* Go down this entire nest and just look at the final prefix and
1470 anything that describes the references. Otherwise, we lose track
1471 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1472 save_val_only
= wi
->val_only
;
1473 wi
->val_only
= true;
1475 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1477 if (TREE_CODE (t
) == COMPONENT_REF
)
1478 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1480 else if (TREE_CODE (t
) == ARRAY_REF
1481 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1483 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1485 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1487 walk_tree (&TREE_OPERAND (t
, 3), convert_local_reference_op
, wi
,
1490 else if (TREE_CODE (t
) == BIT_FIELD_REF
)
1492 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1494 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1498 wi
->val_only
= false;
1499 walk_tree (tp
, convert_local_reference_op
, wi
, NULL
);
1500 wi
->val_only
= save_val_only
;
1503 case VIEW_CONVERT_EXPR
:
1504 /* Just request to look at the subtrees, leaving val_only and lhs
1505 untouched. This might actually be for !val_only + lhs, in which
1506 case we don't want to force a replacement by a temporary. */
1511 if (!IS_TYPE_OR_DECL_P (t
))
1514 wi
->val_only
= true;
1523 static tree
convert_local_reference_stmt (gimple_stmt_iterator
*, bool *,
1524 struct walk_stmt_info
*);
/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */
1530 convert_local_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1532 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1533 bool need_frame
= false, need_stmts
= false;
1536 bitmap new_suppress
;
1538 new_suppress
= BITMAP_GGC_ALLOC ();
1539 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1541 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1543 switch (OMP_CLAUSE_CODE (clause
))
1545 case OMP_CLAUSE_REDUCTION
:
1546 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1548 goto do_decl_clause
;
1550 case OMP_CLAUSE_LASTPRIVATE
:
1551 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1553 goto do_decl_clause
;
1555 case OMP_CLAUSE_PRIVATE
:
1556 case OMP_CLAUSE_FIRSTPRIVATE
:
1557 case OMP_CLAUSE_COPYPRIVATE
:
1558 case OMP_CLAUSE_SHARED
:
1560 decl
= OMP_CLAUSE_DECL (clause
);
1561 if (TREE_CODE (decl
) == VAR_DECL
1562 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1564 if (decl_function_context (decl
) == info
->context
1565 && !use_pointer_in_frame (decl
))
1567 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1570 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1571 OMP_CLAUSE_DECL (clause
)
1572 = get_local_debug_decl (info
, decl
, field
);
1578 case OMP_CLAUSE_SCHEDULE
:
1579 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1583 case OMP_CLAUSE_NUM_THREADS
:
1584 wi
->val_only
= true;
1586 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0), &dummy
,
1590 case OMP_CLAUSE_NOWAIT
:
1591 case OMP_CLAUSE_ORDERED
:
1592 case OMP_CLAUSE_DEFAULT
:
1593 case OMP_CLAUSE_COPYIN
:
1594 case OMP_CLAUSE_COLLAPSE
:
1595 case OMP_CLAUSE_UNTIED
:
1603 info
->suppress_expansion
= new_suppress
;
1606 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1607 switch (OMP_CLAUSE_CODE (clause
))
1609 case OMP_CLAUSE_REDUCTION
:
1610 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1613 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1614 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1616 walk_body (convert_local_reference_stmt
,
1617 convert_local_reference_op
, info
,
1618 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1619 walk_body (convert_local_reference_stmt
,
1620 convert_local_reference_op
, info
,
1621 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1622 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1627 case OMP_CLAUSE_LASTPRIVATE
:
1628 walk_body (convert_local_reference_stmt
,
1629 convert_local_reference_op
, info
,
1630 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */
1646 convert_local_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1647 struct walk_stmt_info
*wi
)
1649 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1650 tree save_local_var_chain
;
1651 bitmap save_suppress
;
1652 gimple stmt
= gsi_stmt (*gsi
);
1654 switch (gimple_code (stmt
))
1656 case GIMPLE_OMP_PARALLEL
:
1657 case GIMPLE_OMP_TASK
:
1658 save_suppress
= info
->suppress_expansion
;
1659 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1663 (void) get_frame_type (info
);
1664 c
= build_omp_clause (gimple_location (stmt
),
1666 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
1667 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1668 gimple_omp_taskreg_set_clauses (stmt
, c
);
1671 save_local_var_chain
= info
->new_local_var_chain
;
1672 info
->new_local_var_chain
= NULL
;
1674 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
1675 gimple_omp_body (stmt
));
1677 if (info
->new_local_var_chain
)
1678 declare_vars (info
->new_local_var_chain
,
1679 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
1680 info
->new_local_var_chain
= save_local_var_chain
;
1681 info
->suppress_expansion
= save_suppress
;
1684 case GIMPLE_OMP_FOR
:
1685 save_suppress
= info
->suppress_expansion
;
1686 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1687 walk_gimple_omp_for (stmt
, convert_local_reference_stmt
,
1688 convert_local_reference_op
, info
);
1689 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1690 info
, gimple_omp_body (stmt
));
1691 info
->suppress_expansion
= save_suppress
;
1694 case GIMPLE_OMP_SECTIONS
:
1695 save_suppress
= info
->suppress_expansion
;
1696 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1697 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1698 info
, gimple_omp_body (stmt
));
1699 info
->suppress_expansion
= save_suppress
;
1702 case GIMPLE_OMP_SINGLE
:
1703 save_suppress
= info
->suppress_expansion
;
1704 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1705 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1706 info
, gimple_omp_body (stmt
));
1707 info
->suppress_expansion
= save_suppress
;
1710 case GIMPLE_OMP_SECTION
:
1711 case GIMPLE_OMP_MASTER
:
1712 case GIMPLE_OMP_ORDERED
:
1713 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1714 info
, gimple_omp_body (stmt
));
1718 wi
->val_only
= true;
1720 *handled_ops_p
= false;
1724 /* For every other statement that we are not interested in
1725 handling here, let the walker traverse the operands. */
1726 *handled_ops_p
= false;
1730 /* Indicate that we have handled all the operands ourselves. */
1731 *handled_ops_p
= true;
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */
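
/* An illustrative sketch of the rewrite (names made up for the example):
   a nested function doing "goto out_label", where OUT_LABEL belongs to an
   enclosing function, becomes roughly

       __builtin_nonlocal_goto (&NEW_OUT_LABEL, &CHAIN->__nl_goto_buf);

   where NEW_OUT_LABEL is the DECL_NONLOCAL label installed next to the
   original label by convert_nl_goto_receiver below, and __nl_goto_buf is
   the field created by get_nl_goto_field.  */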
1741 convert_nl_goto_reference (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1742 struct walk_stmt_info
*wi
)
1744 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
1745 tree label
, new_label
, target_context
, x
, field
;
1748 gimple stmt
= gsi_stmt (*gsi
);
1750 if (gimple_code (stmt
) != GIMPLE_GOTO
)
1752 *handled_ops_p
= false;
1756 label
= gimple_goto_dest (stmt
);
1757 if (TREE_CODE (label
) != LABEL_DECL
)
1759 *handled_ops_p
= false;
1763 target_context
= decl_function_context (label
);
1764 if (target_context
== info
->context
)
1766 *handled_ops_p
= false;
1770 for (i
= info
->outer
; target_context
!= i
->context
; i
= i
->outer
)
  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
1780 slot
= pointer_map_insert (i
->var_map
, label
);
1783 new_label
= create_artificial_label (UNKNOWN_LOCATION
);
1784 DECL_NONLOCAL (new_label
) = 1;
1788 new_label
= (tree
) *slot
;
1790 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
1791 field
= get_nl_goto_field (i
);
1792 x
= get_frame_field (info
, target_context
, field
, &wi
->gsi
);
1793 x
= build_addr (x
, target_context
);
1794 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
1795 call
= gimple_build_call (implicit_built_in_decls
[BUILT_IN_NONLOCAL_GOTO
], 2,
1796 build_addr (new_label
, target_context
), x
);
1797 gsi_replace (&wi
->gsi
, call
, false);
1799 /* We have handled all of STMT's operands, no need to keep going. */
1800 *handled_ops_p
= true;
/* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
   are referenced via nonlocal goto from a nested function.  The rewrite
   will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  */
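
/* A sketch of the receiver rewrite (LAB and NONLOCAL_LAB are illustrative
   names): for a label LAB that is the target of a nonlocal goto, the pass
   emits, in place of "LAB:",

       goto LAB;            // only if the previous statement may fall thru
       NONLOCAL_LAB:        // DECL_NONLOCAL; receiver code attaches here
       LAB:

   so ordinary control flow branches around the nonlocal-goto receiver
   gunk.  */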
1812 convert_nl_goto_receiver (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1813 struct walk_stmt_info
*wi
)
1815 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1816 tree label
, new_label
;
1817 gimple_stmt_iterator tmp_gsi
;
1819 gimple stmt
= gsi_stmt (*gsi
);
1821 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1823 *handled_ops_p
= false;
1827 label
= gimple_label_label (stmt
);
1829 slot
= pointer_map_contains (info
->var_map
, label
);
1832 *handled_ops_p
= false;
1836 /* If there's any possibility that the previous statement falls through,
1837 then we must branch around the new non-local label. */
1839 gsi_prev (&tmp_gsi
);
1840 if (gsi_end_p (tmp_gsi
) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi
)))
1842 gimple stmt
= gimple_build_goto (label
);
1843 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
1846 new_label
= (tree
) *slot
;
1847 stmt
= gimple_build_label (new_label
);
1848 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
1850 *handled_ops_p
= true;
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */
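
/* An illustrative sketch of when this fires (names made up): given

       void outer (void)
       {
         int x;
         int inner (void) { return x; }   // uses the static chain
         take_callback (&inner);          // address escapes as a plain pointer
       }

   a plain code pointer has nowhere to carry the static chain, so "&inner"
   is rewritten to the address of a trampoline stored in OUTER's frame, as
   the T.1/T.2/T.3 comment inside the function below shows.  */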
1860 convert_tramp_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1862 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1863 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
1864 tree t
= *tp
, decl
, target_context
, x
, builtin
;
1868 switch (TREE_CODE (t
))
1872 T.1 = &CHAIN->tramp;
1873 T.2 = __builtin_adjust_trampoline (T.1);
1874 T.3 = (func_type)T.2;
1877 decl
= TREE_OPERAND (t
, 0);
1878 if (TREE_CODE (decl
) != FUNCTION_DECL
)
1881 /* Only need to process nested functions. */
1882 target_context
= decl_function_context (decl
);
1883 if (!target_context
)
1886 /* If the nested function doesn't use a static chain, then
1887 it doesn't need a trampoline. */
1888 if (!DECL_STATIC_CHAIN (decl
))
1891 /* If we don't want a trampoline, then don't build one. */
1892 if (TREE_NO_TRAMPOLINE (t
))
1895 /* Lookup the immediate parent of the callee, as that's where
1896 we need to insert the trampoline. */
1897 for (i
= info
; i
->context
!= target_context
; i
= i
->outer
)
1899 x
= lookup_tramp_for_decl (i
, decl
, INSERT
);
1901 /* Compute the address of the field holding the trampoline. */
1902 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
1903 x
= build_addr (x
, target_context
);
1904 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
1906 /* Do machine-specific ugliness. Normally this will involve
1907 computing extra alignment, but it can really be anything. */
1908 builtin
= implicit_built_in_decls
[BUILT_IN_ADJUST_TRAMPOLINE
];
1909 call
= gimple_build_call (builtin
, 1, x
);
1910 x
= init_tmp_var_with_call (info
, &wi
->gsi
, call
);
1912 /* Cast back to the proper function type. */
1913 x
= build1 (NOP_EXPR
, TREE_TYPE (t
), x
);
1914 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1920 if (!IS_TYPE_OR_DECL_P (t
))
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */
1935 convert_tramp_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1936 struct walk_stmt_info
*wi
)
1938 gimple stmt
= gsi_stmt (*gsi
);
1940 switch (gimple_code (stmt
))
      /* Only walk call arguments, lest we generate trampolines for
         direct calls.  */
1946 unsigned long i
, nargs
= gimple_call_num_args (stmt
);
1947 for (i
= 0; i
< nargs
; i
++)
1948 walk_tree (gimple_call_arg_ptr (stmt
, i
), convert_tramp_reference_op
,
1951 *handled_ops_p
= true;
1959 *handled_ops_p
= false;
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */
1970 convert_gimple_call (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1971 struct walk_stmt_info
*wi
)
1973 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1974 tree decl
, target_context
;
1975 char save_static_chain_added
;
1977 gimple stmt
= gsi_stmt (*gsi
);
1979 switch (gimple_code (stmt
))
1982 if (gimple_call_chain (stmt
))
1984 decl
= gimple_call_fndecl (stmt
);
1987 target_context
= decl_function_context (decl
);
1988 if (target_context
&& DECL_STATIC_CHAIN (decl
))
1990 gimple_call_set_chain (stmt
, get_static_chain (info
, target_context
,
1992 info
->static_chain_added
|= (1 << (info
->context
!= target_context
));
1996 case GIMPLE_OMP_PARALLEL
:
1997 case GIMPLE_OMP_TASK
:
1998 save_static_chain_added
= info
->static_chain_added
;
1999 info
->static_chain_added
= 0;
2000 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body (stmt
));
2001 for (i
= 0; i
< 2; i
++)
2004 if ((info
->static_chain_added
& (1 << i
)) == 0)
2006 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2007 /* Don't add CHAIN.* or FRAME.* twice. */
2008 for (c
= gimple_omp_taskreg_clauses (stmt
);
2010 c
= OMP_CLAUSE_CHAIN (c
))
2011 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2012 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2013 && OMP_CLAUSE_DECL (c
) == decl
)
2017 c
= build_omp_clause (gimple_location (stmt
),
2018 i
? OMP_CLAUSE_FIRSTPRIVATE
2019 : OMP_CLAUSE_SHARED
);
2020 OMP_CLAUSE_DECL (c
) = decl
;
2021 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2022 gimple_omp_taskreg_set_clauses (stmt
, c
);
2025 info
->static_chain_added
|= save_static_chain_added
;
2028 case GIMPLE_OMP_FOR
:
2029 walk_body (convert_gimple_call
, NULL
, info
,
2030 gimple_omp_for_pre_body (stmt
));
2032 case GIMPLE_OMP_SECTIONS
:
2033 case GIMPLE_OMP_SECTION
:
2034 case GIMPLE_OMP_SINGLE
:
2035 case GIMPLE_OMP_MASTER
:
2036 case GIMPLE_OMP_ORDERED
:
2037 case GIMPLE_OMP_CRITICAL
:
2038 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body (stmt
));
2042 /* Keep looking for other operands. */
2043 *handled_ops_p
= false;
2047 *handled_ops_p
= true;
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */
2056 convert_all_function_calls (struct nesting_info
*root
)
2058 struct nesting_info
*n
;
2062 /* First, optimistically clear static_chain for all decls that haven't
2063 used the static chain already for variable access. */
2064 FOR_EACH_NEST_INFO (n
, root
)
2066 tree decl
= n
->context
;
2067 if (!n
->outer
|| (!n
->chain_decl
&& !n
->chain_field
))
2069 DECL_STATIC_CHAIN (decl
) = 0;
2070 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2071 fprintf (dump_file
, "Guessing no static-chain for %s\n",
2072 lang_hooks
.decl_printable_name (decl
, 2));
2075 DECL_STATIC_CHAIN (decl
) = 1;
2078 /* Walk the functions and perform transformations. Note that these
2079 transformations can induce new uses of the static chain, which in turn
2080 require re-examining all users of the decl. */
2081 /* ??? It would make sense to try to use the call graph to speed this up,
2082 but the call graph hasn't really been built yet. Even if it did, we
2083 would still need to iterate in this loop since address-of references
2084 wouldn't show up in the callgraph anyway. */
2088 any_changed
= false;
2091 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2092 fputc ('\n', dump_file
);
2094 FOR_EACH_NEST_INFO (n
, root
)
2096 tree decl
= n
->context
;
2097 bool old_static_chain
= DECL_STATIC_CHAIN (decl
);
2099 walk_function (convert_tramp_reference_stmt
,
2100 convert_tramp_reference_op
, n
);
2101 walk_function (convert_gimple_call
, NULL
, n
);
2103 /* If a call to another function created the use of a chain
2104 within this function, we'll have to continue iteration. */
2105 if (!old_static_chain
&& DECL_STATIC_CHAIN (decl
))
2109 while (any_changed
);
2111 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2112 fprintf (dump_file
, "convert_all_function_calls iterations: %d\n\n",
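
/* An illustrative case (hypothetical nesting) of why the loop above has to
   iterate: let outer () contain siblings f () and g (), where f () merely
   takes the address of g () and g () reads a variable of outer ().  The
   trampoline for g () lives in outer's frame, so converting the address-of
   inside f () makes f () reach through a static chain it did not appear to
   need on the first pass, and every caller of f () must then be revisited
   to pass that chain.  The loop stops once no DECL_STATIC_CHAIN flag
   changes during a full sweep.  */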

struct nesting_copy_body_data
{
  copy_body_data cb;
  struct nesting_info *root;
};

/* A helper subroutine for debug_var_chain type remapping.  */

static tree
nesting_copy_decl (tree decl, copy_body_data *id)
{
  struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
  void **slot = pointer_map_contains (nid->root->var_map, decl);

  if (slot)
    return (tree) *slot;

  if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
    {
      tree new_decl = copy_decl_no_change (decl, id);
      DECL_ORIGINAL_TYPE (new_decl)
        = remap_type (DECL_ORIGINAL_TYPE (decl), id);
      return new_decl;
    }

  if (TREE_CODE (decl) == VAR_DECL
      || TREE_CODE (decl) == PARM_DECL
      || TREE_CODE (decl) == RESULT_DECL)
    return decl;

  return copy_decl_no_change (decl, id);
}

/* A helper function for remap_vla_decls.  See if *TP contains
   some remapped variables.  */

static tree
contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
{
  struct nesting_info *root = (struct nesting_info *) data;
  tree t = *tp;
  void **slot;

  if (DECL_P (t))
    {
      *walk_subtrees = 0;
      slot = pointer_map_contains (root->var_map, t);

      if (slot)
        return (tree) *slot;
    }
  return NULL;
}

/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
    {
      if (TREE_CODE (var) == VAR_DECL
          && variably_modified_type_p (TREE_TYPE (var), NULL)
          && DECL_HAS_VALUE_EXPR_P (var))
        {
          type = TREE_TYPE (var);
          val = DECL_VALUE_EXPR (var);
          if (walk_tree (&type, contains_remapped_vars, root, NULL) != NULL
              || walk_tree (&val, contains_remapped_vars, root, NULL) != NULL)
            break;
        }
    }
  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = pointer_map_create ();
  id.root = root;

  for (; var; var = TREE_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL
        && variably_modified_type_p (TREE_TYPE (var), NULL)
        && DECL_HAS_VALUE_EXPR_P (var))
      {
        struct nesting_info *i;
        tree newt, t, context;

        t = type = TREE_TYPE (var);
        val = DECL_VALUE_EXPR (var);
        if (walk_tree (&type, contains_remapped_vars, root, NULL) == NULL
            && walk_tree (&val, contains_remapped_vars, root, NULL) == NULL)
          continue;

        context = decl_function_context (var);
        for (i = root; i; i = i->outer)
          if (i->context == context)
            break;

        if (i == NULL)
          continue;

        id.cb.src_fn = i->context;
        id.cb.dst_fn = i->context;
        id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

        TREE_TYPE (var) = newt = remap_type (type, &id.cb);
        while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
          {
            newt = TREE_TYPE (newt);
            t = TREE_TYPE (t);
          }
        if (TYPE_NAME (newt)
            && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
            && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
            && newt != t
            && TYPE_NAME (newt) == TYPE_NAME (t))
          TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

        walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
        if (val != DECL_VALUE_EXPR (var))
          SET_DECL_VALUE_EXPR (var, val);
      }

  pointer_map_destroy (id.cb.decl_map);
}
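
/* A hedged sketch of the situation handled above (hypothetical names): if
   outer () declares "char buf[n];" and a nested function uses buf, both
   buf's storage and the saved bound can end up as frame fields.  The
   original buf keeps a DECL_VALUE_EXPR pointing into the frame, but its
   variably modified type may still mention the old, now-remapped bound;
   walking the type and value expression through nesting_copy_decl rewrites
   those references to the frame copies so the debug view stays coherent.  */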

/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
         This is not helpful; suppress it.  */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
         that we can declare it also in the lexical blocks, which
         helps ensure virtual regs that end up appearing in its RTL
         expression get substituted in instantiate_virtual_regs().  */
      for (adjust = &root->new_local_var_chain;
           *adjust != root->frame_decl;
           adjust = &TREE_CHAIN (*adjust))
        gcc_assert (TREE_CHAIN (*adjust));
      *adjust = TREE_CHAIN (*adjust);

      TREE_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
                    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p; p = TREE_CHAIN (p))
        {
          tree field, x, y;

          field = lookup_field_for_decl (root, p, NO_INSERT);
          if (!field)
            continue;

          if (use_pointer_in_frame (p))
            x = build_addr (p, context);
          else
            x = p;

          y = build3 (COMPONENT_REF, TREE_TYPE (field),
                      root->frame_decl, field, NULL_TREE);
          stmt = gimple_build_assign (y, x);
          gimple_seq_add_stmt (&stmt_list, stmt);
          /* If the assignment is from a non-register the stmt is
             not valid gimple.  Make it so by using a temporary instead.  */
          if (!is_gimple_reg (x)
              && is_gimple_reg_type (TREE_TYPE (x)))
            {
              gimple_stmt_iterator gsi = gsi_last (stmt_list);
              x = init_tmp_var (root, x, &gsi);
              gimple_assign_set_rhs1 (stmt, x);
            }
        }
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
                       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i; i = i->next)
        {
          tree arg1, arg2, arg3, x, field;

          field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
          if (!field)
            continue;

          gcc_assert (DECL_STATIC_CHAIN (i->context));
          arg3 = build_addr (root->frame_decl, context);

          arg2 = build_addr (i->context, context);

          x = build3 (COMPONENT_REF, TREE_TYPE (field),
                      root->frame_decl, field, NULL_TREE);
          arg1 = build_addr (x, context);

          x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
          stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
          gimple_seq_add_stmt (&stmt_list, stmt);
        }
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gimple bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
        = get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
                  gimple_seq_first_stmt (gimple_body (root->context)),
                  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gimple scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
           debug_var = TREE_CHAIN (debug_var))
        if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
          break;

      /* If there are any debug decls with variable length types,
         remap those types using other debug_var_chain variables.  */
      if (debug_var)
        {
          struct nesting_copy_body_data id;

          memset (&id, 0, sizeof (id));
          id.cb.copy_decl = nesting_copy_decl;
          id.cb.decl_map = pointer_map_create ();
          id.root = root;

          for (; debug_var; debug_var = TREE_CHAIN (debug_var))
            if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
              {
                tree type = TREE_TYPE (debug_var);
                tree newt, t = type;
                struct nesting_info *i;

                for (i = root; i; i = i->outer)
                  if (variably_modified_type_p (type, i->context))
                    break;

                if (i == NULL)
                  continue;

                id.cb.src_fn = i->context;
                id.cb.dst_fn = i->context;
                id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

                TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
                while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
                  {
                    newt = TREE_TYPE (newt);
                    t = TREE_TYPE (t);
                  }
                if (TYPE_NAME (newt)
                    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
                    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
                    && newt != t
                    && TYPE_NAME (newt) == TYPE_NAME (t))
                  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
              }

          pointer_map_destroy (id.cb.decl_map);
        }

      scope = gimple_seq_first_stmt (gimple_body (root->context));
      if (gimple_bind_block (scope))
        declare_vars (root->debug_var_chain, scope, true);
      else
        BLOCK_VARS (DECL_INITIAL (root->context))
          = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
                     root->debug_var_chain);
    }

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
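
/* For orientation, a rough sketch (not literal output of this pass; the
   field and variable names are made up) of the statements prepended above
   to an outer function that had one parameter p remapped, a chain field,
   and one trampoline for a nested function inner:

       FRAME.p = p;
       FRAME.CHAIN = CHAIN;
       __builtin_init_trampoline (&FRAME.TRAMP, &inner, &FRAME);

   followed by the original body of the outermost GIMPLE_BIND.  */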

static void
finalize_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    finalize_nesting_tree_1 (n);
}

/* Unnest the nodes and pass them to cgraph.  */

static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
  struct cgraph_node *node = cgraph_node (root->context);

  /* For nested functions update the cgraph to reflect unnesting.
     We also delay finalizing of these functions up to this point.  */
  if (node->origin)
    {
      cgraph_unnest_node (cgraph_node (root->context));
      cgraph_finalize_function (root->context, true);
    }
}

static void
unnest_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    unnest_nesting_tree_1 (n);
}

/* Free the data structures allocated during this pass.  */

static void
free_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *node, *next;

  node = iter_nestinfo_start (root);
  do
    {
      next = iter_nestinfo_next (node);
      pointer_map_destroy (node->var_map);
      pointer_map_destroy (node->field_map);
      free (node);
      node = next;
    }
  while (node);
}

/* Gimplify a function and all its nested functions.  */

static void
gimplify_all_functions (struct cgraph_node *root)
{
  struct cgraph_node *iter;
  if (!gimple_body (root->decl))
    gimplify_function_tree (root->decl);
  for (iter = root->nested; iter; iter = iter->next_nested)
    gimplify_all_functions (iter);
}

/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node (fndecl);
  if (!cgn->nested)
    return;

  gimplify_all_functions (cgn);

  dump_file = dump_begin (TDI_nested, &dump_flags);
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
             lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  walk_all_functions (convert_nonlocal_reference_stmt,
                      convert_nonlocal_reference_op,
                      root);
  walk_all_functions (convert_local_reference_stmt,
                      convert_local_reference_op,
                      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      dump_file = NULL;
    }
}

#include "gt-tree-nested.h"