/* Nested function decomposition for GIMPLE.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "langhooks.h"
#include "pointer-set.h"
/* The object of this pass is to lower the representation of a set of nested
   functions in order to expose all of the gory details of the various
   nonlocal references.  We want to do this sooner rather than later, in
   order to give us more freedom in emitting all of the functions in question.

   Back in olden times, when gcc was young, we developed an insanely
   complicated scheme whereby variables which were referenced nonlocally
   were forced to live in the stack of the declaring function, and then
   the nested functions magically discovered where these variables were
   placed.  In order for this scheme to function properly, it required
   that the outer function be partially expanded, then we switch to
   compiling the inner function, and once done with those we switch back
   to compiling the outer function.  Such delicate ordering requirements
   make it difficult to do whole translation unit optimizations
   involving such functions.

   The implementation here is much more direct.  Everything that can be
   referenced by an inner function is a member of an explicitly created
   structure herein called the "nonlocal frame struct".  The incoming
   static chain for a nested function is a pointer to this struct in
   the parent.  In this way, we settle on known offsets from a known
   base, and so are decoupled from the logic that places objects in the
   function's stack frame.  More importantly, we don't have to wait for
   that to happen -- since the compilation of the inner function is no
   longer tied to a real stack frame, the nonlocal frame struct can be
   allocated anywhere.  Which means that the outer function is now
   inlinable.

   Theory of operation here is very simple.  Iterate over all the
   statements in all the functions (depth first) several times,
   allocating structures and fields on demand.  In general we want to
   examine inner functions first, so that we can avoid making changes
   to outer functions which are unnecessary.

   The order of the passes matters a bit, in that later passes will be
   skipped if it is discovered that the functions don't actually interact
   at all.  That is, they're nested in the lexical sense but could have
   been written as independent functions without change.  */
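
/* An illustrative sketch (added for exposition; the identifiers below are
   invented and do not appear in the sources).  Given

       int foo (int a)
       {
         int bar (void) { return a; }
         return bar ();
       }

   this pass conceptually gives foo a frame object

       struct FRAME_foo { int a; } FRAME;

   and rewrites bar's use of A as CHAIN->a, where CHAIN is bar's incoming
   static chain pointer, pointing at foo's FRAME instance.  */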
struct nesting_info
{
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  struct pointer_map_t *field_map;
  struct pointer_map_t *var_map;
  bitmap suppress_expansion;

  tree context;
  tree new_local_var_chain;
  tree debug_var_chain;
  tree frame_type;
  tree frame_decl;
  tree chain_field;
  tree chain_decl;
  tree nl_goto_field;

  bool any_parm_remapped;
  bool any_tramp_created;
  char static_chain_added;
};
/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;
/* We're working in so many different function contexts simultaneously,
   that create_tmp_var is dangerous.  Prevent mishap.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy
/* Like create_tmp_var, except record the variable for registration at
   the given nesting level.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  TREE_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}
/* Take the address of EXP to be used within function CONTEXT.
   Mark it for addressability as necessary.  */

tree
build_addr (tree exp, tree context)
{
  tree base = exp;
  tree save_context;
  tree retval;

  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  if (DECL_P (base))
    TREE_ADDRESSABLE (base) = 1;

  /* Building the ADDR_EXPR will compute a set of properties for
     that ADDR_EXPR.  Those properties are unfortunately context
     specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.

     Temporarily set CURRENT_FUNCTION_DECL to the desired context,
     build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
     way the properties for the ADDR_EXPR are computed properly.  */
  save_context = current_function_decl;
  current_function_decl = context;
  retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
  current_function_decl = save_context;
  return retval;
}
/* Insert FIELD into TYPE, sorted by alignment requirements.  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  TREE_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    TYPE_ALIGN (type) = DECL_ALIGN (field);
}
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}
/* Return true if DECL should be referenced by pointer in the non-local
   frame structure.  */

static bool
use_pointer_in_frame (tree decl)
{
  if (TREE_CODE (decl) == PARM_DECL)
    {
      /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
	 sized decls, and inefficient to copy large aggregates.  Don't bother
	 moving anything but scalar variables.  */
      return AGGREGATE_TYPE_P (TREE_TYPE (decl));
    }
  else
    {
      /* Variable sized types make things "interesting" in the frame.  */
      return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
    }
}
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  void **slot;

  if (insert == NO_INSERT)
    {
      slot = pointer_map_contains (info->field_map, decl);
      return slot ? (tree) *slot : NULL_TREE;
    }

  slot = pointer_map_insert (info->field_map, decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  DECL_ALIGN (field) = DECL_ALIGN (decl);
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return (tree) *slot;
}
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is
	 closer to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;
    }
  return decl;
}
/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;
  if (!field)
    {
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;
    }
  return field;
}
/* Initialize a new temporary with the GIMPLE_CALL CALL.  */

static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
			gimple call)
{
  tree t;

  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}
/* Copy EXP into a temporary.  Allocate the temporary in the context of
   INFO and insert the initialization statement before GSI.  */

static tree
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (t, exp);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
/* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */

static tree
gsi_gimplify_val (struct nesting_info *info, tree exp,
		  gimple_stmt_iterator *gsi)
{
  if (is_gimple_val (exp))
    return exp;
  else
    return init_tmp_var (info, exp, gsi);
}
/* Similarly, but copy from the temporary and insert the statement
   after the iterator.  */

static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (exp, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
/* Build or return the type used to represent a nested function trampoline.  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  t = build_index_type (build_int_cst (NULL_TREE, size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  void **slot;

  if (insert == NO_INSERT)
    {
      slot = pointer_map_contains (info->var_map, decl);
      return slot ? (tree) *slot : NULL_TREE;
    }

  slot = pointer_map_insert (info->var_map, decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);
      TREE_TYPE (field) = get_trampoline_type (info);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      info->any_tramp_created = true;
    }

  return (tree) *slot;
}
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);

      type = build_array_type
	(type, build_index_type (build_int_cst (NULL_TREE, size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of GIMPLE sequence
   SEQ.  */

static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	   struct nesting_info *info, gimple_seq seq)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.val_only = true;
  walk_gimple_seq (seq, callback_stmt, callback_op, &wi);
}
/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */

static inline void
walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	       struct nesting_info *info)
{
  walk_body (callback_stmt, callback_op, info, gimple_body (info->context));
}
/* Invoke CALLBACK_STMT/CALLBACK_OP on a GIMPLE_OMP_FOR's init, cond, incr
   and pre-body.  */

static void
walk_gimple_omp_for (gimple for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body (for_stmt));

  seq = gimple_seq_alloc ();
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* If any statements were issued, append them to the pre-body.  */
  if (gimple_seq_empty_p (seq))
    gimple_seq_free (seq);
  else
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
/* Similarly for ROOT and all functions nested underneath, depth first.  */

static void
walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		    struct nesting_info *root)
{
  while (root)
    {
      if (root->inner)
	walk_all_functions (callback_stmt, callback_op, root->inner);
      walk_function (callback_stmt, callback_op, root);
      root = root->next;
    }
}
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here return indicating whether
   FNDECL has such a nested function.  ORIG_FNDECL is the function we were
   trying to inline to use for checking whether any argument is variably
   modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node (fndecl);
  tree arg;

  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
	return true;
    }

  return false;
}
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = pointer_map_create ();
  info->var_map = pointer_map_create ();
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See check_for_nested_with_variably_modified for a discussion of why
     this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
    }
  else
    {
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
    }

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OpenMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;
  void **slot;

  slot = pointer_map_insert (info->var_map, decl);

  if (*slot)
    return (tree) *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  TREE_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
		}
	    }

	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
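
/* A sketch of the generated GIMPLE for a read of a variable X that lives
   two levels up (temporaries invented):

       D.1 = CHAIN->__chain;
       D.2 = D.1->x;
       ... use D.2 ...

   with one extra dereference of the loaded pointer when
   use_pointer_in_frame (X) is true.  */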
static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
					     struct walk_stmt_info *);
/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					 &dummy, wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
/* Create nonlocal debug decls for nonlocal VLA array bounds.  */

static void
note_nonlocal_vla_type (struct nesting_info *info, tree type)
{
  while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
    type = TREE_TYPE (type);

  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));

  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == VECTOR_TYPE
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE)
    type = TREE_TYPE (type);

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree domain, t;

      note_nonlocal_vla_type (info, TREE_TYPE (type));
      domain = TYPE_DOMAIN (type);
      if (domain)
	{
	  t = TYPE_MIN_VALUE (domain);
	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	  t = TYPE_MAX_VALUE (domain);
	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	      && decl_function_context (t) != info->context)
	    get_nonlocal_debug_decl (info, t);
	}
    }
}
/* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
   in BLOCK.  */

static void
note_nonlocal_block_vlas (struct nesting_info *info, tree block)
{
  tree var;

  for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL
	&& variably_modified_type_p (TREE_TYPE (var), NULL)
	&& DECL_HAS_VALUE_EXPR_P (var)
	&& decl_function_context (var) != info->context)
      note_nonlocal_vla_type (info, TREE_TYPE (var));
}
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = true;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
			   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
		 convert_nonlocal_reference_op, info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body (stmt));
      break;

    case GIMPLE_BIND:
      if (!optimize && gimple_bind_block (stmt))
	note_nonlocal_block_vlas (info, gimple_bind_block (stmt));

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT's operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
/* A subroutine of convert_local_reference.  Create a local variable
   in the parent function with DECL_VALUE_EXPR set to reference the
   field in FRAME.  This is used both for debug info and in OpenMP
   lowering.  */

static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;
  void **slot;

  slot = pointer_map_insert (info->var_map, decl);
  if (*slot)
    return (tree) *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  TREE_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
					  struct walk_stmt_info *);
/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
				     wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (stmt, convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      break;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  void **slot;
  gimple call;
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  slot = pointer_map_insert (i->var_map, label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = (tree) *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, &wi->gsi);
  x = build_addr (x, target_context);
  x = gsi_gimplify_val (info, x, &wi->gsi);
  call = gimple_build_call (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
			    build_addr (new_label, target_context), x);
  gsi_replace (&wi->gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
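
/* Sketch of the rewrite performed above (labels invented): a statement
   "goto <outer_lab>;" in the nested function becomes roughly

       __builtin_nonlocal_goto (&<nonlocal_lab>, &CHAIN->__nl_goto_buf);

   where <nonlocal_lab> is the DECL_NONLOCAL label recorded in var_map and
   later inserted next to <outer_lab> by convert_nl_goto_receiver.  */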
/* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
   are referenced via nonlocal goto from a nested function.  The rewrite
   will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  */

static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  void **slot;
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) != GIMPLE_LABEL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (stmt);

  slot = pointer_map_contains (info->var_map, label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = *gsi;
  gsi_prev (&tmp_gsi);
  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
    {
      gimple stmt = gimple_build_goto (label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  new_label = (tree) *slot;
  stmt = gimple_build_label (new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}
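
/* Sketch of the receiver side in the outer function (labels invented):

       ...
       goto <orig_lab>;      <- only if the previous stmt may fall through
     <nonlocal_lab>:         <- DECL_NONLOCAL landing pad
     <orig_lab>:
       ...
*/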
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  gimple call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (DECL_NO_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x, target_context);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      builtin = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      *tp = x;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);

	*handled_ops_p = true;
	return NULL_TREE;
      }

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && !DECL_NO_STATIC_CHAIN (decl))
	{
	  gimple_call_set_chain (stmt, get_static_chain (info, target_context,
							 &wi->gsi));
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
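
/* Sketch: a call from the parent to its nested function BAR is annotated,
   via gimple_call_set_chain, roughly as

       bar (args) [static chain: &FRAME];

   for deeper callees the chain operand is whatever expression
   get_static_chain computed into a temporary.  Names are illustrative.  */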
/* Walk the nesting tree starting with ROOT, depth first.  Convert all
   trampolines and call expressions.  On the way back up, determine if
   a nested function actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  while (root)
    {
      if (root->inner)
	convert_all_function_calls (root->inner);

      walk_function (convert_tramp_reference_stmt, convert_tramp_reference_op,
		     root);
      walk_function (convert_gimple_call, NULL, root);

      /* If the function does not use a static chain, then remember that.  */
      if (root->outer && !root->chain_decl && !root->chain_field)
	DECL_NO_STATIC_CHAIN (root->context) = 1;
      else
	gcc_assert (!DECL_NO_STATIC_CHAIN (root->context));

      root = root->next;
    }
}
struct nesting_copy_body_data
{
  copy_body_data cb;
  struct nesting_info *root;
};

/* A helper subroutine for debug_var_chain type remapping.  */

static tree
nesting_copy_decl (tree decl, copy_body_data *id)
{
  struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
  void **slot = pointer_map_contains (nid->root->var_map, decl);

  if (slot)
    return (tree) *slot;

  if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
    {
      tree new_decl = copy_decl_no_change (decl, id);
      DECL_ORIGINAL_TYPE (new_decl)
	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
      return new_decl;
    }

  if (TREE_CODE (decl) == VAR_DECL
      || TREE_CODE (decl) == PARM_DECL
      || TREE_CODE (decl) == RESULT_DECL)
    return decl;

  return copy_decl_no_change (decl, id);
}
/* A helper function for remap_vla_decls.  See if *TP contains
   some remapped variables.  */

static tree
contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
{
  struct nesting_info *root = (struct nesting_info *) data;
  tree t = *tp;

  if (DECL_P (t))
    {
      void **slot;

      *walk_subtrees = 0;
      slot = pointer_map_contains (root->var_map, t);

      if (slot)
	return (tree) *slot;
    }
  return NULL;
}

/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
    {
      if (TREE_CODE (var) == VAR_DECL
	  && variably_modified_type_p (TREE_TYPE (var), NULL)
	  && DECL_HAS_VALUE_EXPR_P (var))
	{
	  type = TREE_TYPE (var);
	  val = DECL_VALUE_EXPR (var);
	  if (walk_tree (&type, contains_remapped_vars, root, NULL) != NULL
	      || walk_tree (&val, contains_remapped_vars, root, NULL) != NULL)
	    break;
	}
    }
  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = pointer_map_create ();
  id.root = root;

  for (; var; var = TREE_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL
	&& variably_modified_type_p (TREE_TYPE (var), NULL)
	&& DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, t, context;

	t = type = TREE_TYPE (var);
	val = DECL_VALUE_EXPR (var);
	if (walk_tree (&type, contains_remapped_vars, root, NULL) == NULL
	    && walk_tree (&val, contains_remapped_vars, root, NULL) == NULL)
	  continue;

	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    t = TREE_TYPE (t);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != t
	    && TYPE_NAME (newt) == TYPE_NAME (t))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  pointer_map_destroy (id.cb.decl_map);
}
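
/* Illustrative sketch, not part of the pass: remap_vla_decls matters for
   code along the lines of

     void outer (int n)
     {
       char buf[n];
       int inner (void) { return n; }
       buf[0] = (char) inner ();
     }

   Since INNER reads N, N is moved into OUTER's frame object and OUTER's own
   references to it are rewritten and recorded in root->var_map.  BUF's
   variably modified type and its DECL_VALUE_EXPR, however, still mention the
   original N, so the walk above remaps them via nesting_copy_decl and
   remap_type to refer to the frame copy.  The variable names are
   hypothetical.  */
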

/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it.  */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &TREE_CHAIN (*adjust))
	gcc_assert (TREE_CHAIN (*adjust));
      *adjust = TREE_CHAIN (*adjust);

      TREE_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p; p = TREE_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	      gimple_assign_set_rhs1 (stmt, x);
	    }
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  if (DECL_NO_STATIC_CHAIN (i->context))
	    arg3 = null_pointer_node;
	  else
	    arg3 = build_addr (root->frame_decl, context);

	  arg2 = build_addr (i->context, context);

	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gimple bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gimple scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = TREE_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = pointer_map_create ();
	  id.root = root;

	  for (; debug_var; debug_var = TREE_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  pointer_map_destroy (id.cb.decl_map);
	}

      scope = gimple_seq_first_stmt (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Dump the translated tree function.  */
  dump_function (TDI_nested, root->context);
}
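
/* Illustrative sketch, not part of the pass: for an OUTER whose nested
   function reads a parameter, whose frame must record the incoming static
   chain, and whose nested function has its address taken, the statements
   assembled above amount to a prologue of roughly this shape at the start
   of OUTER's body:

     FRAME.1.x = x;                            <- any_parm_remapped
     FRAME.1.CHAIN = CHAIN.2;                  <- chain_field (when OUTER is
                                                  itself nested)
     __builtin_init_trampoline (&FRAME.1.TRAMP.3, &inner, &FRAME.1);
                                               <- any_tramp_created

   followed by the original function body, which the code above splices in
   by prepending stmt_list to the outermost GIMPLE_BIND.  The names FRAME.1,
   CHAIN.2 and TRAMP.3 mimic the compiler-generated decls but are only
   illustrative.  */
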

static void
finalize_nesting_tree (struct nesting_info *root)
{
  do
    {
      if (root->inner)
	finalize_nesting_tree (root->inner);
      finalize_nesting_tree_1 (root);
      root = root->next;
    }
  while (root);
}

/* Unnest the nodes and pass them to cgraph.  */

static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
  struct cgraph_node *node = cgraph_node (root->context);

  /* For nested functions update the cgraph to reflect unnesting.
     We also delay finalizing of these functions up to this point.  */
  if (node->origin)
    {
      cgraph_unnest_node (cgraph_node (root->context));
      cgraph_finalize_function (root->context, true);
    }
}

static void
unnest_nesting_tree (struct nesting_info *root)
{
  do
    {
      if (root->inner)
	unnest_nesting_tree (root->inner);
      unnest_nesting_tree_1 (root);
      root = root->next;
    }
  while (root);
}

/* Free the data structures allocated during this pass.  */

static void
free_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *next;
  do
    {
      if (root->inner)
	free_nesting_tree (root->inner);
      pointer_map_destroy (root->var_map);
      pointer_map_destroy (root->field_map);
      next = root->next;
      free (root);
      root = next;
    }
  while (root);
}

/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node (fndecl);
  if (!cgn->nested)
    return;

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);
  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);
  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);
}
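
/* Illustrative sketch, not part of the pass: for

     int outer (int x)
     {
       int inner (void) { return x; }
       int (*fp) (void) = inner;
       return fp () + inner ();
     }

   the walks issued above divide the work roughly as follows: the nonlocal
   walk rewrites INNER's use of X into a load through INNER's static chain;
   the local walk rewrites OUTER's own references to anything that moved
   into the frame object; the nl_goto walks would handle nonlocal gotos
   (none here); convert_all_function_calls adds the static chain to the
   direct call and converts taking INNER's address into a trampoline
   reference; and finalize_nesting_tree emits the frame, chain and
   trampoline setup sketched earlier in this file.  The example is
   hypothetical.  */
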

#include "gt-tree-nested.h"