1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 #include "stringpool.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "tree-dump.h"
35 #include "tree-inline.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
46 /* The object of this pass is to lower the representation of a set of nested
47 functions in order to expose all of the gory details of the various
48 nonlocal references. We want to do this sooner rather than later, in
49 order to give us more freedom in emitting all of the functions in question.
51 Back in olden times, when gcc was young, we developed an insanely
52 complicated scheme whereby variables which were referenced nonlocally
53 were forced to live in the stack of the declaring function, and then
54 the nested functions magically discovered where these variables were
55 placed. In order for this scheme to function properly, it required
56 that the outer function be partially expanded, then we switch to
57 compiling the inner function, and once done with those we switch back
58 to compiling the outer function. Such delicate ordering requirements
59 makes it difficult to do whole translation unit optimizations
60 involving such functions.
62 The implementation here is much more direct. Everything that can be
63 referenced by an inner function is a member of an explicitly created
64 structure herein called the "nonlocal frame struct". The incoming
65 static chain for a nested function is a pointer to this struct in
66 the parent. In this way, we settle on known offsets from a known
67 base, and so are decoupled from the logic that places objects in the
68 function's stack frame. More importantly, we don't have to wait for
69 that to happen -- since the compilation of the inner function is no
70 longer tied to a real stack frame, the nonlocal frame struct can be
71 allocated anywhere. Which means that the outer function is now
74 Theory of operation here is very simple. Iterate over all the
75 statements in all the functions (depth first) several times,
76 allocating structures and fields on demand. In general we want to
77 examine inner functions first, so that we can avoid making changes
78 to outer functions which are unnecessary.
80 The order of the passes matters a bit, in that later passes will be
81 skipped if it is discovered that the functions don't actually interact
82 at all. That is, they're nested in the lexical sense but could have
83 been written as independent functions without change. */
88 struct nesting_info
*outer
;
89 struct nesting_info
*inner
;
90 struct nesting_info
*next
;
92 hash_map
<tree
, tree
> *field_map
;
93 hash_map
<tree
, tree
> *var_map
;
94 hash_set
<tree
*> *mem_refs
;
95 bitmap suppress_expansion
;
98 tree new_local_var_chain
;
106 bool any_parm_remapped
;
107 bool any_tramp_created
;
108 bool any_descr_created
;
109 char static_chain_added
;
113 /* Iterate over the nesting tree, starting with ROOT, depth first. */
115 static inline struct nesting_info
*
116 iter_nestinfo_start (struct nesting_info
*root
)
123 static inline struct nesting_info
*
124 iter_nestinfo_next (struct nesting_info
*node
)
127 return iter_nestinfo_start (node
->next
);
131 #define FOR_EACH_NEST_INFO(I, ROOT) \
132 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
134 /* Obstack used for the bitmaps in the struct above. */
135 static struct bitmap_obstack nesting_info_bitmap_obstack
;
/* We're working in so many different function contexts simultaneously,
   that create_tmp_var is dangerous.  Prevent mishap by poisoning the
   name: any accidental use fails to compile.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy
142 /* Like create_tmp_var, except record the variable for registration at
143 the given nesting level. */
146 create_tmp_var_for (struct nesting_info
*info
, tree type
, const char *prefix
)
150 /* If the type is of variable size or a type which must be created by the
151 frontend, something is wrong. Note that we explicitly allow
152 incomplete types here, since we create them ourselves here. */
153 gcc_assert (!TREE_ADDRESSABLE (type
));
154 gcc_assert (!TYPE_SIZE_UNIT (type
)
155 || TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
);
157 tmp_var
= create_tmp_var_raw (type
, prefix
);
158 DECL_CONTEXT (tmp_var
) = info
->context
;
159 DECL_CHAIN (tmp_var
) = info
->new_local_var_chain
;
160 DECL_SEEN_IN_BIND_EXPR_P (tmp_var
) = 1;
161 if (TREE_CODE (type
) == COMPLEX_TYPE
162 || TREE_CODE (type
) == VECTOR_TYPE
)
163 DECL_GIMPLE_REG_P (tmp_var
) = 1;
165 info
->new_local_var_chain
= tmp_var
;
170 /* Take the address of EXP to be used within function CONTEXT.
171 Mark it for addressability as necessary. */
174 build_addr (tree exp
)
176 mark_addressable (exp
);
177 return build_fold_addr_expr (exp
);
180 /* Insert FIELD into TYPE, sorted by alignment requirements. */
183 insert_field_into_struct (tree type
, tree field
)
187 DECL_CONTEXT (field
) = type
;
189 for (p
= &TYPE_FIELDS (type
); *p
; p
= &DECL_CHAIN (*p
))
190 if (DECL_ALIGN (field
) >= DECL_ALIGN (*p
))
193 DECL_CHAIN (field
) = *p
;
196 /* Set correct alignment for frame struct type. */
197 if (TYPE_ALIGN (type
) < DECL_ALIGN (field
))
198 SET_TYPE_ALIGN (type
, DECL_ALIGN (field
));
201 /* Build or return the RECORD_TYPE that describes the frame state that is
202 shared between INFO->CONTEXT and its nested functions. This record will
203 not be complete until finalize_nesting_tree; up until that point we'll
204 be adding fields as necessary.
206 We also build the DECL that represents this frame in the function. */
209 get_frame_type (struct nesting_info
*info
)
211 tree type
= info
->frame_type
;
216 type
= make_node (RECORD_TYPE
);
218 name
= concat ("FRAME.",
219 IDENTIFIER_POINTER (DECL_NAME (info
->context
)),
221 TYPE_NAME (type
) = get_identifier (name
);
224 info
->frame_type
= type
;
225 info
->frame_decl
= create_tmp_var_for (info
, type
, "FRAME");
226 DECL_NONLOCAL_FRAME (info
->frame_decl
) = 1;
228 /* ??? Always make it addressable for now, since it is meant to
229 be pointed to by the static chain pointer. This pessimizes
230 when it turns out that no static chains are needed because
231 the nested functions referencing non-local variables are not
232 reachable, but the true pessimization is to create the non-
233 local frame structure in the first place. */
234 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
239 /* Return true if DECL should be referenced by pointer in the non-local
243 use_pointer_in_frame (tree decl
)
245 if (TREE_CODE (decl
) == PARM_DECL
)
247 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
248 sized decls, and inefficient to copy large aggregates. Don't bother
249 moving anything but scalar variables. */
250 return AGGREGATE_TYPE_P (TREE_TYPE (decl
));
254 /* Variable sized types make things "interesting" in the frame. */
255 return DECL_SIZE (decl
) == NULL
|| !TREE_CONSTANT (DECL_SIZE (decl
));
259 /* Given DECL, a non-locally accessed variable, find or create a field
260 in the non-local frame structure for the given nesting context. */
263 lookup_field_for_decl (struct nesting_info
*info
, tree decl
,
264 enum insert_option insert
)
266 if (insert
== NO_INSERT
)
268 tree
*slot
= info
->field_map
->get (decl
);
269 return slot
? *slot
: NULL_TREE
;
272 tree
*slot
= &info
->field_map
->get_or_insert (decl
);
275 tree field
= make_node (FIELD_DECL
);
276 DECL_NAME (field
) = DECL_NAME (decl
);
278 if (use_pointer_in_frame (decl
))
280 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
281 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
282 DECL_NONADDRESSABLE_P (field
) = 1;
286 TREE_TYPE (field
) = TREE_TYPE (decl
);
287 DECL_SOURCE_LOCATION (field
) = DECL_SOURCE_LOCATION (decl
);
288 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
289 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
290 TREE_ADDRESSABLE (field
) = TREE_ADDRESSABLE (decl
);
291 DECL_NONADDRESSABLE_P (field
) = !TREE_ADDRESSABLE (decl
);
292 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
295 insert_field_into_struct (get_frame_type (info
), field
);
298 if (TREE_CODE (decl
) == PARM_DECL
)
299 info
->any_parm_remapped
= true;
305 /* Build or return the variable that holds the static chain within
306 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
309 get_chain_decl (struct nesting_info
*info
)
311 tree decl
= info
->chain_decl
;
317 type
= get_frame_type (info
->outer
);
318 type
= build_pointer_type (type
);
320 /* Note that this variable is *not* entered into any BIND_EXPR;
321 the construction of this variable is handled specially in
322 expand_function_start and initialize_inlined_parameters.
323 Note also that it's represented as a parameter. This is more
324 close to the truth, since the initial value does come from
326 decl
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
327 PARM_DECL
, create_tmp_var_name ("CHAIN"), type
);
328 DECL_ARTIFICIAL (decl
) = 1;
329 DECL_IGNORED_P (decl
) = 1;
330 TREE_USED (decl
) = 1;
331 DECL_CONTEXT (decl
) = info
->context
;
332 DECL_ARG_TYPE (decl
) = type
;
334 /* Tell tree-inline.c that we never write to this variable, so
335 it can copy-prop the replacement value immediately. */
336 TREE_READONLY (decl
) = 1;
338 info
->chain_decl
= decl
;
341 && (dump_flags
& TDF_DETAILS
)
342 && !DECL_STATIC_CHAIN (info
->context
))
343 fprintf (dump_file
, "Setting static-chain for %s\n",
344 lang_hooks
.decl_printable_name (info
->context
, 2));
346 DECL_STATIC_CHAIN (info
->context
) = 1;
351 /* Build or return the field within the non-local frame state that holds
352 the static chain for INFO->CONTEXT. This is the way to walk back up
353 multiple nesting levels. */
356 get_chain_field (struct nesting_info
*info
)
358 tree field
= info
->chain_field
;
362 tree type
= build_pointer_type (get_frame_type (info
->outer
));
364 field
= make_node (FIELD_DECL
);
365 DECL_NAME (field
) = get_identifier ("__chain");
366 TREE_TYPE (field
) = type
;
367 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
368 DECL_NONADDRESSABLE_P (field
) = 1;
370 insert_field_into_struct (get_frame_type (info
), field
);
372 info
->chain_field
= field
;
375 && (dump_flags
& TDF_DETAILS
)
376 && !DECL_STATIC_CHAIN (info
->context
))
377 fprintf (dump_file
, "Setting static-chain for %s\n",
378 lang_hooks
.decl_printable_name (info
->context
, 2));
380 DECL_STATIC_CHAIN (info
->context
) = 1;
385 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
388 init_tmp_var_with_call (struct nesting_info
*info
, gimple_stmt_iterator
*gsi
,
393 t
= create_tmp_var_for (info
, gimple_call_return_type (call
), NULL
);
394 gimple_call_set_lhs (call
, t
);
395 if (! gsi_end_p (*gsi
))
396 gimple_set_location (call
, gimple_location (gsi_stmt (*gsi
)));
397 gsi_insert_before (gsi
, call
, GSI_SAME_STMT
);
403 /* Copy EXP into a temporary. Allocate the temporary in the context of
404 INFO and insert the initialization statement before GSI. */
407 init_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
412 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
413 stmt
= gimple_build_assign (t
, exp
);
414 if (! gsi_end_p (*gsi
))
415 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
416 gsi_insert_before_without_update (gsi
, stmt
, GSI_SAME_STMT
);
422 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
425 gsi_gimplify_val (struct nesting_info
*info
, tree exp
,
426 gimple_stmt_iterator
*gsi
)
428 if (is_gimple_val (exp
))
431 return init_tmp_var (info
, exp
, gsi
);
434 /* Similarly, but copy from the temporary and insert the statement
435 after the iterator. */
438 save_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
443 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
444 stmt
= gimple_build_assign (exp
, t
);
445 if (! gsi_end_p (*gsi
))
446 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
447 gsi_insert_after_without_update (gsi
, stmt
, GSI_SAME_STMT
);
452 /* Build or return the type used to represent a nested function trampoline. */
454 static GTY(()) tree trampoline_type
;
457 get_trampoline_type (struct nesting_info
*info
)
459 unsigned align
, size
;
463 return trampoline_type
;
465 align
= TRAMPOLINE_ALIGNMENT
;
466 size
= TRAMPOLINE_SIZE
;
468 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
469 then allocate extra space so that we can do dynamic alignment. */
470 if (align
> STACK_BOUNDARY
)
472 size
+= ((align
/BITS_PER_UNIT
) - 1) & -(STACK_BOUNDARY
/BITS_PER_UNIT
);
473 align
= STACK_BOUNDARY
;
476 t
= build_index_type (size_int (size
- 1));
477 t
= build_array_type (char_type_node
, t
);
478 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
479 FIELD_DECL
, get_identifier ("__data"), t
);
480 SET_DECL_ALIGN (t
, align
);
481 DECL_USER_ALIGN (t
) = 1;
483 trampoline_type
= make_node (RECORD_TYPE
);
484 TYPE_NAME (trampoline_type
) = get_identifier ("__builtin_trampoline");
485 TYPE_FIELDS (trampoline_type
) = t
;
486 layout_type (trampoline_type
);
487 DECL_CONTEXT (t
) = trampoline_type
;
489 return trampoline_type
;
492 /* Build or return the type used to represent a nested function descriptor. */
494 static GTY(()) tree descriptor_type
;
497 get_descriptor_type (struct nesting_info
*info
)
502 return descriptor_type
;
504 t
= build_index_type (integer_one_node
);
505 t
= build_array_type (ptr_type_node
, t
);
506 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
507 FIELD_DECL
, get_identifier ("__data"), t
);
509 descriptor_type
= make_node (RECORD_TYPE
);
510 TYPE_NAME (descriptor_type
) = get_identifier ("__builtin_descriptor");
511 TYPE_FIELDS (descriptor_type
) = t
;
512 layout_type (descriptor_type
);
513 DECL_CONTEXT (t
) = descriptor_type
;
515 return descriptor_type
;
518 /* Given DECL, a nested function, find or create an element in the
519 var map for this function. */
522 lookup_element_for_decl (struct nesting_info
*info
, tree decl
,
523 enum insert_option insert
)
525 if (insert
== NO_INSERT
)
527 tree
*slot
= info
->var_map
->get (decl
);
528 return slot
? *slot
: NULL_TREE
;
531 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
533 *slot
= build_tree_list (NULL_TREE
, NULL_TREE
);
538 /* Given DECL, a nested function, create a field in the non-local
539 frame structure for this function. */
542 create_field_for_decl (struct nesting_info
*info
, tree decl
, tree type
)
544 tree field
= make_node (FIELD_DECL
);
545 DECL_NAME (field
) = DECL_NAME (decl
);
546 TREE_TYPE (field
) = type
;
547 TREE_ADDRESSABLE (field
) = 1;
548 insert_field_into_struct (get_frame_type (info
), field
);
552 /* Given DECL, a nested function, find or create a field in the non-local
553 frame structure for a trampoline for this function. */
556 lookup_tramp_for_decl (struct nesting_info
*info
, tree decl
,
557 enum insert_option insert
)
561 elt
= lookup_element_for_decl (info
, decl
, insert
);
565 field
= TREE_PURPOSE (elt
);
567 if (!field
&& insert
== INSERT
)
569 field
= create_field_for_decl (info
, decl
, get_trampoline_type (info
));
570 TREE_PURPOSE (elt
) = field
;
571 info
->any_tramp_created
= true;
577 /* Given DECL, a nested function, find or create a field in the non-local
578 frame structure for a descriptor for this function. */
581 lookup_descr_for_decl (struct nesting_info
*info
, tree decl
,
582 enum insert_option insert
)
586 elt
= lookup_element_for_decl (info
, decl
, insert
);
590 field
= TREE_VALUE (elt
);
592 if (!field
&& insert
== INSERT
)
594 field
= create_field_for_decl (info
, decl
, get_descriptor_type (info
));
595 TREE_VALUE (elt
) = field
;
596 info
->any_descr_created
= true;
602 /* Build or return the field within the non-local frame state that holds
603 the non-local goto "jmp_buf". The buffer itself is maintained by the
604 rtl middle-end as dynamic stack space is allocated. */
607 get_nl_goto_field (struct nesting_info
*info
)
609 tree field
= info
->nl_goto_field
;
615 /* For __builtin_nonlocal_goto, we need N words. The first is the
616 frame pointer, the rest is for the target's stack pointer save
617 area. The number of words is controlled by STACK_SAVEAREA_MODE;
618 not the best interface, but it'll do for now. */
619 if (Pmode
== ptr_mode
)
620 type
= ptr_type_node
;
622 type
= lang_hooks
.types
.type_for_mode (Pmode
, 1);
624 size
= GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
));
625 size
= size
/ GET_MODE_SIZE (Pmode
);
628 type
= build_array_type
629 (type
, build_index_type (size_int (size
)));
631 field
= make_node (FIELD_DECL
);
632 DECL_NAME (field
) = get_identifier ("__nl_goto_buf");
633 TREE_TYPE (field
) = type
;
634 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
635 TREE_ADDRESSABLE (field
) = 1;
637 insert_field_into_struct (get_frame_type (info
), field
);
639 info
->nl_goto_field
= field
;
645 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
648 walk_body (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
649 struct nesting_info
*info
, gimple_seq
*pseq
)
651 struct walk_stmt_info wi
;
653 memset (&wi
, 0, sizeof (wi
));
656 walk_gimple_seq_mod (pseq
, callback_stmt
, callback_op
, &wi
);
660 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
663 walk_function (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
664 struct nesting_info
*info
)
666 gimple_seq body
= gimple_body (info
->context
);
667 walk_body (callback_stmt
, callback_op
, info
, &body
);
668 gimple_set_body (info
->context
, body
);
671 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
674 walk_gimple_omp_for (gomp_for
*for_stmt
,
675 walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
676 struct nesting_info
*info
)
678 struct walk_stmt_info wi
;
683 walk_body (callback_stmt
, callback_op
, info
, gimple_omp_for_pre_body_ptr (for_stmt
));
686 memset (&wi
, 0, sizeof (wi
));
688 wi
.gsi
= gsi_last (seq
);
690 for (i
= 0; i
< gimple_omp_for_collapse (for_stmt
); i
++)
693 walk_tree (gimple_omp_for_index_ptr (for_stmt
, i
), callback_op
,
697 walk_tree (gimple_omp_for_initial_ptr (for_stmt
, i
), callback_op
,
702 walk_tree (gimple_omp_for_final_ptr (for_stmt
, i
), callback_op
,
705 t
= gimple_omp_for_incr (for_stmt
, i
);
706 gcc_assert (BINARY_CLASS_P (t
));
708 walk_tree (&TREE_OPERAND (t
, 0), callback_op
, &wi
, NULL
);
711 walk_tree (&TREE_OPERAND (t
, 1), callback_op
, &wi
, NULL
);
714 seq
= gsi_seq (wi
.gsi
);
715 if (!gimple_seq_empty_p (seq
))
717 gimple_seq pre_body
= gimple_omp_for_pre_body (for_stmt
);
718 annotate_all_with_location (seq
, gimple_location (for_stmt
));
719 gimple_seq_add_seq (&pre_body
, seq
);
720 gimple_omp_for_set_pre_body (for_stmt
, pre_body
);
724 /* Similarly for ROOT and all functions nested underneath, depth first. */
727 walk_all_functions (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
728 struct nesting_info
*root
)
730 struct nesting_info
*n
;
731 FOR_EACH_NEST_INFO (n
, root
)
732 walk_function (callback_stmt
, callback_op
, n
);
736 /* We have to check for a fairly pathological case. The operands of function
737 nested function are to be interpreted in the context of the enclosing
738 function. So if any are variably-sized, they will get remapped when the
739 enclosing function is inlined. But that remapping would also have to be
740 done in the types of the PARM_DECLs of the nested function, meaning the
741 argument types of that function will disagree with the arguments in the
742 calls to that function. So we'd either have to make a copy of the nested
743 function corresponding to each time the enclosing function was inlined or
744 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
745 function. The former is not practical. The latter would still require
746 detecting this case to know when to add the conversions. So, for now at
747 least, we don't inline such an enclosing function.
749 We have to do that check recursively, so here return indicating whether
750 FNDECL has such a nested function. ORIG_FN is the function we were
751 trying to inline to use for checking whether any argument is variably
752 modified by anything in it.
754 It would be better to do this in tree-inline.c so that we could give
755 the appropriate warning for why a function can't be inlined, but that's
756 too late since the nesting structure has already been flattened and
757 adding a flag just to record this fact seems a waste of a flag. */
760 check_for_nested_with_variably_modified (tree fndecl
, tree orig_fndecl
)
762 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
765 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
767 for (arg
= DECL_ARGUMENTS (cgn
->decl
); arg
; arg
= DECL_CHAIN (arg
))
768 if (variably_modified_type_p (TREE_TYPE (arg
), orig_fndecl
))
771 if (check_for_nested_with_variably_modified (cgn
->decl
,
779 /* Construct our local datastructure describing the function nesting
780 tree rooted by CGN. */
782 static struct nesting_info
*
783 create_nesting_tree (struct cgraph_node
*cgn
)
785 struct nesting_info
*info
= XCNEW (struct nesting_info
);
786 info
->field_map
= new hash_map
<tree
, tree
>;
787 info
->var_map
= new hash_map
<tree
, tree
>;
788 info
->mem_refs
= new hash_set
<tree
*>;
789 info
->suppress_expansion
= BITMAP_ALLOC (&nesting_info_bitmap_obstack
);
790 info
->context
= cgn
->decl
;
792 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
794 struct nesting_info
*sub
= create_nesting_tree (cgn
);
796 sub
->next
= info
->inner
;
800 /* See discussion at check_for_nested_with_variably_modified for a
801 discussion of why this has to be here. */
802 if (check_for_nested_with_variably_modified (info
->context
, info
->context
))
803 DECL_UNINLINABLE (info
->context
) = true;
808 /* Return an expression computing the static chain for TARGET_CONTEXT
809 from INFO->CONTEXT. Insert any necessary computations before TSI. */
812 get_static_chain (struct nesting_info
*info
, tree target_context
,
813 gimple_stmt_iterator
*gsi
)
815 struct nesting_info
*i
;
818 if (info
->context
== target_context
)
820 x
= build_addr (info
->frame_decl
);
821 info
->static_chain_added
|= 1;
825 x
= get_chain_decl (info
);
826 info
->static_chain_added
|= 2;
828 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
830 tree field
= get_chain_field (i
);
832 x
= build_simple_mem_ref (x
);
833 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
834 x
= init_tmp_var (info
, x
, gsi
);
842 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
843 frame as seen from INFO->CONTEXT. Insert any necessary computations
847 get_frame_field (struct nesting_info
*info
, tree target_context
,
848 tree field
, gimple_stmt_iterator
*gsi
)
850 struct nesting_info
*i
;
853 if (info
->context
== target_context
)
855 /* Make sure frame_decl gets created. */
856 (void) get_frame_type (info
);
857 x
= info
->frame_decl
;
858 info
->static_chain_added
|= 1;
862 x
= get_chain_decl (info
);
863 info
->static_chain_added
|= 2;
865 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
867 tree field
= get_chain_field (i
);
869 x
= build_simple_mem_ref (x
);
870 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
871 x
= init_tmp_var (info
, x
, gsi
);
874 x
= build_simple_mem_ref (x
);
877 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
881 static void note_nonlocal_vla_type (struct nesting_info
*info
, tree type
);
883 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
884 in the nested function with DECL_VALUE_EXPR set to reference the true
885 variable in the parent function. This is used both for debug info
886 and in OMP lowering. */
889 get_nonlocal_debug_decl (struct nesting_info
*info
, tree decl
)
892 struct nesting_info
*i
;
893 tree x
, field
, new_decl
;
895 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
900 target_context
= decl_function_context (decl
);
902 /* A copy of the code in get_frame_field, but without the temporaries. */
903 if (info
->context
== target_context
)
905 /* Make sure frame_decl gets created. */
906 (void) get_frame_type (info
);
907 x
= info
->frame_decl
;
909 info
->static_chain_added
|= 1;
913 x
= get_chain_decl (info
);
914 info
->static_chain_added
|= 2;
915 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
917 field
= get_chain_field (i
);
918 x
= build_simple_mem_ref (x
);
919 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
921 x
= build_simple_mem_ref (x
);
924 field
= lookup_field_for_decl (i
, decl
, INSERT
);
925 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
926 if (use_pointer_in_frame (decl
))
927 x
= build_simple_mem_ref (x
);
929 /* ??? We should be remapping types as well, surely. */
930 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
931 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
932 DECL_CONTEXT (new_decl
) = info
->context
;
933 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
934 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
935 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
936 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
937 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
938 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
939 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
940 if ((TREE_CODE (decl
) == PARM_DECL
941 || TREE_CODE (decl
) == RESULT_DECL
943 && DECL_BY_REFERENCE (decl
))
944 DECL_BY_REFERENCE (new_decl
) = 1;
946 SET_DECL_VALUE_EXPR (new_decl
, x
);
947 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
950 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
951 info
->debug_var_chain
= new_decl
;
954 && info
->context
!= target_context
955 && variably_modified_type_p (TREE_TYPE (decl
), NULL
))
956 note_nonlocal_vla_type (info
, TREE_TYPE (decl
));
962 /* Callback for walk_gimple_stmt, rewrite all references to VAR
963 and PARM_DECLs that belong to outer functions.
965 The rewrite will involve some number of structure accesses back up
966 the static chain. E.g. for a variable FOO up one nesting level it'll
967 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
968 indirections apply to decls for which use_pointer_in_frame is true. */
971 convert_nonlocal_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
973 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
974 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
978 switch (TREE_CODE (t
))
981 /* Non-automatic variables are never processed. */
982 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
987 if (decl_function_context (t
) != info
->context
)
992 x
= get_nonlocal_debug_decl (info
, t
);
993 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
995 tree target_context
= decl_function_context (t
);
996 struct nesting_info
*i
;
997 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
999 x
= lookup_field_for_decl (i
, t
, INSERT
);
1000 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
1001 if (use_pointer_in_frame (t
))
1003 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1004 x
= build_simple_mem_ref (x
);
1011 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1013 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1021 /* We're taking the address of a label from a parent function, but
1022 this is not itself a non-local goto. Mark the label such that it
1023 will not be deleted, much as we would with a label address in
1025 if (decl_function_context (t
) != info
->context
)
1026 FORCED_LABEL (t
) = 1;
1031 bool save_val_only
= wi
->val_only
;
1033 wi
->val_only
= false;
1035 wi
->changed
= false;
1036 walk_tree (&TREE_OPERAND (t
, 0), convert_nonlocal_reference_op
, wi
, 0);
1037 wi
->val_only
= true;
1043 /* If we changed anything, we might no longer be directly
1044 referencing a decl. */
1045 save_context
= current_function_decl
;
1046 current_function_decl
= info
->context
;
1047 recompute_tree_invariant_for_addr_expr (t
);
1048 current_function_decl
= save_context
;
1050 /* If the callback converted the address argument in a context
1051 where we only accept variables (and min_invariant, presumably),
1052 then compute the address into a temporary. */
1054 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1064 case ARRAY_RANGE_REF
:
1066 /* Go down this entire nest and just look at the final prefix and
1067 anything that describes the references. Otherwise, we lose track
1068 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1069 wi
->val_only
= true;
1071 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1073 if (TREE_CODE (t
) == COMPONENT_REF
)
1074 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
, wi
,
1076 else if (TREE_CODE (t
) == ARRAY_REF
1077 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1079 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1081 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1083 walk_tree (&TREE_OPERAND (t
, 3), convert_nonlocal_reference_op
,
1087 wi
->val_only
= false;
1088 walk_tree (tp
, convert_nonlocal_reference_op
, wi
, NULL
);
1091 case VIEW_CONVERT_EXPR
:
1092 /* Just request to look at the subtrees, leaving val_only and lhs
1093 untouched. This might actually be for !val_only + lhs, in which
1094 case we don't want to force a replacement by a temporary. */
1099 if (!IS_TYPE_OR_DECL_P (t
))
1102 wi
->val_only
= true;
1111 static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator
*, bool *,
1112 struct walk_stmt_info
*);
1114 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1115 and PARM_DECLs that belong to outer functions. */
1118 convert_nonlocal_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1120 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1121 bool need_chain
= false, need_stmts
= false;
1124 bitmap new_suppress
;
1126 new_suppress
= BITMAP_GGC_ALLOC ();
1127 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1129 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1131 switch (OMP_CLAUSE_CODE (clause
))
1133 case OMP_CLAUSE_REDUCTION
:
1134 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1136 goto do_decl_clause
;
1138 case OMP_CLAUSE_LASTPRIVATE
:
1139 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1141 goto do_decl_clause
;
1143 case OMP_CLAUSE_LINEAR
:
1144 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1146 wi
->val_only
= true;
1148 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
),
1150 goto do_decl_clause
;
1152 case OMP_CLAUSE_PRIVATE
:
1153 case OMP_CLAUSE_FIRSTPRIVATE
:
1154 case OMP_CLAUSE_COPYPRIVATE
:
1155 case OMP_CLAUSE_SHARED
:
1156 case OMP_CLAUSE_TO_DECLARE
:
1157 case OMP_CLAUSE_LINK
:
1158 case OMP_CLAUSE_USE_DEVICE_PTR
:
1159 case OMP_CLAUSE_IS_DEVICE_PTR
:
1161 decl
= OMP_CLAUSE_DECL (clause
);
1163 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1165 if (decl_function_context (decl
) != info
->context
)
1167 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1168 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1169 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1170 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1171 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1176 case OMP_CLAUSE_SCHEDULE
:
1177 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1180 case OMP_CLAUSE_FINAL
:
1182 case OMP_CLAUSE_NUM_THREADS
:
1183 case OMP_CLAUSE_DEPEND
:
1184 case OMP_CLAUSE_DEVICE
:
1185 case OMP_CLAUSE_NUM_TEAMS
:
1186 case OMP_CLAUSE_THREAD_LIMIT
:
1187 case OMP_CLAUSE_SAFELEN
:
1188 case OMP_CLAUSE_SIMDLEN
:
1189 case OMP_CLAUSE_PRIORITY
:
1190 case OMP_CLAUSE_GRAINSIZE
:
1191 case OMP_CLAUSE_NUM_TASKS
:
1192 case OMP_CLAUSE_HINT
:
1193 case OMP_CLAUSE__CILK_FOR_COUNT_
:
1194 case OMP_CLAUSE_NUM_GANGS
:
1195 case OMP_CLAUSE_NUM_WORKERS
:
1196 case OMP_CLAUSE_VECTOR_LENGTH
:
1197 case OMP_CLAUSE_GANG
:
1198 case OMP_CLAUSE_WORKER
:
1199 case OMP_CLAUSE_VECTOR
:
1200 case OMP_CLAUSE_ASYNC
:
1201 case OMP_CLAUSE_WAIT
:
1202 /* Several OpenACC clauses have optional arguments. Check if they
1204 if (OMP_CLAUSE_OPERAND (clause
, 0))
1206 wi
->val_only
= true;
1208 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1212 /* The gang clause accepts two arguments. */
1213 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
1214 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
1216 wi
->val_only
= true;
1218 convert_nonlocal_reference_op
1219 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
1223 case OMP_CLAUSE_DIST_SCHEDULE
:
1224 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1226 wi
->val_only
= true;
1228 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1233 case OMP_CLAUSE_MAP
:
1235 case OMP_CLAUSE_FROM
:
1236 if (OMP_CLAUSE_SIZE (clause
))
1238 wi
->val_only
= true;
1240 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause
),
1243 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1244 goto do_decl_clause
;
1245 wi
->val_only
= true;
1247 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_nonlocal_reference_op
,
1251 case OMP_CLAUSE_ALIGNED
:
1252 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1254 wi
->val_only
= true;
1256 convert_nonlocal_reference_op
1257 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1259 /* Like do_decl_clause, but don't add any suppression. */
1260 decl
= OMP_CLAUSE_DECL (clause
);
1262 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1264 if (decl_function_context (decl
) != info
->context
)
1266 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1267 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1272 case OMP_CLAUSE_NOWAIT
:
1273 case OMP_CLAUSE_ORDERED
:
1274 case OMP_CLAUSE_DEFAULT
:
1275 case OMP_CLAUSE_COPYIN
:
1276 case OMP_CLAUSE_COLLAPSE
:
1277 case OMP_CLAUSE_UNTIED
:
1278 case OMP_CLAUSE_MERGEABLE
:
1279 case OMP_CLAUSE_PROC_BIND
:
1280 case OMP_CLAUSE_NOGROUP
:
1281 case OMP_CLAUSE_THREADS
:
1282 case OMP_CLAUSE_SIMD
:
1283 case OMP_CLAUSE_DEFAULTMAP
:
1284 case OMP_CLAUSE_SEQ
:
1285 case OMP_CLAUSE_INDEPENDENT
:
1286 case OMP_CLAUSE_AUTO
:
1289 /* OpenACC tile clauses are discarded during gimplification. */
1290 case OMP_CLAUSE_TILE
:
1291 /* The following clause belongs to the OpenACC cache directive, which
1292 is discarded during gimplification. */
1293 case OMP_CLAUSE__CACHE_
:
1294 /* The following clauses are only allowed in the OpenMP declare simd
1295 directive, so not seen here. */
1296 case OMP_CLAUSE_UNIFORM
:
1297 case OMP_CLAUSE_INBRANCH
:
1298 case OMP_CLAUSE_NOTINBRANCH
:
1299 /* The following clauses are only allowed on OpenMP cancel and
1300 cancellation point directives, which at this point have already
1301 been lowered into a function call. */
1302 case OMP_CLAUSE_FOR
:
1303 case OMP_CLAUSE_PARALLEL
:
1304 case OMP_CLAUSE_SECTIONS
:
1305 case OMP_CLAUSE_TASKGROUP
:
1306 /* The following clauses are only added during OMP lowering; nested
1307 function decomposition happens before that. */
1308 case OMP_CLAUSE__LOOPTEMP_
:
1309 case OMP_CLAUSE__SIMDUID_
:
1310 case OMP_CLAUSE__GRIDDIM_
:
1311 /* Anything else. */
1317 info
->suppress_expansion
= new_suppress
;
1320 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1321 switch (OMP_CLAUSE_CODE (clause
))
1323 case OMP_CLAUSE_REDUCTION
:
1324 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1327 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1328 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1330 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1331 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1333 walk_body (convert_nonlocal_reference_stmt
,
1334 convert_nonlocal_reference_op
, info
,
1335 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1336 walk_body (convert_nonlocal_reference_stmt
,
1337 convert_nonlocal_reference_op
, info
,
1338 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1339 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1341 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1342 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1347 case OMP_CLAUSE_LASTPRIVATE
:
1348 walk_body (convert_nonlocal_reference_stmt
,
1349 convert_nonlocal_reference_op
, info
,
1350 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1353 case OMP_CLAUSE_LINEAR
:
1354 walk_body (convert_nonlocal_reference_stmt
,
1355 convert_nonlocal_reference_op
, info
,
1356 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
1366 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1369 note_nonlocal_vla_type (struct nesting_info
*info
, tree type
)
1371 while (POINTER_TYPE_P (type
) && !TYPE_NAME (type
))
1372 type
= TREE_TYPE (type
);
1374 if (TYPE_NAME (type
)
1375 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1376 && DECL_ORIGINAL_TYPE (TYPE_NAME (type
)))
1377 type
= DECL_ORIGINAL_TYPE (TYPE_NAME (type
));
1379 while (POINTER_TYPE_P (type
)
1380 || TREE_CODE (type
) == VECTOR_TYPE
1381 || TREE_CODE (type
) == FUNCTION_TYPE
1382 || TREE_CODE (type
) == METHOD_TYPE
)
1383 type
= TREE_TYPE (type
);
1385 if (TREE_CODE (type
) == ARRAY_TYPE
)
1389 note_nonlocal_vla_type (info
, TREE_TYPE (type
));
1390 domain
= TYPE_DOMAIN (type
);
1393 t
= TYPE_MIN_VALUE (domain
);
1394 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1395 && decl_function_context (t
) != info
->context
)
1396 get_nonlocal_debug_decl (info
, t
);
1397 t
= TYPE_MAX_VALUE (domain
);
1398 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1399 && decl_function_context (t
) != info
->context
)
1400 get_nonlocal_debug_decl (info
, t
);
1405 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1409 note_nonlocal_block_vlas (struct nesting_info
*info
, tree block
)
1413 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
1415 && variably_modified_type_p (TREE_TYPE (var
), NULL
)
1416 && DECL_HAS_VALUE_EXPR_P (var
)
1417 && decl_function_context (var
) != info
->context
)
1418 note_nonlocal_vla_type (info
, TREE_TYPE (var
));
1421 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1422 PARM_DECLs that belong to outer functions. This handles statements
1423 that are not handled via the standard recursion done in
1424 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1425 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1426 operands of STMT have been handled by this function. */
1429 convert_nonlocal_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1430 struct walk_stmt_info
*wi
)
1432 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1433 tree save_local_var_chain
;
1434 bitmap save_suppress
;
1435 gimple
*stmt
= gsi_stmt (*gsi
);
1437 switch (gimple_code (stmt
))
1440 /* Don't walk non-local gotos for now. */
1441 if (TREE_CODE (gimple_goto_dest (stmt
)) != LABEL_DECL
)
1443 wi
->val_only
= true;
1445 *handled_ops_p
= false;
1450 case GIMPLE_OMP_PARALLEL
:
1451 case GIMPLE_OMP_TASK
:
1452 save_suppress
= info
->suppress_expansion
;
1453 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1457 decl
= get_chain_decl (info
);
1458 c
= build_omp_clause (gimple_location (stmt
),
1459 OMP_CLAUSE_FIRSTPRIVATE
);
1460 OMP_CLAUSE_DECL (c
) = decl
;
1461 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1462 gimple_omp_taskreg_set_clauses (stmt
, c
);
1465 save_local_var_chain
= info
->new_local_var_chain
;
1466 info
->new_local_var_chain
= NULL
;
1468 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1469 info
, gimple_omp_body_ptr (stmt
));
1471 if (info
->new_local_var_chain
)
1472 declare_vars (info
->new_local_var_chain
,
1473 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1475 info
->new_local_var_chain
= save_local_var_chain
;
1476 info
->suppress_expansion
= save_suppress
;
1479 case GIMPLE_OMP_FOR
:
1480 save_suppress
= info
->suppress_expansion
;
1481 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1482 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
1483 convert_nonlocal_reference_stmt
,
1484 convert_nonlocal_reference_op
, info
);
1485 walk_body (convert_nonlocal_reference_stmt
,
1486 convert_nonlocal_reference_op
, info
, gimple_omp_body_ptr (stmt
));
1487 info
->suppress_expansion
= save_suppress
;
1490 case GIMPLE_OMP_SECTIONS
:
1491 save_suppress
= info
->suppress_expansion
;
1492 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1493 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1494 info
, gimple_omp_body_ptr (stmt
));
1495 info
->suppress_expansion
= save_suppress
;
1498 case GIMPLE_OMP_SINGLE
:
1499 save_suppress
= info
->suppress_expansion
;
1500 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1501 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1502 info
, gimple_omp_body_ptr (stmt
));
1503 info
->suppress_expansion
= save_suppress
;
1506 case GIMPLE_OMP_TARGET
:
1507 if (!is_gimple_omp_offloaded (stmt
))
1509 save_suppress
= info
->suppress_expansion
;
1510 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1512 info
->suppress_expansion
= save_suppress
;
1513 walk_body (convert_nonlocal_reference_stmt
,
1514 convert_nonlocal_reference_op
, info
,
1515 gimple_omp_body_ptr (stmt
));
1518 save_suppress
= info
->suppress_expansion
;
1519 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1523 decl
= get_chain_decl (info
);
1524 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
1525 OMP_CLAUSE_DECL (c
) = decl
;
1526 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
1527 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
1528 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
1529 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
1532 save_local_var_chain
= info
->new_local_var_chain
;
1533 info
->new_local_var_chain
= NULL
;
1535 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1536 info
, gimple_omp_body_ptr (stmt
));
1538 if (info
->new_local_var_chain
)
1539 declare_vars (info
->new_local_var_chain
,
1540 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1542 info
->new_local_var_chain
= save_local_var_chain
;
1543 info
->suppress_expansion
= save_suppress
;
1546 case GIMPLE_OMP_TEAMS
:
1547 save_suppress
= info
->suppress_expansion
;
1548 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
1549 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1550 info
, gimple_omp_body_ptr (stmt
));
1551 info
->suppress_expansion
= save_suppress
;
1554 case GIMPLE_OMP_SECTION
:
1555 case GIMPLE_OMP_MASTER
:
1556 case GIMPLE_OMP_TASKGROUP
:
1557 case GIMPLE_OMP_ORDERED
:
1558 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1559 info
, gimple_omp_body_ptr (stmt
));
1564 gbind
*bind_stmt
= as_a
<gbind
*> (stmt
);
1565 if (!optimize
&& gimple_bind_block (bind_stmt
))
1566 note_nonlocal_block_vlas (info
, gimple_bind_block (bind_stmt
));
1568 for (tree var
= gimple_bind_vars (bind_stmt
); var
; var
= DECL_CHAIN (var
))
1569 if (TREE_CODE (var
) == NAMELIST_DECL
)
1571 /* Adjust decls mentioned in NAMELIST_DECL. */
1572 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
1576 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
1579 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1581 if (decl_function_context (decl
) != info
->context
)
1582 CONSTRUCTOR_ELT (decls
, i
)->value
1583 = get_nonlocal_debug_decl (info
, decl
);
1587 *handled_ops_p
= false;
1591 wi
->val_only
= true;
1593 *handled_ops_p
= false;
1597 /* For every other statement that we are not interested in
1598 handling here, let the walker traverse the operands. */
1599 *handled_ops_p
= false;
1603 /* We have handled all of STMT operands, no need to traverse the operands. */
1604 *handled_ops_p
= true;
1609 /* A subroutine of convert_local_reference. Create a local variable
1610 in the parent function with DECL_VALUE_EXPR set to reference the
1611 field in FRAME. This is used both for debug info and in OMP
1615 get_local_debug_decl (struct nesting_info
*info
, tree decl
, tree field
)
1619 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
1623 /* Make sure frame_decl gets created. */
1624 (void) get_frame_type (info
);
1625 x
= info
->frame_decl
;
1626 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1628 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
1629 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1630 DECL_CONTEXT (new_decl
) = info
->context
;
1631 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1632 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1633 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1634 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1635 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1636 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1637 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1638 if ((TREE_CODE (decl
) == PARM_DECL
1639 || TREE_CODE (decl
) == RESULT_DECL
1641 && DECL_BY_REFERENCE (decl
))
1642 DECL_BY_REFERENCE (new_decl
) = 1;
1644 SET_DECL_VALUE_EXPR (new_decl
, x
);
1645 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1648 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1649 info
->debug_var_chain
= new_decl
;
1651 /* Do not emit debug info twice. */
1652 DECL_IGNORED_P (decl
) = 1;
1658 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1659 and PARM_DECLs that were referenced by inner nested functions.
1660 The rewrite will be a structure reference to the local frame variable. */
1662 static bool convert_local_omp_clauses (tree
*, struct walk_stmt_info
*);
1665 convert_local_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1667 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1668 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1669 tree t
= *tp
, field
, x
;
1673 switch (TREE_CODE (t
))
1676 /* Non-automatic variables are never processed. */
1677 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1682 if (decl_function_context (t
) == info
->context
)
1684 /* If we copied a pointer to the frame, then the original decl
1685 is used unchanged in the parent function. */
1686 if (use_pointer_in_frame (t
))
1689 /* No need to transform anything if no child references the
1691 field
= lookup_field_for_decl (info
, t
, NO_INSERT
);
1696 x
= get_local_debug_decl (info
, t
, field
);
1697 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1698 x
= get_frame_field (info
, info
->context
, field
, &wi
->gsi
);
1703 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1705 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1713 save_val_only
= wi
->val_only
;
1714 wi
->val_only
= false;
1716 wi
->changed
= false;
1717 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
, wi
, NULL
);
1718 wi
->val_only
= save_val_only
;
1720 /* If we converted anything ... */
1725 /* Then the frame decl is now addressable. */
1726 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
1728 save_context
= current_function_decl
;
1729 current_function_decl
= info
->context
;
1730 recompute_tree_invariant_for_addr_expr (t
);
1731 current_function_decl
= save_context
;
1733 /* If we are in a context where we only accept values, then
1734 compute the address into a temporary. */
1736 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1745 case ARRAY_RANGE_REF
:
1747 /* Go down this entire nest and just look at the final prefix and
1748 anything that describes the references. Otherwise, we lose track
1749 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1750 save_val_only
= wi
->val_only
;
1751 wi
->val_only
= true;
1753 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1755 if (TREE_CODE (t
) == COMPONENT_REF
)
1756 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1758 else if (TREE_CODE (t
) == ARRAY_REF
1759 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1761 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1763 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1765 walk_tree (&TREE_OPERAND (t
, 3), convert_local_reference_op
, wi
,
1769 wi
->val_only
= false;
1770 walk_tree (tp
, convert_local_reference_op
, wi
, NULL
);
1771 wi
->val_only
= save_val_only
;
1775 save_val_only
= wi
->val_only
;
1776 wi
->val_only
= true;
1778 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
,
1780 /* We need to re-fold the MEM_REF as component references as
1781 part of a ADDR_EXPR address are not allowed. But we cannot
1782 fold here, as the chain record type is not yet finalized. */
1783 if (TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
1784 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)))
1785 info
->mem_refs
->add (tp
);
1786 wi
->val_only
= save_val_only
;
1789 case VIEW_CONVERT_EXPR
:
1790 /* Just request to look at the subtrees, leaving val_only and lhs
1791 untouched. This might actually be for !val_only + lhs, in which
1792 case we don't want to force a replacement by a temporary. */
1797 if (!IS_TYPE_OR_DECL_P (t
))
1800 wi
->val_only
= true;
1809 static tree
convert_local_reference_stmt (gimple_stmt_iterator
*, bool *,
1810 struct walk_stmt_info
*);
1812 /* Helper for convert_local_reference. Convert all the references in
1813 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1816 convert_local_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1818 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1819 bool need_frame
= false, need_stmts
= false;
1822 bitmap new_suppress
;
1824 new_suppress
= BITMAP_GGC_ALLOC ();
1825 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1827 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1829 switch (OMP_CLAUSE_CODE (clause
))
1831 case OMP_CLAUSE_REDUCTION
:
1832 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1834 goto do_decl_clause
;
1836 case OMP_CLAUSE_LASTPRIVATE
:
1837 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1839 goto do_decl_clause
;
1841 case OMP_CLAUSE_LINEAR
:
1842 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1844 wi
->val_only
= true;
1846 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
), &dummy
,
1848 goto do_decl_clause
;
1850 case OMP_CLAUSE_PRIVATE
:
1851 case OMP_CLAUSE_FIRSTPRIVATE
:
1852 case OMP_CLAUSE_COPYPRIVATE
:
1853 case OMP_CLAUSE_SHARED
:
1854 case OMP_CLAUSE_TO_DECLARE
:
1855 case OMP_CLAUSE_LINK
:
1856 case OMP_CLAUSE_USE_DEVICE_PTR
:
1857 case OMP_CLAUSE_IS_DEVICE_PTR
:
1859 decl
= OMP_CLAUSE_DECL (clause
);
1861 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1863 if (decl_function_context (decl
) == info
->context
1864 && !use_pointer_in_frame (decl
))
1866 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1869 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1870 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1871 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1872 OMP_CLAUSE_DECL (clause
)
1873 = get_local_debug_decl (info
, decl
, field
);
1879 case OMP_CLAUSE_SCHEDULE
:
1880 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1883 case OMP_CLAUSE_FINAL
:
1885 case OMP_CLAUSE_NUM_THREADS
:
1886 case OMP_CLAUSE_DEPEND
:
1887 case OMP_CLAUSE_DEVICE
:
1888 case OMP_CLAUSE_NUM_TEAMS
:
1889 case OMP_CLAUSE_THREAD_LIMIT
:
1890 case OMP_CLAUSE_SAFELEN
:
1891 case OMP_CLAUSE_SIMDLEN
:
1892 case OMP_CLAUSE_PRIORITY
:
1893 case OMP_CLAUSE_GRAINSIZE
:
1894 case OMP_CLAUSE_NUM_TASKS
:
1895 case OMP_CLAUSE_HINT
:
1896 case OMP_CLAUSE__CILK_FOR_COUNT_
:
1897 case OMP_CLAUSE_NUM_GANGS
:
1898 case OMP_CLAUSE_NUM_WORKERS
:
1899 case OMP_CLAUSE_VECTOR_LENGTH
:
1900 case OMP_CLAUSE_GANG
:
1901 case OMP_CLAUSE_WORKER
:
1902 case OMP_CLAUSE_VECTOR
:
1903 case OMP_CLAUSE_ASYNC
:
1904 case OMP_CLAUSE_WAIT
:
1905 /* Several OpenACC clauses have optional arguments. Check if they
1907 if (OMP_CLAUSE_OPERAND (clause
, 0))
1909 wi
->val_only
= true;
1911 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1915 /* The gang clause accepts two arguments. */
1916 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
1917 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
1919 wi
->val_only
= true;
1921 convert_nonlocal_reference_op
1922 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
1926 case OMP_CLAUSE_DIST_SCHEDULE
:
1927 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1929 wi
->val_only
= true;
1931 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1936 case OMP_CLAUSE_MAP
:
1938 case OMP_CLAUSE_FROM
:
1939 if (OMP_CLAUSE_SIZE (clause
))
1941 wi
->val_only
= true;
1943 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause
),
1946 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1947 goto do_decl_clause
;
1948 wi
->val_only
= true;
1950 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_local_reference_op
,
1954 case OMP_CLAUSE_ALIGNED
:
1955 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1957 wi
->val_only
= true;
1959 convert_local_reference_op
1960 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1962 /* Like do_decl_clause, but don't add any suppression. */
1963 decl
= OMP_CLAUSE_DECL (clause
);
1965 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1967 if (decl_function_context (decl
) == info
->context
1968 && !use_pointer_in_frame (decl
))
1970 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1973 OMP_CLAUSE_DECL (clause
)
1974 = get_local_debug_decl (info
, decl
, field
);
1980 case OMP_CLAUSE_NOWAIT
:
1981 case OMP_CLAUSE_ORDERED
:
1982 case OMP_CLAUSE_DEFAULT
:
1983 case OMP_CLAUSE_COPYIN
:
1984 case OMP_CLAUSE_COLLAPSE
:
1985 case OMP_CLAUSE_UNTIED
:
1986 case OMP_CLAUSE_MERGEABLE
:
1987 case OMP_CLAUSE_PROC_BIND
:
1988 case OMP_CLAUSE_NOGROUP
:
1989 case OMP_CLAUSE_THREADS
:
1990 case OMP_CLAUSE_SIMD
:
1991 case OMP_CLAUSE_DEFAULTMAP
:
1992 case OMP_CLAUSE_SEQ
:
1993 case OMP_CLAUSE_INDEPENDENT
:
1994 case OMP_CLAUSE_AUTO
:
1997 /* OpenACC tile clauses are discarded during gimplification. */
1998 case OMP_CLAUSE_TILE
:
1999 /* The following clause belongs to the OpenACC cache directive, which
2000 is discarded during gimplification. */
2001 case OMP_CLAUSE__CACHE_
:
2002 /* The following clauses are only allowed in the OpenMP declare simd
2003 directive, so not seen here. */
2004 case OMP_CLAUSE_UNIFORM
:
2005 case OMP_CLAUSE_INBRANCH
:
2006 case OMP_CLAUSE_NOTINBRANCH
:
2007 /* The following clauses are only allowed on OpenMP cancel and
2008 cancellation point directives, which at this point have already
2009 been lowered into a function call. */
2010 case OMP_CLAUSE_FOR
:
2011 case OMP_CLAUSE_PARALLEL
:
2012 case OMP_CLAUSE_SECTIONS
:
2013 case OMP_CLAUSE_TASKGROUP
:
2014 /* The following clauses are only added during OMP lowering; nested
2015 function decomposition happens before that. */
2016 case OMP_CLAUSE__LOOPTEMP_
:
2017 case OMP_CLAUSE__SIMDUID_
:
2018 case OMP_CLAUSE__GRIDDIM_
:
2019 /* Anything else. */
2025 info
->suppress_expansion
= new_suppress
;
2028 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
2029 switch (OMP_CLAUSE_CODE (clause
))
2031 case OMP_CLAUSE_REDUCTION
:
2032 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2035 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
2036 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2038 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2039 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2041 walk_body (convert_local_reference_stmt
,
2042 convert_local_reference_op
, info
,
2043 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
2044 walk_body (convert_local_reference_stmt
,
2045 convert_local_reference_op
, info
,
2046 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
2047 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2049 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2050 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2055 case OMP_CLAUSE_LASTPRIVATE
:
2056 walk_body (convert_local_reference_stmt
,
2057 convert_local_reference_op
, info
,
2058 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
2061 case OMP_CLAUSE_LINEAR
:
2062 walk_body (convert_local_reference_stmt
,
2063 convert_local_reference_op
, info
,
2064 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
2075 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2076 and PARM_DECLs that were referenced by inner nested functions.
2077 The rewrite will be a structure reference to the local frame variable. */
2080 convert_local_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2081 struct walk_stmt_info
*wi
)
2083 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2084 tree save_local_var_chain
;
2085 bitmap save_suppress
;
2086 char save_static_chain_added
;
2087 bool frame_decl_added
;
2088 gimple
*stmt
= gsi_stmt (*gsi
);
2090 switch (gimple_code (stmt
))
2092 case GIMPLE_OMP_PARALLEL
:
2093 case GIMPLE_OMP_TASK
:
2094 save_suppress
= info
->suppress_expansion
;
2095 frame_decl_added
= false;
2096 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
2099 tree c
= build_omp_clause (gimple_location (stmt
),
2101 (void) get_frame_type (info
);
2102 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2103 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2104 gimple_omp_taskreg_set_clauses (stmt
, c
);
2105 info
->static_chain_added
|= 4;
2106 frame_decl_added
= true;
2109 save_local_var_chain
= info
->new_local_var_chain
;
2110 save_static_chain_added
= info
->static_chain_added
;
2111 info
->new_local_var_chain
= NULL
;
2112 info
->static_chain_added
= 0;
2114 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2115 gimple_omp_body_ptr (stmt
));
2117 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2119 tree c
= build_omp_clause (gimple_location (stmt
),
2121 (void) get_frame_type (info
);
2122 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2123 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2124 info
->static_chain_added
|= 4;
2125 gimple_omp_taskreg_set_clauses (stmt
, c
);
2127 if (info
->new_local_var_chain
)
2128 declare_vars (info
->new_local_var_chain
,
2129 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2130 info
->new_local_var_chain
= save_local_var_chain
;
2131 info
->suppress_expansion
= save_suppress
;
2132 info
->static_chain_added
|= save_static_chain_added
;
2135 case GIMPLE_OMP_FOR
:
2136 save_suppress
= info
->suppress_expansion
;
2137 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
2138 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
2139 convert_local_reference_stmt
,
2140 convert_local_reference_op
, info
);
2141 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2142 info
, gimple_omp_body_ptr (stmt
));
2143 info
->suppress_expansion
= save_suppress
;
2146 case GIMPLE_OMP_SECTIONS
:
2147 save_suppress
= info
->suppress_expansion
;
2148 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
2149 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2150 info
, gimple_omp_body_ptr (stmt
));
2151 info
->suppress_expansion
= save_suppress
;
2154 case GIMPLE_OMP_SINGLE
:
2155 save_suppress
= info
->suppress_expansion
;
2156 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
2157 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2158 info
, gimple_omp_body_ptr (stmt
));
2159 info
->suppress_expansion
= save_suppress
;
2162 case GIMPLE_OMP_TARGET
:
2163 if (!is_gimple_omp_offloaded (stmt
))
2165 save_suppress
= info
->suppress_expansion
;
2166 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
);
2167 info
->suppress_expansion
= save_suppress
;
2168 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2169 info
, gimple_omp_body_ptr (stmt
));
2172 save_suppress
= info
->suppress_expansion
;
2173 frame_decl_added
= false;
2174 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
))
2176 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2177 (void) get_frame_type (info
);
2178 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2179 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2180 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2181 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2182 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2183 info
->static_chain_added
|= 4;
2184 frame_decl_added
= true;
2187 save_local_var_chain
= info
->new_local_var_chain
;
2188 save_static_chain_added
= info
->static_chain_added
;
2189 info
->new_local_var_chain
= NULL
;
2190 info
->static_chain_added
= 0;
2192 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2193 gimple_omp_body_ptr (stmt
));
2195 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2197 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2198 (void) get_frame_type (info
);
2199 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2200 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2201 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2202 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2203 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2204 info
->static_chain_added
|= 4;
2207 if (info
->new_local_var_chain
)
2208 declare_vars (info
->new_local_var_chain
,
2209 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2210 info
->new_local_var_chain
= save_local_var_chain
;
2211 info
->suppress_expansion
= save_suppress
;
2212 info
->static_chain_added
|= save_static_chain_added
;
2215 case GIMPLE_OMP_TEAMS
:
2216 save_suppress
= info
->suppress_expansion
;
2217 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
2218 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2219 info
, gimple_omp_body_ptr (stmt
));
2220 info
->suppress_expansion
= save_suppress
;
2223 case GIMPLE_OMP_SECTION
:
2224 case GIMPLE_OMP_MASTER
:
2225 case GIMPLE_OMP_TASKGROUP
:
2226 case GIMPLE_OMP_ORDERED
:
2227 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2228 info
, gimple_omp_body_ptr (stmt
));
2232 wi
->val_only
= true;
2234 *handled_ops_p
= false;
2238 if (gimple_clobber_p (stmt
))
2240 tree lhs
= gimple_assign_lhs (stmt
);
2241 if (!use_pointer_in_frame (lhs
)
2242 && lookup_field_for_decl (info
, lhs
, NO_INSERT
))
2244 gsi_replace (gsi
, gimple_build_nop (), true);
2248 *handled_ops_p
= false;
2252 for (tree var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
2254 var
= DECL_CHAIN (var
))
2255 if (TREE_CODE (var
) == NAMELIST_DECL
)
2257 /* Adjust decls mentioned in NAMELIST_DECL. */
2258 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
2262 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
2265 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2267 if (decl_function_context (decl
) == info
->context
2268 && !use_pointer_in_frame (decl
))
2270 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2273 CONSTRUCTOR_ELT (decls
, i
)->value
2274 = get_local_debug_decl (info
, decl
, field
);
2280 *handled_ops_p
= false;
2284 /* For every other statement that we are not interested in
2285 handling here, let the walker traverse the operands. */
2286 *handled_ops_p
= false;
2290 /* Indicate that we have handled all the operands ourselves. */
2291 *handled_ops_p
= true;
2296 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2297 that reference labels from outer functions. The rewrite will be a
2298 call to __builtin_nonlocal_goto. */
2301 convert_nl_goto_reference (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2302 struct walk_stmt_info
*wi
)
2304 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2305 tree label
, new_label
, target_context
, x
, field
;
2307 gimple
*stmt
= gsi_stmt (*gsi
);
2309 if (gimple_code (stmt
) != GIMPLE_GOTO
)
2311 *handled_ops_p
= false;
2315 label
= gimple_goto_dest (stmt
);
2316 if (TREE_CODE (label
) != LABEL_DECL
)
2318 *handled_ops_p
= false;
2322 target_context
= decl_function_context (label
);
2323 if (target_context
== info
->context
)
2325 *handled_ops_p
= false;
2329 for (i
= info
->outer
; target_context
!= i
->context
; i
= i
->outer
)
2332 /* The original user label may also be use for a normal goto, therefore
2333 we must create a new label that will actually receive the abnormal
2334 control transfer. This new label will be marked LABEL_NONLOCAL; this
2335 mark will trigger proper behavior in the cfg, as well as cause the
2336 (hairy target-specific) non-local goto receiver code to be generated
2337 when we expand rtl. Enter this association into var_map so that we
2338 can insert the new label into the IL during a second pass. */
2339 tree
*slot
= &i
->var_map
->get_or_insert (label
);
2342 new_label
= create_artificial_label (UNKNOWN_LOCATION
);
2343 DECL_NONLOCAL (new_label
) = 1;
2349 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2350 field
= get_nl_goto_field (i
);
2351 x
= get_frame_field (info
, target_context
, field
, gsi
);
2353 x
= gsi_gimplify_val (info
, x
, gsi
);
2354 call
= gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO
),
2355 2, build_addr (new_label
), x
);
2356 gsi_replace (gsi
, call
, false);
2358 /* We have handled all of STMT's operands, no need to keep going. */
2359 *handled_ops_p
= true;
2364 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2365 are referenced via nonlocal goto from a nested function. The rewrite
2366 will involve installing a newly generated DECL_NONLOCAL label, and
2367 (potentially) a branch around the rtl gunk that is assumed to be
2368 attached to such a label. */
2371 convert_nl_goto_receiver (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2372 struct walk_stmt_info
*wi
)
2374 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2375 tree label
, new_label
;
2376 gimple_stmt_iterator tmp_gsi
;
2377 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (*gsi
));
2381 *handled_ops_p
= false;
2385 label
= gimple_label_label (stmt
);
2387 tree
*slot
= info
->var_map
->get (label
);
2390 *handled_ops_p
= false;
2394 /* If there's any possibility that the previous statement falls through,
2395 then we must branch around the new non-local label. */
2397 gsi_prev (&tmp_gsi
);
2398 if (gsi_end_p (tmp_gsi
) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi
)))
2400 gimple
*stmt
= gimple_build_goto (label
);
2401 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2404 new_label
= (tree
) *slot
;
2405 stmt
= gimple_build_label (new_label
);
2406 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2408 *handled_ops_p
= true;
2413 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2414 of nested functions that require the use of trampolines. The rewrite
2415 will involve a reference a trampoline generated for the occasion. */
2418 convert_tramp_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
2420 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
2421 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2422 tree t
= *tp
, decl
, target_context
, x
, builtin
;
2427 switch (TREE_CODE (t
))
2431 T.1 = &CHAIN->tramp;
2432 T.2 = __builtin_adjust_trampoline (T.1);
2433 T.3 = (func_type)T.2;
2436 decl
= TREE_OPERAND (t
, 0);
2437 if (TREE_CODE (decl
) != FUNCTION_DECL
)
2440 /* Only need to process nested functions. */
2441 target_context
= decl_function_context (decl
);
2442 if (!target_context
)
2445 /* If the nested function doesn't use a static chain, then
2446 it doesn't need a trampoline. */
2447 if (!DECL_STATIC_CHAIN (decl
))
2450 /* If we don't want a trampoline, then don't build one. */
2451 if (TREE_NO_TRAMPOLINE (t
))
2454 /* Lookup the immediate parent of the callee, as that's where
2455 we need to insert the trampoline. */
2456 for (i
= info
; i
->context
!= target_context
; i
= i
->outer
)
2459 /* Decide whether to generate a descriptor or a trampoline. */
2460 descr
= FUNC_ADDR_BY_DESCRIPTOR (t
) && !flag_trampolines
;
2463 x
= lookup_descr_for_decl (i
, decl
, INSERT
);
2465 x
= lookup_tramp_for_decl (i
, decl
, INSERT
);
2467 /* Compute the address of the field holding the trampoline. */
2468 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
2470 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
2472 /* Do machine-specific ugliness. Normally this will involve
2473 computing extra alignment, but it can really be anything. */
2475 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR
);
2477 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE
);
2478 call
= gimple_build_call (builtin
, 1, x
);
2479 x
= init_tmp_var_with_call (info
, &wi
->gsi
, call
);
2481 /* Cast back to the proper function type. */
2482 x
= build1 (NOP_EXPR
, TREE_TYPE (t
), x
);
2483 x
= init_tmp_var (info
, x
, &wi
->gsi
);
2489 if (!IS_TYPE_OR_DECL_P (t
))
2498 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2499 to addresses of nested functions that require the use of
2500 trampolines. The rewrite will involve a reference a trampoline
2501 generated for the occasion. */
2504 convert_tramp_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2505 struct walk_stmt_info
*wi
)
2507 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2508 gimple
*stmt
= gsi_stmt (*gsi
);
2510 switch (gimple_code (stmt
))
2514 /* Only walk call arguments, lest we generate trampolines for
2516 unsigned long i
, nargs
= gimple_call_num_args (stmt
);
2517 for (i
= 0; i
< nargs
; i
++)
2518 walk_tree (gimple_call_arg_ptr (stmt
, i
), convert_tramp_reference_op
,
2523 case GIMPLE_OMP_TARGET
:
2524 if (!is_gimple_omp_offloaded (stmt
))
2526 *handled_ops_p
= false;
2530 case GIMPLE_OMP_PARALLEL
:
2531 case GIMPLE_OMP_TASK
:
2533 tree save_local_var_chain
= info
->new_local_var_chain
;
2534 walk_gimple_op (stmt
, convert_tramp_reference_op
, wi
);
2535 info
->new_local_var_chain
= NULL
;
2536 char save_static_chain_added
= info
->static_chain_added
;
2537 info
->static_chain_added
= 0;
2538 walk_body (convert_tramp_reference_stmt
, convert_tramp_reference_op
,
2539 info
, gimple_omp_body_ptr (stmt
));
2540 if (info
->new_local_var_chain
)
2541 declare_vars (info
->new_local_var_chain
,
2542 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
2544 for (int i
= 0; i
< 2; i
++)
2547 if ((info
->static_chain_added
& (1 << i
)) == 0)
2549 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2550 /* Don't add CHAIN.* or FRAME.* twice. */
2551 for (c
= gimple_omp_taskreg_clauses (stmt
);
2553 c
= OMP_CLAUSE_CHAIN (c
))
2554 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2555 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2556 && OMP_CLAUSE_DECL (c
) == decl
)
2558 if (c
== NULL
&& gimple_code (stmt
) != GIMPLE_OMP_TARGET
)
2560 c
= build_omp_clause (gimple_location (stmt
),
2561 i
? OMP_CLAUSE_FIRSTPRIVATE
2562 : OMP_CLAUSE_SHARED
);
2563 OMP_CLAUSE_DECL (c
) = decl
;
2564 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2565 gimple_omp_taskreg_set_clauses (stmt
, c
);
2569 c
= build_omp_clause (gimple_location (stmt
),
2571 OMP_CLAUSE_DECL (c
) = decl
;
2572 OMP_CLAUSE_SET_MAP_KIND (c
,
2573 i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2574 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2575 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2576 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2580 info
->new_local_var_chain
= save_local_var_chain
;
2581 info
->static_chain_added
|= save_static_chain_added
;
2586 *handled_ops_p
= false;
2590 *handled_ops_p
= true;
2596 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2597 that reference nested functions to make sure that the static chain
2598 is set up properly for the call. */
2601 convert_gimple_call (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2602 struct walk_stmt_info
*wi
)
2604 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2605 tree decl
, target_context
;
2606 char save_static_chain_added
;
2608 gimple
*stmt
= gsi_stmt (*gsi
);
2610 switch (gimple_code (stmt
))
2613 if (gimple_call_chain (stmt
))
2615 decl
= gimple_call_fndecl (stmt
);
2618 target_context
= decl_function_context (decl
);
2619 if (target_context
&& DECL_STATIC_CHAIN (decl
))
2621 gimple_call_set_chain (as_a
<gcall
*> (stmt
),
2622 get_static_chain (info
, target_context
,
2624 info
->static_chain_added
|= (1 << (info
->context
!= target_context
));
2628 case GIMPLE_OMP_PARALLEL
:
2629 case GIMPLE_OMP_TASK
:
2630 save_static_chain_added
= info
->static_chain_added
;
2631 info
->static_chain_added
= 0;
2632 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2633 for (i
= 0; i
< 2; i
++)
2636 if ((info
->static_chain_added
& (1 << i
)) == 0)
2638 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2639 /* Don't add CHAIN.* or FRAME.* twice. */
2640 for (c
= gimple_omp_taskreg_clauses (stmt
);
2642 c
= OMP_CLAUSE_CHAIN (c
))
2643 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2644 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2645 && OMP_CLAUSE_DECL (c
) == decl
)
2649 c
= build_omp_clause (gimple_location (stmt
),
2650 i
? OMP_CLAUSE_FIRSTPRIVATE
2651 : OMP_CLAUSE_SHARED
);
2652 OMP_CLAUSE_DECL (c
) = decl
;
2653 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2654 gimple_omp_taskreg_set_clauses (stmt
, c
);
2657 info
->static_chain_added
|= save_static_chain_added
;
2660 case GIMPLE_OMP_TARGET
:
2661 if (!is_gimple_omp_offloaded (stmt
))
2663 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2666 save_static_chain_added
= info
->static_chain_added
;
2667 info
->static_chain_added
= 0;
2668 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2669 for (i
= 0; i
< 2; i
++)
2672 if ((info
->static_chain_added
& (1 << i
)) == 0)
2674 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2675 /* Don't add CHAIN.* or FRAME.* twice. */
2676 for (c
= gimple_omp_target_clauses (stmt
);
2678 c
= OMP_CLAUSE_CHAIN (c
))
2679 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
2680 && OMP_CLAUSE_DECL (c
) == decl
)
2684 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2685 OMP_CLAUSE_DECL (c
) = decl
;
2686 OMP_CLAUSE_SET_MAP_KIND (c
, i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2687 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2688 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2689 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2693 info
->static_chain_added
|= save_static_chain_added
;
2696 case GIMPLE_OMP_FOR
:
2697 walk_body (convert_gimple_call
, NULL
, info
,
2698 gimple_omp_for_pre_body_ptr (stmt
));
2700 case GIMPLE_OMP_SECTIONS
:
2701 case GIMPLE_OMP_SECTION
:
2702 case GIMPLE_OMP_SINGLE
:
2703 case GIMPLE_OMP_TEAMS
:
2704 case GIMPLE_OMP_MASTER
:
2705 case GIMPLE_OMP_TASKGROUP
:
2706 case GIMPLE_OMP_ORDERED
:
2707 case GIMPLE_OMP_CRITICAL
:
2708 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2712 /* Keep looking for other operands. */
2713 *handled_ops_p
= false;
2717 *handled_ops_p
= true;
2721 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2722 call expressions. At the same time, determine if a nested function
2723 actually uses its static chain; if not, remember that. */
2726 convert_all_function_calls (struct nesting_info
*root
)
2728 unsigned int chain_count
= 0, old_chain_count
, iter_count
;
2729 struct nesting_info
*n
;
2731 /* First, optimistically clear static_chain for all decls that haven't
2732 used the static chain already for variable access. But always create
2733 it if not optimizing. This makes it possible to reconstruct the static
2734 nesting tree at run time and thus to resolve up-level references from
2735 within the debugger. */
2736 FOR_EACH_NEST_INFO (n
, root
)
2738 tree decl
= n
->context
;
2742 (void) get_frame_type (n
);
2744 (void) get_chain_decl (n
);
2746 else if (!n
->outer
|| (!n
->chain_decl
&& !n
->chain_field
))
2748 DECL_STATIC_CHAIN (decl
) = 0;
2749 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2750 fprintf (dump_file
, "Guessing no static-chain for %s\n",
2751 lang_hooks
.decl_printable_name (decl
, 2));
2754 DECL_STATIC_CHAIN (decl
) = 1;
2755 chain_count
+= DECL_STATIC_CHAIN (decl
);
2758 /* Walk the functions and perform transformations. Note that these
2759 transformations can induce new uses of the static chain, which in turn
2760 require re-examining all users of the decl. */
2761 /* ??? It would make sense to try to use the call graph to speed this up,
2762 but the call graph hasn't really been built yet. Even if it did, we
2763 would still need to iterate in this loop since address-of references
2764 wouldn't show up in the callgraph anyway. */
2768 old_chain_count
= chain_count
;
2772 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2773 fputc ('\n', dump_file
);
2775 FOR_EACH_NEST_INFO (n
, root
)
2777 tree decl
= n
->context
;
2778 walk_function (convert_tramp_reference_stmt
,
2779 convert_tramp_reference_op
, n
);
2780 walk_function (convert_gimple_call
, NULL
, n
);
2781 chain_count
+= DECL_STATIC_CHAIN (decl
);
2784 while (chain_count
!= old_chain_count
);
2786 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2787 fprintf (dump_file
, "convert_all_function_calls iterations: %u\n\n",
2791 struct nesting_copy_body_data
2794 struct nesting_info
*root
;
2797 /* A helper subroutine for debug_var_chain type remapping. */
2800 nesting_copy_decl (tree decl
, copy_body_data
*id
)
2802 struct nesting_copy_body_data
*nid
= (struct nesting_copy_body_data
*) id
;
2803 tree
*slot
= nid
->root
->var_map
->get (decl
);
2806 return (tree
) *slot
;
2808 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (decl
))
2810 tree new_decl
= copy_decl_no_change (decl
, id
);
2811 DECL_ORIGINAL_TYPE (new_decl
)
2812 = remap_type (DECL_ORIGINAL_TYPE (decl
), id
);
2817 || TREE_CODE (decl
) == PARM_DECL
2818 || TREE_CODE (decl
) == RESULT_DECL
)
2821 return copy_decl_no_change (decl
, id
);
2824 /* A helper function for remap_vla_decls. See if *TP contains
2825 some remapped variables. */
2828 contains_remapped_vars (tree
*tp
, int *walk_subtrees
, void *data
)
2830 struct nesting_info
*root
= (struct nesting_info
*) data
;
2836 tree
*slot
= root
->var_map
->get (t
);
2844 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2848 remap_vla_decls (tree block
, struct nesting_info
*root
)
2850 tree var
, subblock
, val
, type
;
2851 struct nesting_copy_body_data id
;
2853 for (subblock
= BLOCK_SUBBLOCKS (block
);
2855 subblock
= BLOCK_CHAIN (subblock
))
2856 remap_vla_decls (subblock
, root
);
2858 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
2859 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
2861 val
= DECL_VALUE_EXPR (var
);
2862 type
= TREE_TYPE (var
);
2864 if (!(TREE_CODE (val
) == INDIRECT_REF
2865 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
2866 && variably_modified_type_p (type
, NULL
)))
2869 if (root
->var_map
->get (TREE_OPERAND (val
, 0))
2870 || walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
2874 if (var
== NULL_TREE
)
2877 memset (&id
, 0, sizeof (id
));
2878 id
.cb
.copy_decl
= nesting_copy_decl
;
2879 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
2882 for (; var
; var
= DECL_CHAIN (var
))
2883 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
2885 struct nesting_info
*i
;
2888 val
= DECL_VALUE_EXPR (var
);
2889 type
= TREE_TYPE (var
);
2891 if (!(TREE_CODE (val
) == INDIRECT_REF
2892 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
2893 && variably_modified_type_p (type
, NULL
)))
2896 tree
*slot
= root
->var_map
->get (TREE_OPERAND (val
, 0));
2897 if (!slot
&& !walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
2900 context
= decl_function_context (var
);
2901 for (i
= root
; i
; i
= i
->outer
)
2902 if (i
->context
== context
)
2908 /* Fully expand value expressions. This avoids having debug variables
2909 only referenced from them and that can be swept during GC. */
2912 tree t
= (tree
) *slot
;
2913 gcc_assert (DECL_P (t
) && DECL_HAS_VALUE_EXPR_P (t
));
2914 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
), DECL_VALUE_EXPR (t
));
2917 id
.cb
.src_fn
= i
->context
;
2918 id
.cb
.dst_fn
= i
->context
;
2919 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
2921 TREE_TYPE (var
) = newt
= remap_type (type
, &id
.cb
);
2922 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
2924 newt
= TREE_TYPE (newt
);
2925 type
= TREE_TYPE (type
);
2927 if (TYPE_NAME (newt
)
2928 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
2929 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
2931 && TYPE_NAME (newt
) == TYPE_NAME (type
))
2932 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
2934 walk_tree (&val
, copy_tree_body_r
, &id
.cb
, NULL
);
2935 if (val
!= DECL_VALUE_EXPR (var
))
2936 SET_DECL_VALUE_EXPR (var
, val
);
2939 delete id
.cb
.decl_map
;
2942 /* Fold the MEM_REF *E. */
2944 fold_mem_refs (tree
*const &e
, void *data ATTRIBUTE_UNUSED
)
2946 tree
*ref_p
= CONST_CAST2 (tree
*, const tree
*, (const tree
*)e
);
2947 *ref_p
= fold (*ref_p
);
2951 /* Given DECL, a nested function, build an initialization call for FIELD,
2952 the trampoline or descriptor for DECL, using FUNC as the function. */
2955 build_init_call_stmt (struct nesting_info
*info
, tree decl
, tree field
,
2958 tree arg1
, arg2
, arg3
, x
;
2960 gcc_assert (DECL_STATIC_CHAIN (decl
));
2961 arg3
= build_addr (info
->frame_decl
);
2963 arg2
= build_addr (decl
);
2965 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
2966 info
->frame_decl
, field
, NULL_TREE
);
2967 arg1
= build_addr (x
);
2969 return gimple_build_call (func
, 3, arg1
, arg2
, arg3
);
2972 /* Do "everything else" to clean up or complete state collected by the various
2973 walking passes -- create a field to hold the frame base address, lay out the
2974 types and decls, generate code to initialize the frame decl, store critical
2975 expressions in the struct function for rtl to find. */
2978 finalize_nesting_tree_1 (struct nesting_info
*root
)
2980 gimple_seq stmt_list
;
2982 tree context
= root
->context
;
2983 struct function
*sf
;
2987 /* If we created a non-local frame type or decl, we need to lay them
2988 out at this time. */
2989 if (root
->frame_type
)
2991 /* Debugging information needs to compute the frame base address of the
2992 parent frame out of the static chain from the nested frame.
2994 The static chain is the address of the FRAME record, so one could
2995 imagine it would be possible to compute the frame base address just
2996 adding a constant offset to this address. Unfortunately, this is not
2997 possible: if the FRAME object has alignment constraints that are
2998 stronger than the stack, then the offset between the frame base and
2999 the FRAME object will be dynamic.
3001 What we do instead is to append a field to the FRAME object that holds
3002 the frame base address: then debug info just has to fetch this
3005 /* Debugging information will refer to the CFA as the frame base
3006 address: we will do the same here. */
3007 const tree frame_addr_fndecl
3008 = builtin_decl_explicit (BUILT_IN_DWARF_CFA
);
3010 /* Create a field in the FRAME record to hold the frame base address for
3011 this stack frame. Since it will be used only by the debugger, put it
3012 at the end of the record in order not to shift all other offsets. */
3013 tree fb_decl
= make_node (FIELD_DECL
);
3015 DECL_NAME (fb_decl
) = get_identifier ("FRAME_BASE.PARENT");
3016 TREE_TYPE (fb_decl
) = ptr_type_node
;
3017 TREE_ADDRESSABLE (fb_decl
) = 1;
3018 DECL_CONTEXT (fb_decl
) = root
->frame_type
;
3019 TYPE_FIELDS (root
->frame_type
) = chainon (TYPE_FIELDS (root
->frame_type
),
3022 /* In some cases the frame type will trigger the -Wpadded warning.
3023 This is not helpful; suppress it. */
3024 int save_warn_padded
= warn_padded
;
3026 layout_type (root
->frame_type
);
3027 warn_padded
= save_warn_padded
;
3028 layout_decl (root
->frame_decl
, 0);
3030 /* Initialize the frame base address field. If the builtin we need is
3031 not available, set it to NULL so that debugging information does not
3033 tree fb_ref
= build3 (COMPONENT_REF
, TREE_TYPE (fb_decl
),
3034 root
->frame_decl
, fb_decl
, NULL_TREE
);
3037 if (frame_addr_fndecl
!= NULL_TREE
)
3039 gcall
*fb_gimple
= gimple_build_call (frame_addr_fndecl
, 1,
3041 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
3043 fb_tmp
= init_tmp_var_with_call (root
, &gsi
, fb_gimple
);
3046 fb_tmp
= build_int_cst (TREE_TYPE (fb_ref
), 0);
3047 gimple_seq_add_stmt (&stmt_list
,
3048 gimple_build_assign (fb_ref
, fb_tmp
));
3050 /* Remove root->frame_decl from root->new_local_var_chain, so
3051 that we can declare it also in the lexical blocks, which
3052 helps ensure virtual regs that end up appearing in its RTL
3053 expression get substituted in instantiate_virtual_regs(). */
3055 for (adjust
= &root
->new_local_var_chain
;
3056 *adjust
!= root
->frame_decl
;
3057 adjust
= &DECL_CHAIN (*adjust
))
3058 gcc_assert (DECL_CHAIN (*adjust
));
3059 *adjust
= DECL_CHAIN (*adjust
);
3061 DECL_CHAIN (root
->frame_decl
) = NULL_TREE
;
3062 declare_vars (root
->frame_decl
,
3063 gimple_seq_first_stmt (gimple_body (context
)), true);
3066 /* If any parameters were referenced non-locally, then we need to
3067 insert a copy. Likewise, if any variables were referenced by
3068 pointer, we need to initialize the address. */
3069 if (root
->any_parm_remapped
)
3072 for (p
= DECL_ARGUMENTS (context
); p
; p
= DECL_CHAIN (p
))
3076 field
= lookup_field_for_decl (root
, p
, NO_INSERT
);
3080 if (use_pointer_in_frame (p
))
3085 /* If the assignment is from a non-register the stmt is
3086 not valid gimple. Make it so by using a temporary instead. */
3087 if (!is_gimple_reg (x
)
3088 && is_gimple_reg_type (TREE_TYPE (x
)))
3090 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
3091 x
= init_tmp_var (root
, x
, &gsi
);
3094 y
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
3095 root
->frame_decl
, field
, NULL_TREE
);
3096 stmt
= gimple_build_assign (y
, x
);
3097 gimple_seq_add_stmt (&stmt_list
, stmt
);
3101 /* If a chain_field was created, then it needs to be initialized
3103 if (root
->chain_field
)
3105 tree x
= build3 (COMPONENT_REF
, TREE_TYPE (root
->chain_field
),
3106 root
->frame_decl
, root
->chain_field
, NULL_TREE
);
3107 stmt
= gimple_build_assign (x
, get_chain_decl (root
));
3108 gimple_seq_add_stmt (&stmt_list
, stmt
);
3111 /* If trampolines were created, then we need to initialize them. */
3112 if (root
->any_tramp_created
)
3114 struct nesting_info
*i
;
3115 for (i
= root
->inner
; i
; i
= i
->next
)
3119 field
= lookup_tramp_for_decl (root
, i
->context
, NO_INSERT
);
3123 x
= builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE
);
3124 stmt
= build_init_call_stmt (root
, i
->context
, field
, x
);
3125 gimple_seq_add_stmt (&stmt_list
, stmt
);
3129 /* If descriptors were created, then we need to initialize them. */
3130 if (root
->any_descr_created
)
3132 struct nesting_info
*i
;
3133 for (i
= root
->inner
; i
; i
= i
->next
)
3137 field
= lookup_descr_for_decl (root
, i
->context
, NO_INSERT
);
3141 x
= builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR
);
3142 stmt
= build_init_call_stmt (root
, i
->context
, field
, x
);
3143 gimple_seq_add_stmt (&stmt_list
, stmt
);
3147 /* If we created initialization statements, insert them. */
3151 annotate_all_with_location (stmt_list
, DECL_SOURCE_LOCATION (context
));
3152 bind
= gimple_seq_first_stmt_as_a_bind (gimple_body (context
));
3153 gimple_seq_add_seq (&stmt_list
, gimple_bind_body (bind
));
3154 gimple_bind_set_body (bind
, stmt_list
);
3157 /* If a chain_decl was created, then it needs to be registered with
3158 struct function so that it gets initialized from the static chain
3159 register at the beginning of the function. */
3160 sf
= DECL_STRUCT_FUNCTION (root
->context
);
3161 sf
->static_chain_decl
= root
->chain_decl
;
3163 /* Similarly for the non-local goto save area. */
3164 if (root
->nl_goto_field
)
3166 sf
->nonlocal_goto_save_area
3167 = get_frame_field (root
, context
, root
->nl_goto_field
, NULL
);
3168 sf
->has_nonlocal_label
= 1;
3171 /* Make sure all new local variables get inserted into the
3172 proper BIND_EXPR. */
3173 if (root
->new_local_var_chain
)
3174 declare_vars (root
->new_local_var_chain
,
3175 gimple_seq_first_stmt (gimple_body (root
->context
)),
3178 if (root
->debug_var_chain
)
3183 remap_vla_decls (DECL_INITIAL (root
->context
), root
);
3185 for (debug_var
= root
->debug_var_chain
; debug_var
;
3186 debug_var
= DECL_CHAIN (debug_var
))
3187 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3190 /* If there are any debug decls with variable length types,
3191 remap those types using other debug_var_chain variables. */
3194 struct nesting_copy_body_data id
;
3196 memset (&id
, 0, sizeof (id
));
3197 id
.cb
.copy_decl
= nesting_copy_decl
;
3198 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
3201 for (; debug_var
; debug_var
= DECL_CHAIN (debug_var
))
3202 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3204 tree type
= TREE_TYPE (debug_var
);
3205 tree newt
, t
= type
;
3206 struct nesting_info
*i
;
3208 for (i
= root
; i
; i
= i
->outer
)
3209 if (variably_modified_type_p (type
, i
->context
))
3215 id
.cb
.src_fn
= i
->context
;
3216 id
.cb
.dst_fn
= i
->context
;
3217 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
3219 TREE_TYPE (debug_var
) = newt
= remap_type (type
, &id
.cb
);
3220 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
3222 newt
= TREE_TYPE (newt
);
3225 if (TYPE_NAME (newt
)
3226 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
3227 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
3229 && TYPE_NAME (newt
) == TYPE_NAME (t
))
3230 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
3233 delete id
.cb
.decl_map
;
3236 scope
= gimple_seq_first_stmt_as_a_bind (gimple_body (root
->context
));
3237 if (gimple_bind_block (scope
))
3238 declare_vars (root
->debug_var_chain
, scope
, true);
3240 BLOCK_VARS (DECL_INITIAL (root
->context
))
3241 = chainon (BLOCK_VARS (DECL_INITIAL (root
->context
)),
3242 root
->debug_var_chain
);
3245 /* Fold the rewritten MEM_REF trees. */
3246 root
->mem_refs
->traverse
<void *, fold_mem_refs
> (NULL
);
3248 /* Dump the translated tree function. */
3251 fputs ("\n\n", dump_file
);
3252 dump_function_to_file (root
->context
, dump_file
, dump_flags
);
3257 finalize_nesting_tree (struct nesting_info
*root
)
3259 struct nesting_info
*n
;
3260 FOR_EACH_NEST_INFO (n
, root
)
3261 finalize_nesting_tree_1 (n
);
3264 /* Unnest the nodes and pass them to cgraph. */
3267 unnest_nesting_tree_1 (struct nesting_info
*root
)
3269 struct cgraph_node
*node
= cgraph_node::get (root
->context
);
3271 /* For nested functions update the cgraph to reflect unnesting.
3272 We also delay finalizing of these functions up to this point. */
3276 cgraph_node::finalize_function (root
->context
, true);
3281 unnest_nesting_tree (struct nesting_info
*root
)
3283 struct nesting_info
*n
;
3284 FOR_EACH_NEST_INFO (n
, root
)
3285 unnest_nesting_tree_1 (n
);
3288 /* Free the data structures allocated during this pass. */
3291 free_nesting_tree (struct nesting_info
*root
)
3293 struct nesting_info
*node
, *next
;
3295 node
= iter_nestinfo_start (root
);
3298 next
= iter_nestinfo_next (node
);
3299 delete node
->var_map
;
3300 delete node
->field_map
;
3301 delete node
->mem_refs
;
3308 /* Gimplify a function and all its nested functions. */
3310 gimplify_all_functions (struct cgraph_node
*root
)
3312 struct cgraph_node
*iter
;
3313 if (!gimple_body (root
->decl
))
3314 gimplify_function_tree (root
->decl
);
3315 for (iter
= root
->nested
; iter
; iter
= iter
->next_nested
)
3316 gimplify_all_functions (iter
);
3319 /* Main entry point for this pass. Process FNDECL and all of its nested
3320 subroutines and turn them into something less tightly bound. */
3323 lower_nested_functions (tree fndecl
)
3325 struct cgraph_node
*cgn
;
3326 struct nesting_info
*root
;
3328 /* If there are no nested functions, there's nothing to do. */
3329 cgn
= cgraph_node::get (fndecl
);
3333 gimplify_all_functions (cgn
);
3335 dump_file
= dump_begin (TDI_nested
, &dump_flags
);
3337 fprintf (dump_file
, "\n;; Function %s\n\n",
3338 lang_hooks
.decl_printable_name (fndecl
, 2));
3340 bitmap_obstack_initialize (&nesting_info_bitmap_obstack
);
3341 root
= create_nesting_tree (cgn
);
3343 walk_all_functions (convert_nonlocal_reference_stmt
,
3344 convert_nonlocal_reference_op
,
3346 walk_all_functions (convert_local_reference_stmt
,
3347 convert_local_reference_op
,
3349 walk_all_functions (convert_nl_goto_reference
, NULL
, root
);
3350 walk_all_functions (convert_nl_goto_receiver
, NULL
, root
);
3352 convert_all_function_calls (root
);
3353 finalize_nesting_tree (root
);
3354 unnest_nesting_tree (root
);
3356 free_nesting_tree (root
);
3357 bitmap_obstack_release (&nesting_info_bitmap_obstack
);
3361 dump_end (TDI_nested
, dump_file
);
3366 #include "gt-tree-nested.h"