1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 #include "stringpool.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
35 #include "tree-inline.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
47 /* The object of this pass is to lower the representation of a set of nested
48 functions in order to expose all of the gory details of the various
49 nonlocal references. We want to do this sooner rather than later, in
50 order to give us more freedom in emitting all of the functions in question.
52 Back in olden times, when gcc was young, we developed an insanely
53 complicated scheme whereby variables which were referenced nonlocally
54 were forced to live in the stack of the declaring function, and then
55 the nested functions magically discovered where these variables were
56 placed. In order for this scheme to function properly, it required
57 that the outer function be partially expanded, then we switch to
58 compiling the inner function, and once done with those we switch back
59 to compiling the outer function. Such delicate ordering requirements
60 makes it difficult to do whole translation unit optimizations
61 involving such functions.
63 The implementation here is much more direct. Everything that can be
64 referenced by an inner function is a member of an explicitly created
65 structure herein called the "nonlocal frame struct". The incoming
66 static chain for a nested function is a pointer to this struct in
67 the parent. In this way, we settle on known offsets from a known
68 base, and so are decoupled from the logic that places objects in the
69 function's stack frame. More importantly, we don't have to wait for
70 that to happen -- since the compilation of the inner function is no
71 longer tied to a real stack frame, the nonlocal frame struct can be
72 allocated anywhere. Which means that the outer function is now
75 Theory of operation here is very simple. Iterate over all the
76 statements in all the functions (depth first) several times,
77 allocating structures and fields on demand. In general we want to
78 examine inner functions first, so that we can avoid making changes
79 to outer functions which are unnecessary.
81 The order of the passes matters a bit, in that later passes will be
82 skipped if it is discovered that the functions don't actually interact
83 at all. That is, they're nested in the lexical sense but could have
84 been written as independent functions without change. */
89 struct nesting_info
*outer
;
90 struct nesting_info
*inner
;
91 struct nesting_info
*next
;
93 hash_map
<tree
, tree
> *field_map
;
94 hash_map
<tree
, tree
> *var_map
;
95 hash_set
<tree
*> *mem_refs
;
96 bitmap suppress_expansion
;
99 tree new_local_var_chain
;
100 tree debug_var_chain
;
107 bool any_parm_remapped
;
108 bool any_tramp_created
;
109 bool any_descr_created
;
110 char static_chain_added
;
114 /* Iterate over the nesting tree, starting with ROOT, depth first. */
116 static inline struct nesting_info
*
117 iter_nestinfo_start (struct nesting_info
*root
)
124 static inline struct nesting_info
*
125 iter_nestinfo_next (struct nesting_info
*node
)
128 return iter_nestinfo_start (node
->next
);
132 #define FOR_EACH_NEST_INFO(I, ROOT) \
133 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
135 /* Obstack used for the bitmaps in the struct above. */
136 static struct bitmap_obstack nesting_info_bitmap_obstack
;
139 /* We're working in so many different function contexts simultaneously,
140 that create_tmp_var is dangerous. Prevent mishap. */
141 #define create_tmp_var cant_use_create_tmp_var_here_dummy
143 /* Like create_tmp_var, except record the variable for registration at
144 the given nesting level. */
147 create_tmp_var_for (struct nesting_info
*info
, tree type
, const char *prefix
)
151 /* If the type is of variable size or a type which must be created by the
152 frontend, something is wrong. Note that we explicitly allow
153 incomplete types here, since we create them ourselves here. */
154 gcc_assert (!TREE_ADDRESSABLE (type
));
155 gcc_assert (!TYPE_SIZE_UNIT (type
)
156 || TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
);
158 tmp_var
= create_tmp_var_raw (type
, prefix
);
159 DECL_CONTEXT (tmp_var
) = info
->context
;
160 DECL_CHAIN (tmp_var
) = info
->new_local_var_chain
;
161 DECL_SEEN_IN_BIND_EXPR_P (tmp_var
) = 1;
162 if (TREE_CODE (type
) == COMPLEX_TYPE
163 || TREE_CODE (type
) == VECTOR_TYPE
)
164 DECL_GIMPLE_REG_P (tmp_var
) = 1;
166 info
->new_local_var_chain
= tmp_var
;
171 /* Take the address of EXP to be used within function CONTEXT.
172 Mark it for addressability as necessary. */
175 build_addr (tree exp
)
177 mark_addressable (exp
);
178 return build_fold_addr_expr (exp
);
181 /* Insert FIELD into TYPE, sorted by alignment requirements. */
184 insert_field_into_struct (tree type
, tree field
)
188 DECL_CONTEXT (field
) = type
;
190 for (p
= &TYPE_FIELDS (type
); *p
; p
= &DECL_CHAIN (*p
))
191 if (DECL_ALIGN (field
) >= DECL_ALIGN (*p
))
194 DECL_CHAIN (field
) = *p
;
197 /* Set correct alignment for frame struct type. */
198 if (TYPE_ALIGN (type
) < DECL_ALIGN (field
))
199 SET_TYPE_ALIGN (type
, DECL_ALIGN (field
));
202 /* Build or return the RECORD_TYPE that describes the frame state that is
203 shared between INFO->CONTEXT and its nested functions. This record will
204 not be complete until finalize_nesting_tree; up until that point we'll
205 be adding fields as necessary.
207 We also build the DECL that represents this frame in the function. */
210 get_frame_type (struct nesting_info
*info
)
212 tree type
= info
->frame_type
;
217 type
= make_node (RECORD_TYPE
);
219 name
= concat ("FRAME.",
220 IDENTIFIER_POINTER (DECL_NAME (info
->context
)),
222 TYPE_NAME (type
) = get_identifier (name
);
225 info
->frame_type
= type
;
226 info
->frame_decl
= create_tmp_var_for (info
, type
, "FRAME");
227 DECL_NONLOCAL_FRAME (info
->frame_decl
) = 1;
229 /* ??? Always make it addressable for now, since it is meant to
230 be pointed to by the static chain pointer. This pessimizes
231 when it turns out that no static chains are needed because
232 the nested functions referencing non-local variables are not
233 reachable, but the true pessimization is to create the non-
234 local frame structure in the first place. */
235 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
240 /* Return true if DECL should be referenced by pointer in the non-local frame
244 use_pointer_in_frame (tree decl
)
246 if (TREE_CODE (decl
) == PARM_DECL
)
248 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
249 sized DECLs, and inefficient to copy large aggregates. Don't bother
250 moving anything but scalar parameters. */
251 return AGGREGATE_TYPE_P (TREE_TYPE (decl
));
255 /* Variable-sized DECLs can only come from OMP clauses at this point
256 since the gimplifier has already turned the regular variables into
257 pointers. Do the same as the gimplifier. */
258 return !DECL_SIZE (decl
) || TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
;
262 /* Given DECL, a non-locally accessed variable, find or create a field
263 in the non-local frame structure for the given nesting context. */
266 lookup_field_for_decl (struct nesting_info
*info
, tree decl
,
267 enum insert_option insert
)
269 gcc_checking_assert (decl_function_context (decl
) == info
->context
);
271 if (insert
== NO_INSERT
)
273 tree
*slot
= info
->field_map
->get (decl
);
274 return slot
? *slot
: NULL_TREE
;
277 tree
*slot
= &info
->field_map
->get_or_insert (decl
);
280 tree type
= get_frame_type (info
);
281 tree field
= make_node (FIELD_DECL
);
282 DECL_NAME (field
) = DECL_NAME (decl
);
284 if (use_pointer_in_frame (decl
))
286 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
287 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
288 DECL_NONADDRESSABLE_P (field
) = 1;
292 TREE_TYPE (field
) = TREE_TYPE (decl
);
293 DECL_SOURCE_LOCATION (field
) = DECL_SOURCE_LOCATION (decl
);
294 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
295 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
296 TREE_ADDRESSABLE (field
) = TREE_ADDRESSABLE (decl
);
297 DECL_NONADDRESSABLE_P (field
) = !TREE_ADDRESSABLE (decl
);
298 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
300 /* Declare the transformation and adjust the original DECL. For a
301 variable or for a parameter when not optimizing, we make it point
302 to the field in the frame directly. For a parameter, we don't do
303 it when optimizing because the variable tracking pass will already
305 if (VAR_P (decl
) || !optimize
)
308 = build3 (COMPONENT_REF
, TREE_TYPE (field
), info
->frame_decl
,
311 /* If the next declaration is a PARM_DECL pointing to the DECL,
312 we need to adjust its VALUE_EXPR directly, since chains of
313 VALUE_EXPRs run afoul of garbage collection. This occurs
314 in Ada for Out parameters that aren't copied in. */
315 tree next
= DECL_CHAIN (decl
);
317 && TREE_CODE (next
) == PARM_DECL
318 && DECL_HAS_VALUE_EXPR_P (next
)
319 && DECL_VALUE_EXPR (next
) == decl
)
320 SET_DECL_VALUE_EXPR (next
, x
);
322 SET_DECL_VALUE_EXPR (decl
, x
);
323 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
327 insert_field_into_struct (type
, field
);
330 if (TREE_CODE (decl
) == PARM_DECL
)
331 info
->any_parm_remapped
= true;
337 /* Build or return the variable that holds the static chain within
338 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
341 get_chain_decl (struct nesting_info
*info
)
343 tree decl
= info
->chain_decl
;
349 type
= get_frame_type (info
->outer
);
350 type
= build_pointer_type (type
);
352 /* Note that this variable is *not* entered into any BIND_EXPR;
353 the construction of this variable is handled specially in
354 expand_function_start and initialize_inlined_parameters.
355 Note also that it's represented as a parameter. This is more
356 close to the truth, since the initial value does come from
358 decl
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
359 PARM_DECL
, create_tmp_var_name ("CHAIN"), type
);
360 DECL_ARTIFICIAL (decl
) = 1;
361 DECL_IGNORED_P (decl
) = 1;
362 TREE_USED (decl
) = 1;
363 DECL_CONTEXT (decl
) = info
->context
;
364 DECL_ARG_TYPE (decl
) = type
;
366 /* Tell tree-inline.c that we never write to this variable, so
367 it can copy-prop the replacement value immediately. */
368 TREE_READONLY (decl
) = 1;
370 info
->chain_decl
= decl
;
373 && (dump_flags
& TDF_DETAILS
)
374 && !DECL_STATIC_CHAIN (info
->context
))
375 fprintf (dump_file
, "Setting static-chain for %s\n",
376 lang_hooks
.decl_printable_name (info
->context
, 2));
378 DECL_STATIC_CHAIN (info
->context
) = 1;
383 /* Build or return the field within the non-local frame state that holds
384 the static chain for INFO->CONTEXT. This is the way to walk back up
385 multiple nesting levels. */
388 get_chain_field (struct nesting_info
*info
)
390 tree field
= info
->chain_field
;
394 tree type
= build_pointer_type (get_frame_type (info
->outer
));
396 field
= make_node (FIELD_DECL
);
397 DECL_NAME (field
) = get_identifier ("__chain");
398 TREE_TYPE (field
) = type
;
399 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
400 DECL_NONADDRESSABLE_P (field
) = 1;
402 insert_field_into_struct (get_frame_type (info
), field
);
404 info
->chain_field
= field
;
407 && (dump_flags
& TDF_DETAILS
)
408 && !DECL_STATIC_CHAIN (info
->context
))
409 fprintf (dump_file
, "Setting static-chain for %s\n",
410 lang_hooks
.decl_printable_name (info
->context
, 2));
412 DECL_STATIC_CHAIN (info
->context
) = 1;
417 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
420 init_tmp_var_with_call (struct nesting_info
*info
, gimple_stmt_iterator
*gsi
,
425 t
= create_tmp_var_for (info
, gimple_call_return_type (call
), NULL
);
426 gimple_call_set_lhs (call
, t
);
427 if (! gsi_end_p (*gsi
))
428 gimple_set_location (call
, gimple_location (gsi_stmt (*gsi
)));
429 gsi_insert_before (gsi
, call
, GSI_SAME_STMT
);
435 /* Copy EXP into a temporary. Allocate the temporary in the context of
436 INFO and insert the initialization statement before GSI. */
439 init_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
444 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
445 stmt
= gimple_build_assign (t
, exp
);
446 if (! gsi_end_p (*gsi
))
447 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
448 gsi_insert_before_without_update (gsi
, stmt
, GSI_SAME_STMT
);
454 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
457 gsi_gimplify_val (struct nesting_info
*info
, tree exp
,
458 gimple_stmt_iterator
*gsi
)
460 if (is_gimple_val (exp
))
463 return init_tmp_var (info
, exp
, gsi
);
466 /* Similarly, but copy from the temporary and insert the statement
467 after the iterator. */
470 save_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
475 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
476 stmt
= gimple_build_assign (exp
, t
);
477 if (! gsi_end_p (*gsi
))
478 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
479 gsi_insert_after_without_update (gsi
, stmt
, GSI_SAME_STMT
);
484 /* Build or return the type used to represent a nested function trampoline. */
486 static GTY(()) tree trampoline_type
;
489 get_trampoline_type (struct nesting_info
*info
)
491 unsigned align
, size
;
495 return trampoline_type
;
497 align
= TRAMPOLINE_ALIGNMENT
;
498 size
= TRAMPOLINE_SIZE
;
500 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
501 then allocate extra space so that we can do dynamic alignment. */
502 if (align
> STACK_BOUNDARY
)
504 size
+= ((align
/BITS_PER_UNIT
) - 1) & -(STACK_BOUNDARY
/BITS_PER_UNIT
);
505 align
= STACK_BOUNDARY
;
508 t
= build_index_type (size_int (size
- 1));
509 t
= build_array_type (char_type_node
, t
);
510 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
511 FIELD_DECL
, get_identifier ("__data"), t
);
512 SET_DECL_ALIGN (t
, align
);
513 DECL_USER_ALIGN (t
) = 1;
515 trampoline_type
= make_node (RECORD_TYPE
);
516 TYPE_NAME (trampoline_type
) = get_identifier ("__builtin_trampoline");
517 TYPE_FIELDS (trampoline_type
) = t
;
518 layout_type (trampoline_type
);
519 DECL_CONTEXT (t
) = trampoline_type
;
521 return trampoline_type
;
524 /* Build or return the type used to represent a nested function descriptor. */
526 static GTY(()) tree descriptor_type
;
529 get_descriptor_type (struct nesting_info
*info
)
531 /* The base alignment is that of a function. */
532 const unsigned align
= FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY
);
536 return descriptor_type
;
538 t
= build_index_type (integer_one_node
);
539 t
= build_array_type (ptr_type_node
, t
);
540 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
541 FIELD_DECL
, get_identifier ("__data"), t
);
542 SET_DECL_ALIGN (t
, MAX (TYPE_ALIGN (ptr_type_node
), align
));
543 DECL_USER_ALIGN (t
) = 1;
545 descriptor_type
= make_node (RECORD_TYPE
);
546 TYPE_NAME (descriptor_type
) = get_identifier ("__builtin_descriptor");
547 TYPE_FIELDS (descriptor_type
) = t
;
548 layout_type (descriptor_type
);
549 DECL_CONTEXT (t
) = descriptor_type
;
551 return descriptor_type
;
554 /* Given DECL, a nested function, find or create an element in the
555 var map for this function. */
558 lookup_element_for_decl (struct nesting_info
*info
, tree decl
,
559 enum insert_option insert
)
561 if (insert
== NO_INSERT
)
563 tree
*slot
= info
->var_map
->get (decl
);
564 return slot
? *slot
: NULL_TREE
;
567 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
569 *slot
= build_tree_list (NULL_TREE
, NULL_TREE
);
574 /* Given DECL, a nested function, create a field in the non-local
575 frame structure for this function. */
578 create_field_for_decl (struct nesting_info
*info
, tree decl
, tree type
)
580 tree field
= make_node (FIELD_DECL
);
581 DECL_NAME (field
) = DECL_NAME (decl
);
582 TREE_TYPE (field
) = type
;
583 TREE_ADDRESSABLE (field
) = 1;
584 insert_field_into_struct (get_frame_type (info
), field
);
588 /* Given DECL, a nested function, find or create a field in the non-local
589 frame structure for a trampoline for this function. */
592 lookup_tramp_for_decl (struct nesting_info
*info
, tree decl
,
593 enum insert_option insert
)
597 elt
= lookup_element_for_decl (info
, decl
, insert
);
601 field
= TREE_PURPOSE (elt
);
603 if (!field
&& insert
== INSERT
)
605 field
= create_field_for_decl (info
, decl
, get_trampoline_type (info
));
606 TREE_PURPOSE (elt
) = field
;
607 info
->any_tramp_created
= true;
613 /* Given DECL, a nested function, find or create a field in the non-local
614 frame structure for a descriptor for this function. */
617 lookup_descr_for_decl (struct nesting_info
*info
, tree decl
,
618 enum insert_option insert
)
622 elt
= lookup_element_for_decl (info
, decl
, insert
);
626 field
= TREE_VALUE (elt
);
628 if (!field
&& insert
== INSERT
)
630 field
= create_field_for_decl (info
, decl
, get_descriptor_type (info
));
631 TREE_VALUE (elt
) = field
;
632 info
->any_descr_created
= true;
638 /* Build or return the field within the non-local frame state that holds
639 the non-local goto "jmp_buf". The buffer itself is maintained by the
640 rtl middle-end as dynamic stack space is allocated. */
643 get_nl_goto_field (struct nesting_info
*info
)
645 tree field
= info
->nl_goto_field
;
651 /* For __builtin_nonlocal_goto, we need N words. The first is the
652 frame pointer, the rest is for the target's stack pointer save
653 area. The number of words is controlled by STACK_SAVEAREA_MODE;
654 not the best interface, but it'll do for now. */
655 if (Pmode
== ptr_mode
)
656 type
= ptr_type_node
;
658 type
= lang_hooks
.types
.type_for_mode (Pmode
, 1);
661 = as_a
<scalar_int_mode
> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
));
662 size
= GET_MODE_SIZE (mode
);
663 size
= size
/ GET_MODE_SIZE (Pmode
);
666 type
= build_array_type
667 (type
, build_index_type (size_int (size
)));
669 field
= make_node (FIELD_DECL
);
670 DECL_NAME (field
) = get_identifier ("__nl_goto_buf");
671 TREE_TYPE (field
) = type
;
672 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
673 TREE_ADDRESSABLE (field
) = 1;
675 insert_field_into_struct (get_frame_type (info
), field
);
677 info
->nl_goto_field
= field
;
683 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
686 walk_body (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
687 struct nesting_info
*info
, gimple_seq
*pseq
)
689 struct walk_stmt_info wi
;
691 memset (&wi
, 0, sizeof (wi
));
694 walk_gimple_seq_mod (pseq
, callback_stmt
, callback_op
, &wi
);
698 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
701 walk_function (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
702 struct nesting_info
*info
)
704 gimple_seq body
= gimple_body (info
->context
);
705 walk_body (callback_stmt
, callback_op
, info
, &body
);
706 gimple_set_body (info
->context
, body
);
709 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
712 walk_gimple_omp_for (gomp_for
*for_stmt
,
713 walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
714 struct nesting_info
*info
)
716 struct walk_stmt_info wi
;
721 walk_body (callback_stmt
, callback_op
, info
, gimple_omp_for_pre_body_ptr (for_stmt
));
724 memset (&wi
, 0, sizeof (wi
));
726 wi
.gsi
= gsi_last (seq
);
728 for (i
= 0; i
< gimple_omp_for_collapse (for_stmt
); i
++)
731 walk_tree (gimple_omp_for_index_ptr (for_stmt
, i
), callback_op
,
735 walk_tree (gimple_omp_for_initial_ptr (for_stmt
, i
), callback_op
,
740 walk_tree (gimple_omp_for_final_ptr (for_stmt
, i
), callback_op
,
743 t
= gimple_omp_for_incr (for_stmt
, i
);
744 gcc_assert (BINARY_CLASS_P (t
));
746 walk_tree (&TREE_OPERAND (t
, 0), callback_op
, &wi
, NULL
);
749 walk_tree (&TREE_OPERAND (t
, 1), callback_op
, &wi
, NULL
);
752 seq
= gsi_seq (wi
.gsi
);
753 if (!gimple_seq_empty_p (seq
))
755 gimple_seq pre_body
= gimple_omp_for_pre_body (for_stmt
);
756 annotate_all_with_location (seq
, gimple_location (for_stmt
));
757 gimple_seq_add_seq (&pre_body
, seq
);
758 gimple_omp_for_set_pre_body (for_stmt
, pre_body
);
762 /* Similarly for ROOT and all functions nested underneath, depth first. */
765 walk_all_functions (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
766 struct nesting_info
*root
)
768 struct nesting_info
*n
;
769 FOR_EACH_NEST_INFO (n
, root
)
770 walk_function (callback_stmt
, callback_op
, n
);
774 /* We have to check for a fairly pathological case. The operands of function
775 nested function are to be interpreted in the context of the enclosing
776 function. So if any are variably-sized, they will get remapped when the
777 enclosing function is inlined. But that remapping would also have to be
778 done in the types of the PARM_DECLs of the nested function, meaning the
779 argument types of that function will disagree with the arguments in the
780 calls to that function. So we'd either have to make a copy of the nested
781 function corresponding to each time the enclosing function was inlined or
782 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
783 function. The former is not practical. The latter would still require
784 detecting this case to know when to add the conversions. So, for now at
785 least, we don't inline such an enclosing function.
787 We have to do that check recursively, so here return indicating whether
788 FNDECL has such a nested function. ORIG_FN is the function we were
789 trying to inline to use for checking whether any argument is variably
790 modified by anything in it.
792 It would be better to do this in tree-inline.c so that we could give
793 the appropriate warning for why a function can't be inlined, but that's
794 too late since the nesting structure has already been flattened and
795 adding a flag just to record this fact seems a waste of a flag. */
798 check_for_nested_with_variably_modified (tree fndecl
, tree orig_fndecl
)
800 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
803 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
805 for (arg
= DECL_ARGUMENTS (cgn
->decl
); arg
; arg
= DECL_CHAIN (arg
))
806 if (variably_modified_type_p (TREE_TYPE (arg
), orig_fndecl
))
809 if (check_for_nested_with_variably_modified (cgn
->decl
,
817 /* Construct our local datastructure describing the function nesting
818 tree rooted by CGN. */
820 static struct nesting_info
*
821 create_nesting_tree (struct cgraph_node
*cgn
)
823 struct nesting_info
*info
= XCNEW (struct nesting_info
);
824 info
->field_map
= new hash_map
<tree
, tree
>;
825 info
->var_map
= new hash_map
<tree
, tree
>;
826 info
->mem_refs
= new hash_set
<tree
*>;
827 info
->suppress_expansion
= BITMAP_ALLOC (&nesting_info_bitmap_obstack
);
828 info
->context
= cgn
->decl
;
830 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
832 struct nesting_info
*sub
= create_nesting_tree (cgn
);
834 sub
->next
= info
->inner
;
838 /* See discussion at check_for_nested_with_variably_modified for a
839 discussion of why this has to be here. */
840 if (check_for_nested_with_variably_modified (info
->context
, info
->context
))
841 DECL_UNINLINABLE (info
->context
) = true;
846 /* Return an expression computing the static chain for TARGET_CONTEXT
847 from INFO->CONTEXT. Insert any necessary computations before TSI. */
850 get_static_chain (struct nesting_info
*info
, tree target_context
,
851 gimple_stmt_iterator
*gsi
)
853 struct nesting_info
*i
;
856 if (info
->context
== target_context
)
858 x
= build_addr (info
->frame_decl
);
859 info
->static_chain_added
|= 1;
863 x
= get_chain_decl (info
);
864 info
->static_chain_added
|= 2;
866 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
868 tree field
= get_chain_field (i
);
870 x
= build_simple_mem_ref (x
);
871 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
872 x
= init_tmp_var (info
, x
, gsi
);
880 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
881 frame as seen from INFO->CONTEXT. Insert any necessary computations
885 get_frame_field (struct nesting_info
*info
, tree target_context
,
886 tree field
, gimple_stmt_iterator
*gsi
)
888 struct nesting_info
*i
;
891 if (info
->context
== target_context
)
893 /* Make sure frame_decl gets created. */
894 (void) get_frame_type (info
);
895 x
= info
->frame_decl
;
896 info
->static_chain_added
|= 1;
900 x
= get_chain_decl (info
);
901 info
->static_chain_added
|= 2;
903 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
905 tree field
= get_chain_field (i
);
907 x
= build_simple_mem_ref (x
);
908 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
909 x
= init_tmp_var (info
, x
, gsi
);
912 x
= build_simple_mem_ref (x
);
915 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
919 static void note_nonlocal_vla_type (struct nesting_info
*info
, tree type
);
921 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
922 in the nested function with DECL_VALUE_EXPR set to reference the true
923 variable in the parent function. This is used both for debug info
924 and in OMP lowering. */
927 get_nonlocal_debug_decl (struct nesting_info
*info
, tree decl
)
930 struct nesting_info
*i
;
931 tree x
, field
, new_decl
;
933 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
938 target_context
= decl_function_context (decl
);
940 /* A copy of the code in get_frame_field, but without the temporaries. */
941 if (info
->context
== target_context
)
943 /* Make sure frame_decl gets created. */
944 (void) get_frame_type (info
);
945 x
= info
->frame_decl
;
947 info
->static_chain_added
|= 1;
951 x
= get_chain_decl (info
);
952 info
->static_chain_added
|= 2;
953 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
955 field
= get_chain_field (i
);
956 x
= build_simple_mem_ref (x
);
957 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
959 x
= build_simple_mem_ref (x
);
962 field
= lookup_field_for_decl (i
, decl
, INSERT
);
963 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
964 if (use_pointer_in_frame (decl
))
965 x
= build_simple_mem_ref (x
);
967 /* ??? We should be remapping types as well, surely. */
968 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
969 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
970 DECL_CONTEXT (new_decl
) = info
->context
;
971 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
972 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
973 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
974 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
975 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
976 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
977 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
978 if ((TREE_CODE (decl
) == PARM_DECL
979 || TREE_CODE (decl
) == RESULT_DECL
981 && DECL_BY_REFERENCE (decl
))
982 DECL_BY_REFERENCE (new_decl
) = 1;
984 SET_DECL_VALUE_EXPR (new_decl
, x
);
985 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
988 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
989 info
->debug_var_chain
= new_decl
;
992 && info
->context
!= target_context
993 && variably_modified_type_p (TREE_TYPE (decl
), NULL
))
994 note_nonlocal_vla_type (info
, TREE_TYPE (decl
));
1000 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1001 and PARM_DECLs that belong to outer functions.
1003 The rewrite will involve some number of structure accesses back up
1004 the static chain. E.g. for a variable FOO up one nesting level it'll
1005 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
1006 indirections apply to decls for which use_pointer_in_frame is true. */
1009 convert_nonlocal_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1011 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1012 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1016 switch (TREE_CODE (t
))
1019 /* Non-automatic variables are never processed. */
1020 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1026 tree x
, target_context
= decl_function_context (t
);
1028 if (info
->context
== target_context
)
1033 if (bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1034 x
= get_nonlocal_debug_decl (info
, t
);
1037 struct nesting_info
*i
= info
;
1038 while (i
&& i
->context
!= target_context
)
1040 /* If none of the outer contexts is the target context, this means
1041 that the VAR or PARM_DECL is referenced in a wrong context. */
1043 internal_error ("%s from %s referenced in %s",
1044 IDENTIFIER_POINTER (DECL_NAME (t
)),
1045 IDENTIFIER_POINTER (DECL_NAME (target_context
)),
1046 IDENTIFIER_POINTER (DECL_NAME (info
->context
)));
1048 x
= lookup_field_for_decl (i
, t
, INSERT
);
1049 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
1050 if (use_pointer_in_frame (t
))
1052 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1053 x
= build_simple_mem_ref (x
);
1060 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1062 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1070 /* We're taking the address of a label from a parent function, but
1071 this is not itself a non-local goto. Mark the label such that it
1072 will not be deleted, much as we would with a label address in
1074 if (decl_function_context (t
) != info
->context
)
1075 FORCED_LABEL (t
) = 1;
1080 bool save_val_only
= wi
->val_only
;
1082 wi
->val_only
= false;
1084 wi
->changed
= false;
1085 walk_tree (&TREE_OPERAND (t
, 0), convert_nonlocal_reference_op
, wi
, 0);
1086 wi
->val_only
= true;
1092 /* If we changed anything, we might no longer be directly
1093 referencing a decl. */
1094 save_context
= current_function_decl
;
1095 current_function_decl
= info
->context
;
1096 recompute_tree_invariant_for_addr_expr (t
);
1097 current_function_decl
= save_context
;
1099 /* If the callback converted the address argument in a context
1100 where we only accept variables (and min_invariant, presumably),
1101 then compute the address into a temporary. */
1103 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1113 case ARRAY_RANGE_REF
:
1115 /* Go down this entire nest and just look at the final prefix and
1116 anything that describes the references. Otherwise, we lose track
1117 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1118 wi
->val_only
= true;
1120 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1122 if (TREE_CODE (t
) == COMPONENT_REF
)
1123 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
, wi
,
1125 else if (TREE_CODE (t
) == ARRAY_REF
1126 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1128 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1130 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1132 walk_tree (&TREE_OPERAND (t
, 3), convert_nonlocal_reference_op
,
1136 wi
->val_only
= false;
1137 walk_tree (tp
, convert_nonlocal_reference_op
, wi
, NULL
);
1140 case VIEW_CONVERT_EXPR
:
1141 /* Just request to look at the subtrees, leaving val_only and lhs
1142 untouched. This might actually be for !val_only + lhs, in which
1143 case we don't want to force a replacement by a temporary. */
1148 if (!IS_TYPE_OR_DECL_P (t
))
1151 wi
->val_only
= true;
1160 static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator
*, bool *,
1161 struct walk_stmt_info
*);
1163 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1164 and PARM_DECLs that belong to outer functions. */
1167 convert_nonlocal_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1169 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1170 bool need_chain
= false, need_stmts
= false;
1173 bitmap new_suppress
;
1175 new_suppress
= BITMAP_GGC_ALLOC ();
1176 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1178 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1180 switch (OMP_CLAUSE_CODE (clause
))
1182 case OMP_CLAUSE_REDUCTION
:
1183 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1185 goto do_decl_clause
;
1187 case OMP_CLAUSE_LASTPRIVATE
:
1188 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1190 goto do_decl_clause
;
1192 case OMP_CLAUSE_LINEAR
:
1193 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1195 wi
->val_only
= true;
1197 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
),
1199 goto do_decl_clause
;
1201 case OMP_CLAUSE_PRIVATE
:
1202 case OMP_CLAUSE_FIRSTPRIVATE
:
1203 case OMP_CLAUSE_COPYPRIVATE
:
1204 case OMP_CLAUSE_SHARED
:
1205 case OMP_CLAUSE_TO_DECLARE
:
1206 case OMP_CLAUSE_LINK
:
1207 case OMP_CLAUSE_USE_DEVICE_PTR
:
1208 case OMP_CLAUSE_IS_DEVICE_PTR
:
1210 decl
= OMP_CLAUSE_DECL (clause
);
1212 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1214 if (decl_function_context (decl
) != info
->context
)
1216 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1217 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1218 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1219 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1220 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1225 case OMP_CLAUSE_SCHEDULE
:
1226 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1229 case OMP_CLAUSE_FINAL
:
1231 case OMP_CLAUSE_NUM_THREADS
:
1232 case OMP_CLAUSE_DEPEND
:
1233 case OMP_CLAUSE_DEVICE
:
1234 case OMP_CLAUSE_NUM_TEAMS
:
1235 case OMP_CLAUSE_THREAD_LIMIT
:
1236 case OMP_CLAUSE_SAFELEN
:
1237 case OMP_CLAUSE_SIMDLEN
:
1238 case OMP_CLAUSE_PRIORITY
:
1239 case OMP_CLAUSE_GRAINSIZE
:
1240 case OMP_CLAUSE_NUM_TASKS
:
1241 case OMP_CLAUSE_HINT
:
1242 case OMP_CLAUSE_NUM_GANGS
:
1243 case OMP_CLAUSE_NUM_WORKERS
:
1244 case OMP_CLAUSE_VECTOR_LENGTH
:
1245 case OMP_CLAUSE_GANG
:
1246 case OMP_CLAUSE_WORKER
:
1247 case OMP_CLAUSE_VECTOR
:
1248 case OMP_CLAUSE_ASYNC
:
1249 case OMP_CLAUSE_WAIT
:
1250 /* Several OpenACC clauses have optional arguments. Check if they
1252 if (OMP_CLAUSE_OPERAND (clause
, 0))
1254 wi
->val_only
= true;
1256 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1260 /* The gang clause accepts two arguments. */
1261 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
1262 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
1264 wi
->val_only
= true;
1266 convert_nonlocal_reference_op
1267 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
1271 case OMP_CLAUSE_DIST_SCHEDULE
:
1272 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1274 wi
->val_only
= true;
1276 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1281 case OMP_CLAUSE_MAP
:
1283 case OMP_CLAUSE_FROM
:
1284 if (OMP_CLAUSE_SIZE (clause
))
1286 wi
->val_only
= true;
1288 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause
),
1291 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1292 goto do_decl_clause
;
1293 wi
->val_only
= true;
1295 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_nonlocal_reference_op
,
1299 case OMP_CLAUSE_ALIGNED
:
1300 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1302 wi
->val_only
= true;
1304 convert_nonlocal_reference_op
1305 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1307 /* Like do_decl_clause, but don't add any suppression. */
1308 decl
= OMP_CLAUSE_DECL (clause
);
1310 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1312 if (decl_function_context (decl
) != info
->context
)
1314 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1315 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1320 case OMP_CLAUSE_NOWAIT
:
1321 case OMP_CLAUSE_ORDERED
:
1322 case OMP_CLAUSE_DEFAULT
:
1323 case OMP_CLAUSE_COPYIN
:
1324 case OMP_CLAUSE_COLLAPSE
:
1325 case OMP_CLAUSE_TILE
:
1326 case OMP_CLAUSE_UNTIED
:
1327 case OMP_CLAUSE_MERGEABLE
:
1328 case OMP_CLAUSE_PROC_BIND
:
1329 case OMP_CLAUSE_NOGROUP
:
1330 case OMP_CLAUSE_THREADS
:
1331 case OMP_CLAUSE_SIMD
:
1332 case OMP_CLAUSE_DEFAULTMAP
:
1333 case OMP_CLAUSE_SEQ
:
1334 case OMP_CLAUSE_INDEPENDENT
:
1335 case OMP_CLAUSE_AUTO
:
1338 /* The following clause belongs to the OpenACC cache directive, which
1339 is discarded during gimplification. */
1340 case OMP_CLAUSE__CACHE_
:
1341 /* The following clauses are only allowed in the OpenMP declare simd
1342 directive, so not seen here. */
1343 case OMP_CLAUSE_UNIFORM
:
1344 case OMP_CLAUSE_INBRANCH
:
1345 case OMP_CLAUSE_NOTINBRANCH
:
1346 /* The following clauses are only allowed on OpenMP cancel and
1347 cancellation point directives, which at this point have already
1348 been lowered into a function call. */
1349 case OMP_CLAUSE_FOR
:
1350 case OMP_CLAUSE_PARALLEL
:
1351 case OMP_CLAUSE_SECTIONS
:
1352 case OMP_CLAUSE_TASKGROUP
:
1353 /* The following clauses are only added during OMP lowering; nested
1354 function decomposition happens before that. */
1355 case OMP_CLAUSE__LOOPTEMP_
:
1356 case OMP_CLAUSE__SIMDUID_
:
1357 case OMP_CLAUSE__GRIDDIM_
:
1358 /* Anything else. */
1364 info
->suppress_expansion
= new_suppress
;
1367 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1368 switch (OMP_CLAUSE_CODE (clause
))
1370 case OMP_CLAUSE_REDUCTION
:
1371 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1374 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1375 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1377 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1378 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1380 walk_body (convert_nonlocal_reference_stmt
,
1381 convert_nonlocal_reference_op
, info
,
1382 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1383 walk_body (convert_nonlocal_reference_stmt
,
1384 convert_nonlocal_reference_op
, info
,
1385 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1386 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1388 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1389 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1394 case OMP_CLAUSE_LASTPRIVATE
:
1395 walk_body (convert_nonlocal_reference_stmt
,
1396 convert_nonlocal_reference_op
, info
,
1397 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1400 case OMP_CLAUSE_LINEAR
:
1401 walk_body (convert_nonlocal_reference_stmt
,
1402 convert_nonlocal_reference_op
, info
,
1403 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
1413 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1416 note_nonlocal_vla_type (struct nesting_info
*info
, tree type
)
1418 while (POINTER_TYPE_P (type
) && !TYPE_NAME (type
))
1419 type
= TREE_TYPE (type
);
1421 if (TYPE_NAME (type
)
1422 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1423 && DECL_ORIGINAL_TYPE (TYPE_NAME (type
)))
1424 type
= DECL_ORIGINAL_TYPE (TYPE_NAME (type
));
1426 while (POINTER_TYPE_P (type
)
1427 || TREE_CODE (type
) == VECTOR_TYPE
1428 || TREE_CODE (type
) == FUNCTION_TYPE
1429 || TREE_CODE (type
) == METHOD_TYPE
)
1430 type
= TREE_TYPE (type
);
1432 if (TREE_CODE (type
) == ARRAY_TYPE
)
1436 note_nonlocal_vla_type (info
, TREE_TYPE (type
));
1437 domain
= TYPE_DOMAIN (type
);
1440 t
= TYPE_MIN_VALUE (domain
);
1441 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1442 && decl_function_context (t
) != info
->context
)
1443 get_nonlocal_debug_decl (info
, t
);
1444 t
= TYPE_MAX_VALUE (domain
);
1445 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1446 && decl_function_context (t
) != info
->context
)
1447 get_nonlocal_debug_decl (info
, t
);
1452 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1453 PARM_DECLs that belong to outer functions. This handles statements
1454 that are not handled via the standard recursion done in
1455 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1456 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1457 operands of STMT have been handled by this function. */
1460 convert_nonlocal_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1461 struct walk_stmt_info
*wi
)
1463 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1464 tree save_local_var_chain
;
1465 bitmap save_suppress
;
1466 gimple
*stmt
= gsi_stmt (*gsi
);
1468 switch (gimple_code (stmt
))
1471 /* Don't walk non-local gotos for now. */
1472 if (TREE_CODE (gimple_goto_dest (stmt
)) != LABEL_DECL
)
1474 wi
->val_only
= true;
1476 *handled_ops_p
= false;
1481 case GIMPLE_OMP_PARALLEL
:
1482 case GIMPLE_OMP_TASK
:
1483 save_suppress
= info
->suppress_expansion
;
1484 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1488 decl
= get_chain_decl (info
);
1489 c
= build_omp_clause (gimple_location (stmt
),
1490 OMP_CLAUSE_FIRSTPRIVATE
);
1491 OMP_CLAUSE_DECL (c
) = decl
;
1492 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1493 gimple_omp_taskreg_set_clauses (stmt
, c
);
1496 save_local_var_chain
= info
->new_local_var_chain
;
1497 info
->new_local_var_chain
= NULL
;
1499 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1500 info
, gimple_omp_body_ptr (stmt
));
1502 if (info
->new_local_var_chain
)
1503 declare_vars (info
->new_local_var_chain
,
1504 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1506 info
->new_local_var_chain
= save_local_var_chain
;
1507 info
->suppress_expansion
= save_suppress
;
1510 case GIMPLE_OMP_FOR
:
1511 save_suppress
= info
->suppress_expansion
;
1512 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1513 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
1514 convert_nonlocal_reference_stmt
,
1515 convert_nonlocal_reference_op
, info
);
1516 walk_body (convert_nonlocal_reference_stmt
,
1517 convert_nonlocal_reference_op
, info
, gimple_omp_body_ptr (stmt
));
1518 info
->suppress_expansion
= save_suppress
;
1521 case GIMPLE_OMP_SECTIONS
:
1522 save_suppress
= info
->suppress_expansion
;
1523 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1524 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1525 info
, gimple_omp_body_ptr (stmt
));
1526 info
->suppress_expansion
= save_suppress
;
1529 case GIMPLE_OMP_SINGLE
:
1530 save_suppress
= info
->suppress_expansion
;
1531 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1532 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1533 info
, gimple_omp_body_ptr (stmt
));
1534 info
->suppress_expansion
= save_suppress
;
1537 case GIMPLE_OMP_TARGET
:
1538 if (!is_gimple_omp_offloaded (stmt
))
1540 save_suppress
= info
->suppress_expansion
;
1541 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1543 info
->suppress_expansion
= save_suppress
;
1544 walk_body (convert_nonlocal_reference_stmt
,
1545 convert_nonlocal_reference_op
, info
,
1546 gimple_omp_body_ptr (stmt
));
1549 save_suppress
= info
->suppress_expansion
;
1550 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1554 decl
= get_chain_decl (info
);
1555 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
1556 OMP_CLAUSE_DECL (c
) = decl
;
1557 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
1558 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
1559 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
1560 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
1563 save_local_var_chain
= info
->new_local_var_chain
;
1564 info
->new_local_var_chain
= NULL
;
1566 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1567 info
, gimple_omp_body_ptr (stmt
));
1569 if (info
->new_local_var_chain
)
1570 declare_vars (info
->new_local_var_chain
,
1571 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1573 info
->new_local_var_chain
= save_local_var_chain
;
1574 info
->suppress_expansion
= save_suppress
;
1577 case GIMPLE_OMP_TEAMS
:
1578 save_suppress
= info
->suppress_expansion
;
1579 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
1580 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1581 info
, gimple_omp_body_ptr (stmt
));
1582 info
->suppress_expansion
= save_suppress
;
1585 case GIMPLE_OMP_SECTION
:
1586 case GIMPLE_OMP_MASTER
:
1587 case GIMPLE_OMP_TASKGROUP
:
1588 case GIMPLE_OMP_ORDERED
:
1589 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1590 info
, gimple_omp_body_ptr (stmt
));
1595 gbind
*bind_stmt
= as_a
<gbind
*> (stmt
);
1597 for (tree var
= gimple_bind_vars (bind_stmt
); var
; var
= DECL_CHAIN (var
))
1598 if (TREE_CODE (var
) == NAMELIST_DECL
)
1600 /* Adjust decls mentioned in NAMELIST_DECL. */
1601 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
1605 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
1608 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1610 if (decl_function_context (decl
) != info
->context
)
1611 CONSTRUCTOR_ELT (decls
, i
)->value
1612 = get_nonlocal_debug_decl (info
, decl
);
1616 *handled_ops_p
= false;
1620 wi
->val_only
= true;
1622 *handled_ops_p
= false;
1626 /* For every other statement that we are not interested in
1627 handling here, let the walker traverse the operands. */
1628 *handled_ops_p
= false;
1632 /* We have handled all of STMT operands, no need to traverse the operands. */
1633 *handled_ops_p
= true;
1638 /* A subroutine of convert_local_reference. Create a local variable
1639 in the parent function with DECL_VALUE_EXPR set to reference the
1640 field in FRAME. This is used both for debug info and in OMP
1644 get_local_debug_decl (struct nesting_info
*info
, tree decl
, tree field
)
1648 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
1652 /* Make sure frame_decl gets created. */
1653 (void) get_frame_type (info
);
1654 x
= info
->frame_decl
;
1655 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1657 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
1658 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1659 DECL_CONTEXT (new_decl
) = info
->context
;
1660 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1661 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1662 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1663 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1664 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1665 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1666 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1667 if ((TREE_CODE (decl
) == PARM_DECL
1668 || TREE_CODE (decl
) == RESULT_DECL
1670 && DECL_BY_REFERENCE (decl
))
1671 DECL_BY_REFERENCE (new_decl
) = 1;
1673 SET_DECL_VALUE_EXPR (new_decl
, x
);
1674 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1677 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1678 info
->debug_var_chain
= new_decl
;
1680 /* Do not emit debug info twice. */
1681 DECL_IGNORED_P (decl
) = 1;
1687 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1688 and PARM_DECLs that were referenced by inner nested functions.
1689 The rewrite will be a structure reference to the local frame variable. */
1691 static bool convert_local_omp_clauses (tree
*, struct walk_stmt_info
*);
1694 convert_local_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1696 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1697 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1698 tree t
= *tp
, field
, x
;
1702 switch (TREE_CODE (t
))
1705 /* Non-automatic variables are never processed. */
1706 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1711 if (t
!= info
->frame_decl
&& decl_function_context (t
) == info
->context
)
1713 /* If we copied a pointer to the frame, then the original decl
1714 is used unchanged in the parent function. */
1715 if (use_pointer_in_frame (t
))
1718 /* No need to transform anything if no child references the
1720 field
= lookup_field_for_decl (info
, t
, NO_INSERT
);
1725 if (bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1726 x
= get_local_debug_decl (info
, t
, field
);
1728 x
= get_frame_field (info
, info
->context
, field
, &wi
->gsi
);
1733 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1735 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1743 save_val_only
= wi
->val_only
;
1744 wi
->val_only
= false;
1746 wi
->changed
= false;
1747 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
, wi
, NULL
);
1748 wi
->val_only
= save_val_only
;
1750 /* If we converted anything ... */
1755 /* Then the frame decl is now addressable. */
1756 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
1758 save_context
= current_function_decl
;
1759 current_function_decl
= info
->context
;
1760 recompute_tree_invariant_for_addr_expr (t
);
1761 current_function_decl
= save_context
;
1763 /* If we are in a context where we only accept values, then
1764 compute the address into a temporary. */
1766 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1775 case ARRAY_RANGE_REF
:
1777 /* Go down this entire nest and just look at the final prefix and
1778 anything that describes the references. Otherwise, we lose track
1779 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1780 save_val_only
= wi
->val_only
;
1781 wi
->val_only
= true;
1783 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1785 if (TREE_CODE (t
) == COMPONENT_REF
)
1786 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1788 else if (TREE_CODE (t
) == ARRAY_REF
1789 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1791 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1793 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1795 walk_tree (&TREE_OPERAND (t
, 3), convert_local_reference_op
, wi
,
1799 wi
->val_only
= false;
1800 walk_tree (tp
, convert_local_reference_op
, wi
, NULL
);
1801 wi
->val_only
= save_val_only
;
1805 save_val_only
= wi
->val_only
;
1806 wi
->val_only
= true;
1808 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
,
1810 /* We need to re-fold the MEM_REF as component references as
1811 part of a ADDR_EXPR address are not allowed. But we cannot
1812 fold here, as the chain record type is not yet finalized. */
1813 if (TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
1814 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)))
1815 info
->mem_refs
->add (tp
);
1816 wi
->val_only
= save_val_only
;
1819 case VIEW_CONVERT_EXPR
:
1820 /* Just request to look at the subtrees, leaving val_only and lhs
1821 untouched. This might actually be for !val_only + lhs, in which
1822 case we don't want to force a replacement by a temporary. */
1827 if (!IS_TYPE_OR_DECL_P (t
))
1830 wi
->val_only
= true;
1839 static tree
convert_local_reference_stmt (gimple_stmt_iterator
*, bool *,
1840 struct walk_stmt_info
*);
1842 /* Helper for convert_local_reference. Convert all the references in
1843 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1846 convert_local_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1848 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1849 bool need_frame
= false, need_stmts
= false;
1852 bitmap new_suppress
;
1854 new_suppress
= BITMAP_GGC_ALLOC ();
1855 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1857 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1859 switch (OMP_CLAUSE_CODE (clause
))
1861 case OMP_CLAUSE_REDUCTION
:
1862 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1864 goto do_decl_clause
;
1866 case OMP_CLAUSE_LASTPRIVATE
:
1867 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1869 goto do_decl_clause
;
1871 case OMP_CLAUSE_LINEAR
:
1872 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1874 wi
->val_only
= true;
1876 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
), &dummy
,
1878 goto do_decl_clause
;
1880 case OMP_CLAUSE_PRIVATE
:
1881 case OMP_CLAUSE_FIRSTPRIVATE
:
1882 case OMP_CLAUSE_COPYPRIVATE
:
1883 case OMP_CLAUSE_SHARED
:
1884 case OMP_CLAUSE_TO_DECLARE
:
1885 case OMP_CLAUSE_LINK
:
1886 case OMP_CLAUSE_USE_DEVICE_PTR
:
1887 case OMP_CLAUSE_IS_DEVICE_PTR
:
1889 decl
= OMP_CLAUSE_DECL (clause
);
1891 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1893 if (decl_function_context (decl
) == info
->context
1894 && !use_pointer_in_frame (decl
))
1896 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1899 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1900 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1901 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1902 OMP_CLAUSE_DECL (clause
)
1903 = get_local_debug_decl (info
, decl
, field
);
1909 case OMP_CLAUSE_SCHEDULE
:
1910 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1913 case OMP_CLAUSE_FINAL
:
1915 case OMP_CLAUSE_NUM_THREADS
:
1916 case OMP_CLAUSE_DEPEND
:
1917 case OMP_CLAUSE_DEVICE
:
1918 case OMP_CLAUSE_NUM_TEAMS
:
1919 case OMP_CLAUSE_THREAD_LIMIT
:
1920 case OMP_CLAUSE_SAFELEN
:
1921 case OMP_CLAUSE_SIMDLEN
:
1922 case OMP_CLAUSE_PRIORITY
:
1923 case OMP_CLAUSE_GRAINSIZE
:
1924 case OMP_CLAUSE_NUM_TASKS
:
1925 case OMP_CLAUSE_HINT
:
1926 case OMP_CLAUSE_NUM_GANGS
:
1927 case OMP_CLAUSE_NUM_WORKERS
:
1928 case OMP_CLAUSE_VECTOR_LENGTH
:
1929 case OMP_CLAUSE_GANG
:
1930 case OMP_CLAUSE_WORKER
:
1931 case OMP_CLAUSE_VECTOR
:
1932 case OMP_CLAUSE_ASYNC
:
1933 case OMP_CLAUSE_WAIT
:
1934 /* Several OpenACC clauses have optional arguments. Check if they
1936 if (OMP_CLAUSE_OPERAND (clause
, 0))
1938 wi
->val_only
= true;
1940 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1944 /* The gang clause accepts two arguments. */
1945 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
1946 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
1948 wi
->val_only
= true;
1950 convert_nonlocal_reference_op
1951 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
1955 case OMP_CLAUSE_DIST_SCHEDULE
:
1956 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1958 wi
->val_only
= true;
1960 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1965 case OMP_CLAUSE_MAP
:
1967 case OMP_CLAUSE_FROM
:
1968 if (OMP_CLAUSE_SIZE (clause
))
1970 wi
->val_only
= true;
1972 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause
),
1975 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1976 goto do_decl_clause
;
1977 wi
->val_only
= true;
1979 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_local_reference_op
,
1983 case OMP_CLAUSE_ALIGNED
:
1984 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1986 wi
->val_only
= true;
1988 convert_local_reference_op
1989 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1991 /* Like do_decl_clause, but don't add any suppression. */
1992 decl
= OMP_CLAUSE_DECL (clause
);
1994 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1996 if (decl_function_context (decl
) == info
->context
1997 && !use_pointer_in_frame (decl
))
1999 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2002 OMP_CLAUSE_DECL (clause
)
2003 = get_local_debug_decl (info
, decl
, field
);
2009 case OMP_CLAUSE_NOWAIT
:
2010 case OMP_CLAUSE_ORDERED
:
2011 case OMP_CLAUSE_DEFAULT
:
2012 case OMP_CLAUSE_COPYIN
:
2013 case OMP_CLAUSE_COLLAPSE
:
2014 case OMP_CLAUSE_TILE
:
2015 case OMP_CLAUSE_UNTIED
:
2016 case OMP_CLAUSE_MERGEABLE
:
2017 case OMP_CLAUSE_PROC_BIND
:
2018 case OMP_CLAUSE_NOGROUP
:
2019 case OMP_CLAUSE_THREADS
:
2020 case OMP_CLAUSE_SIMD
:
2021 case OMP_CLAUSE_DEFAULTMAP
:
2022 case OMP_CLAUSE_SEQ
:
2023 case OMP_CLAUSE_INDEPENDENT
:
2024 case OMP_CLAUSE_AUTO
:
2027 /* The following clause belongs to the OpenACC cache directive, which
2028 is discarded during gimplification. */
2029 case OMP_CLAUSE__CACHE_
:
2030 /* The following clauses are only allowed in the OpenMP declare simd
2031 directive, so not seen here. */
2032 case OMP_CLAUSE_UNIFORM
:
2033 case OMP_CLAUSE_INBRANCH
:
2034 case OMP_CLAUSE_NOTINBRANCH
:
2035 /* The following clauses are only allowed on OpenMP cancel and
2036 cancellation point directives, which at this point have already
2037 been lowered into a function call. */
2038 case OMP_CLAUSE_FOR
:
2039 case OMP_CLAUSE_PARALLEL
:
2040 case OMP_CLAUSE_SECTIONS
:
2041 case OMP_CLAUSE_TASKGROUP
:
2042 /* The following clauses are only added during OMP lowering; nested
2043 function decomposition happens before that. */
2044 case OMP_CLAUSE__LOOPTEMP_
:
2045 case OMP_CLAUSE__SIMDUID_
:
2046 case OMP_CLAUSE__GRIDDIM_
:
2047 /* Anything else. */
2053 info
->suppress_expansion
= new_suppress
;
2056 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
2057 switch (OMP_CLAUSE_CODE (clause
))
2059 case OMP_CLAUSE_REDUCTION
:
2060 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2063 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
2064 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2066 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2067 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2069 walk_body (convert_local_reference_stmt
,
2070 convert_local_reference_op
, info
,
2071 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
2072 walk_body (convert_local_reference_stmt
,
2073 convert_local_reference_op
, info
,
2074 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
2075 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2077 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2078 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2083 case OMP_CLAUSE_LASTPRIVATE
:
2084 walk_body (convert_local_reference_stmt
,
2085 convert_local_reference_op
, info
,
2086 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
2089 case OMP_CLAUSE_LINEAR
:
2090 walk_body (convert_local_reference_stmt
,
2091 convert_local_reference_op
, info
,
2092 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
2103 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2104 and PARM_DECLs that were referenced by inner nested functions.
2105 The rewrite will be a structure reference to the local frame variable. */
2108 convert_local_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2109 struct walk_stmt_info
*wi
)
2111 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2112 tree save_local_var_chain
;
2113 bitmap save_suppress
;
2114 char save_static_chain_added
;
2115 bool frame_decl_added
;
2116 gimple
*stmt
= gsi_stmt (*gsi
);
2118 switch (gimple_code (stmt
))
2120 case GIMPLE_OMP_PARALLEL
:
2121 case GIMPLE_OMP_TASK
:
2122 save_suppress
= info
->suppress_expansion
;
2123 frame_decl_added
= false;
2124 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
2127 tree c
= build_omp_clause (gimple_location (stmt
),
2129 (void) get_frame_type (info
);
2130 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2131 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2132 gimple_omp_taskreg_set_clauses (stmt
, c
);
2133 info
->static_chain_added
|= 4;
2134 frame_decl_added
= true;
2137 save_local_var_chain
= info
->new_local_var_chain
;
2138 save_static_chain_added
= info
->static_chain_added
;
2139 info
->new_local_var_chain
= NULL
;
2140 info
->static_chain_added
= 0;
2142 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2143 gimple_omp_body_ptr (stmt
));
2145 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2147 tree c
= build_omp_clause (gimple_location (stmt
),
2149 (void) get_frame_type (info
);
2150 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2151 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2152 info
->static_chain_added
|= 4;
2153 gimple_omp_taskreg_set_clauses (stmt
, c
);
2155 if (info
->new_local_var_chain
)
2156 declare_vars (info
->new_local_var_chain
,
2157 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2158 info
->new_local_var_chain
= save_local_var_chain
;
2159 info
->suppress_expansion
= save_suppress
;
2160 info
->static_chain_added
|= save_static_chain_added
;
2163 case GIMPLE_OMP_FOR
:
2164 save_suppress
= info
->suppress_expansion
;
2165 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
2166 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
2167 convert_local_reference_stmt
,
2168 convert_local_reference_op
, info
);
2169 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2170 info
, gimple_omp_body_ptr (stmt
));
2171 info
->suppress_expansion
= save_suppress
;
2174 case GIMPLE_OMP_SECTIONS
:
2175 save_suppress
= info
->suppress_expansion
;
2176 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
2177 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2178 info
, gimple_omp_body_ptr (stmt
));
2179 info
->suppress_expansion
= save_suppress
;
2182 case GIMPLE_OMP_SINGLE
:
2183 save_suppress
= info
->suppress_expansion
;
2184 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
2185 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2186 info
, gimple_omp_body_ptr (stmt
));
2187 info
->suppress_expansion
= save_suppress
;
2190 case GIMPLE_OMP_TARGET
:
2191 if (!is_gimple_omp_offloaded (stmt
))
2193 save_suppress
= info
->suppress_expansion
;
2194 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
);
2195 info
->suppress_expansion
= save_suppress
;
2196 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2197 info
, gimple_omp_body_ptr (stmt
));
2200 save_suppress
= info
->suppress_expansion
;
2201 frame_decl_added
= false;
2202 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
))
2204 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2205 (void) get_frame_type (info
);
2206 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2207 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2208 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2209 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2210 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2211 info
->static_chain_added
|= 4;
2212 frame_decl_added
= true;
2215 save_local_var_chain
= info
->new_local_var_chain
;
2216 save_static_chain_added
= info
->static_chain_added
;
2217 info
->new_local_var_chain
= NULL
;
2218 info
->static_chain_added
= 0;
2220 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2221 gimple_omp_body_ptr (stmt
));
2223 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2225 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2226 (void) get_frame_type (info
);
2227 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2228 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2229 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2230 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2231 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2232 info
->static_chain_added
|= 4;
2235 if (info
->new_local_var_chain
)
2236 declare_vars (info
->new_local_var_chain
,
2237 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2238 info
->new_local_var_chain
= save_local_var_chain
;
2239 info
->suppress_expansion
= save_suppress
;
2240 info
->static_chain_added
|= save_static_chain_added
;
2243 case GIMPLE_OMP_TEAMS
:
2244 save_suppress
= info
->suppress_expansion
;
2245 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
2246 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2247 info
, gimple_omp_body_ptr (stmt
));
2248 info
->suppress_expansion
= save_suppress
;
2251 case GIMPLE_OMP_SECTION
:
2252 case GIMPLE_OMP_MASTER
:
2253 case GIMPLE_OMP_TASKGROUP
:
2254 case GIMPLE_OMP_ORDERED
:
2255 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2256 info
, gimple_omp_body_ptr (stmt
));
2260 wi
->val_only
= true;
2262 *handled_ops_p
= false;
2266 if (gimple_clobber_p (stmt
))
2268 tree lhs
= gimple_assign_lhs (stmt
);
2269 if (!use_pointer_in_frame (lhs
)
2270 && lookup_field_for_decl (info
, lhs
, NO_INSERT
))
2272 gsi_replace (gsi
, gimple_build_nop (), true);
2276 *handled_ops_p
= false;
2280 for (tree var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
2282 var
= DECL_CHAIN (var
))
2283 if (TREE_CODE (var
) == NAMELIST_DECL
)
2285 /* Adjust decls mentioned in NAMELIST_DECL. */
2286 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
2290 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
2293 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2295 if (decl_function_context (decl
) == info
->context
2296 && !use_pointer_in_frame (decl
))
2298 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2301 CONSTRUCTOR_ELT (decls
, i
)->value
2302 = get_local_debug_decl (info
, decl
, field
);
2308 *handled_ops_p
= false;
2312 /* For every other statement that we are not interested in
2313 handling here, let the walker traverse the operands. */
2314 *handled_ops_p
= false;
2318 /* Indicate that we have handled all the operands ourselves. */
2319 *handled_ops_p
= true;
2324 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2325 that reference labels from outer functions. The rewrite will be a
2326 call to __builtin_nonlocal_goto. */
2329 convert_nl_goto_reference (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2330 struct walk_stmt_info
*wi
)
2332 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2333 tree label
, new_label
, target_context
, x
, field
;
2335 gimple
*stmt
= gsi_stmt (*gsi
);
/* Only GIMPLE_GOTOs whose destination is a LABEL_DECL owned by an outer
   function are rewritten; anything else falls through to the default
   operand walker.  */
2337 if (gimple_code (stmt
) != GIMPLE_GOTO
)
2339 *handled_ops_p
= false;
2343 label
= gimple_goto_dest (stmt
);
2344 if (TREE_CODE (label
) != LABEL_DECL
)
2346 *handled_ops_p
= false;
2350 target_context
= decl_function_context (label
);
2351 if (target_context
== info
->context
)
2353 *handled_ops_p
= false;
/* Walk outward to the nesting level that declares the target label.  */
2357 for (i
= info
->outer
; target_context
!= i
->context
; i
= i
->outer
)
2360 /* The original user label may also be used for a normal goto, therefore
2361 we must create a new label that will actually receive the abnormal
2362 control transfer. This new label will be marked LABEL_NONLOCAL; this
2363 mark will trigger proper behavior in the cfg, as well as cause the
2364 (hairy target-specific) non-local goto receiver code to be generated
2365 when we expand rtl. Enter this association into var_map so that we
2366 can insert the new label into the IL during a second pass. */
2367 tree
*slot
= &i
->var_map
->get_or_insert (label
);
2370 new_label
= create_artificial_label (UNKNOWN_LOCATION
);
2371 DECL_NONLOCAL (new_label
) = 1;
2377 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2378 field
= get_nl_goto_field (i
);
2379 x
= get_frame_field (info
, target_context
, field
, gsi
);
2381 x
= gsi_gimplify_val (info
, x
, gsi
);
/* Replace the goto itself with the builtin call.  */
2382 call
= gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO
),
2383 2, build_addr (new_label
), x
);
2384 gsi_replace (gsi
, call
, false);
2386 /* We have handled all of STMT's operands, no need to keep going. */
2387 *handled_ops_p
= true;
2392 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2393 are referenced via nonlocal goto from a nested function. The rewrite
2394 will involve installing a newly generated DECL_NONLOCAL label, and
2395 (potentially) a branch around the rtl gunk that is assumed to be
2396 attached to such a label. */
2399 convert_nl_goto_receiver (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2400 struct walk_stmt_info
*wi
)
2402 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2403 tree label
, new_label
;
2404 gimple_stmt_iterator tmp_gsi
;
/* Only GIMPLE_LABEL statements are interesting here.  */
2405 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (*gsi
));
2409 *handled_ops_p
= false;
2413 label
= gimple_label_label (stmt
);
/* Is this label the target of a non-local goto recorded earlier in
   var_map by convert_nl_goto_reference?  */
2415 tree
*slot
= info
->var_map
->get (label
);
2418 *handled_ops_p
= false;
2422 /* If there's any possibility that the previous statement falls through,
2423 then we must branch around the new non-local label. */
2425 gsi_prev (&tmp_gsi
);
2426 if (gsi_end_p (tmp_gsi
) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi
)))
2428 gimple
*stmt
= gimple_build_goto (label
);
2429 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
/* Insert the new DECL_NONLOCAL label ahead of the original one.  */
2432 new_label
= (tree
) *slot
;
2433 stmt
= gimple_build_label (new_label
);
2434 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2436 *handled_ops_p
= true;
2441 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2442 of nested functions that require the use of trampolines. The rewrite
2443 will involve a reference to a trampoline generated for the occasion. */
2446 convert_tramp_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
2448 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
2449 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2450 tree t
= *tp
, decl
, target_context
, x
, builtin
;
2455 switch (TREE_CODE (t
))
2459 T.1 = &CHAIN->tramp;
2460 T.2 = __builtin_adjust_trampoline (T.1);
2461 T.3 = (func_type)T.2;
/* ADDR_EXPR of a nested function: route the address through a trampoline
   (or descriptor) field in the parent's frame.  */
2464 decl
= TREE_OPERAND (t
, 0);
2465 if (TREE_CODE (decl
) != FUNCTION_DECL
)
2468 /* Only need to process nested functions. */
2469 target_context
= decl_function_context (decl
);
2470 if (!target_context
)
2473 /* If the nested function doesn't use a static chain, then
2474 it doesn't need a trampoline. */
2475 if (!DECL_STATIC_CHAIN (decl
))
2478 /* If we don't want a trampoline, then don't build one. */
2479 if (TREE_NO_TRAMPOLINE (t
))
2482 /* Lookup the immediate parent of the callee, as that's where
2483 we need to insert the trampoline. */
2484 for (i
= info
; i
->context
!= target_context
; i
= i
->outer
)
2487 /* Decide whether to generate a descriptor or a trampoline. */
2488 descr
= FUNC_ADDR_BY_DESCRIPTOR (t
) && !flag_trampolines
;
2491 x
= lookup_descr_for_decl (i
, decl
, INSERT
);
2493 x
= lookup_tramp_for_decl (i
, decl
, INSERT
);
2495 /* Compute the address of the field holding the trampoline. */
2496 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
2498 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
2500 /* Do machine-specific ugliness. Normally this will involve
2501 computing extra alignment, but it can really be anything. */
2503 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR
);
2505 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE
);
2506 call
= gimple_build_call (builtin
, 1, x
);
2507 x
= init_tmp_var_with_call (info
, &wi
->gsi
, call
);
2509 /* Cast back to the proper function type. */
2510 x
= build1 (NOP_EXPR
, TREE_TYPE (t
), x
);
2511 x
= init_tmp_var (info
, x
, &wi
->gsi
);
/* Default: keep walking subtrees except into types/decls; the handling
   of *walk_subtrees is on lines not visible in this extract.  */
2517 if (!IS_TYPE_OR_DECL_P (t
))
2526 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2527 to addresses of nested functions that require the use of
2528 trampolines. The rewrite will involve a reference to a trampoline
2529 generated for the occasion. */
2532 convert_tramp_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2533 struct walk_stmt_info
*wi
)
2535 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2536 gimple
*stmt
= gsi_stmt (*gsi
);
2538 switch (gimple_code (stmt
))
2542 /* Only walk call arguments, lest we generate trampolines for
2544 unsigned long i
, nargs
= gimple_call_num_args (stmt
);
2545 for (i
= 0; i
< nargs
; i
++)
2546 walk_tree (gimple_call_arg_ptr (stmt
, i
), convert_tramp_reference_op
,
2551 case GIMPLE_OMP_TARGET
:
2552 if (!is_gimple_omp_offloaded (stmt
))
2554 *handled_ops_p
= false;
/* Parallel/task (and offloaded target) regions: convert the body with
   fresh new_local_var_chain / static_chain_added bookkeeping, then add
   the clauses needed to pass CHAIN. / FRAME. into the region.  */
2558 case GIMPLE_OMP_PARALLEL
:
2559 case GIMPLE_OMP_TASK
:
2561 tree save_local_var_chain
= info
->new_local_var_chain
;
2562 walk_gimple_op (stmt
, convert_tramp_reference_op
, wi
);
2563 info
->new_local_var_chain
= NULL
;
2564 char save_static_chain_added
= info
->static_chain_added
;
2565 info
->static_chain_added
= 0;
2566 walk_body (convert_tramp_reference_stmt
, convert_tramp_reference_op
,
2567 info
, gimple_omp_body_ptr (stmt
));
2568 if (info
->new_local_var_chain
)
2569 declare_vars (info
->new_local_var_chain
,
2570 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
/* Bit 0 = frame decl used, bit 1 = chain decl used.  */
2572 for (int i
= 0; i
< 2; i
++)
2575 if ((info
->static_chain_added
& (1 << i
)) == 0)
2577 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2578 /* Don't add CHAIN.* or FRAME.* twice. */
2579 for (c
= gimple_omp_taskreg_clauses (stmt
);
2581 c
= OMP_CLAUSE_CHAIN (c
))
2582 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2583 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2584 && OMP_CLAUSE_DECL (c
) == decl
)
2586 if (c
== NULL
&& gimple_code (stmt
) != GIMPLE_OMP_TARGET
)
2588 c
= build_omp_clause (gimple_location (stmt
),
2589 i
? OMP_CLAUSE_FIRSTPRIVATE
2590 : OMP_CLAUSE_SHARED
);
2591 OMP_CLAUSE_DECL (c
) = decl
;
2592 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2593 gimple_omp_taskreg_set_clauses (stmt
, c
);
/* Offloaded target regions use a MAP clause instead.  */
2597 c
= build_omp_clause (gimple_location (stmt
),
2599 OMP_CLAUSE_DECL (c
) = decl
;
2600 OMP_CLAUSE_SET_MAP_KIND (c
,
2601 i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2602 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2603 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2604 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2608 info
->new_local_var_chain
= save_local_var_chain
;
2609 info
->static_chain_added
|= save_static_chain_added
;
2614 *handled_ops_p
= false;
2618 *handled_ops_p
= true;
2624 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2625 that reference nested functions to make sure that the static chain
2626 is set up properly for the call. */
2629 convert_gimple_call (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2630 struct walk_stmt_info
*wi
)
2632 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2633 tree decl
, target_context
;
2634 char save_static_chain_added
;
2636 gimple
*stmt
= gsi_stmt (*gsi
);
2638 switch (gimple_code (stmt
))
/* Direct calls to nested functions get their static chain set here.  */
2641 if (gimple_call_chain (stmt
))
2643 decl
= gimple_call_fndecl (stmt
);
2646 target_context
= decl_function_context (decl
);
2647 if (target_context
&& DECL_STATIC_CHAIN (decl
))
2649 struct nesting_info
*i
= info
;
2650 while (i
&& i
->context
!= target_context
)
2652 /* If none of the outer contexts is the target context, this means
2653 that the function is called in a wrong context. */
2655 internal_error ("%s from %s called in %s",
2656 IDENTIFIER_POINTER (DECL_NAME (decl
)),
2657 IDENTIFIER_POINTER (DECL_NAME (target_context
)),
2658 IDENTIFIER_POINTER (DECL_NAME (info
->context
)));
2660 gimple_call_set_chain (as_a
<gcall
*> (stmt
),
2661 get_static_chain (info
, target_context
,
/* Record which decl was used: bit 0 (frame) when the callee's parent is
   this very context, bit 1 (chain) when it is an outer one.  */
2663 info
->static_chain_added
|= (1 << (info
->context
!= target_context
));
2667 case GIMPLE_OMP_PARALLEL
:
2668 case GIMPLE_OMP_TASK
:
2669 save_static_chain_added
= info
->static_chain_added
;
2670 info
->static_chain_added
= 0;
2671 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2672 for (i
= 0; i
< 2; i
++)
2675 if ((info
->static_chain_added
& (1 << i
)) == 0)
2677 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2678 /* Don't add CHAIN.* or FRAME.* twice. */
2679 for (c
= gimple_omp_taskreg_clauses (stmt
);
2681 c
= OMP_CLAUSE_CHAIN (c
))
2682 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2683 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2684 && OMP_CLAUSE_DECL (c
) == decl
)
2688 c
= build_omp_clause (gimple_location (stmt
),
2689 i
? OMP_CLAUSE_FIRSTPRIVATE
2690 : OMP_CLAUSE_SHARED
);
2691 OMP_CLAUSE_DECL (c
) = decl
;
2692 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2693 gimple_omp_taskreg_set_clauses (stmt
, c
);
2696 info
->static_chain_added
|= save_static_chain_added
;
/* Non-offloaded targets just recurse; offloaded ones also need MAP
   clauses for any CHAIN./FRAME. used inside the region.  */
2699 case GIMPLE_OMP_TARGET
:
2700 if (!is_gimple_omp_offloaded (stmt
))
2702 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2705 save_static_chain_added
= info
->static_chain_added
;
2706 info
->static_chain_added
= 0;
2707 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2708 for (i
= 0; i
< 2; i
++)
2711 if ((info
->static_chain_added
& (1 << i
)) == 0)
2713 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2714 /* Don't add CHAIN.* or FRAME.* twice. */
2715 for (c
= gimple_omp_target_clauses (stmt
);
2717 c
= OMP_CLAUSE_CHAIN (c
))
2718 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
2719 && OMP_CLAUSE_DECL (c
) == decl
)
2723 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2724 OMP_CLAUSE_DECL (c
) = decl
;
2725 OMP_CLAUSE_SET_MAP_KIND (c
, i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2726 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2727 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2728 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2732 info
->static_chain_added
|= save_static_chain_added
;
/* OMP_FOR has a pre-body to walk in addition to its body.  */
2735 case GIMPLE_OMP_FOR
:
2736 walk_body (convert_gimple_call
, NULL
, info
,
2737 gimple_omp_for_pre_body_ptr (stmt
));
2739 case GIMPLE_OMP_SECTIONS
:
2740 case GIMPLE_OMP_SECTION
:
2741 case GIMPLE_OMP_SINGLE
:
2742 case GIMPLE_OMP_TEAMS
:
2743 case GIMPLE_OMP_MASTER
:
2744 case GIMPLE_OMP_TASKGROUP
:
2745 case GIMPLE_OMP_ORDERED
:
2746 case GIMPLE_OMP_CRITICAL
:
2747 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2751 /* Keep looking for other operands. */
2752 *handled_ops_p
= false;
2756 *handled_ops_p
= true;
2760 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2761 call expressions. At the same time, determine if a nested function
2762 actually uses its static chain; if not, remember that. */
2765 convert_all_function_calls (struct nesting_info
*root
)
2767 unsigned int chain_count
= 0, old_chain_count
, iter_count
;
2768 struct nesting_info
*n
;
2770 /* First, optimistically clear static_chain for all decls that haven't
2771 used the static chain already for variable access. But always create
2772 it if not optimizing. This makes it possible to reconstruct the static
2773 nesting tree at run time and thus to resolve up-level references from
2774 within the debugger. */
2775 FOR_EACH_NEST_INFO (n
, root
)
2777 tree decl
= n
->context
;
2781 (void) get_frame_type (n
);
2783 (void) get_chain_decl (n
);
2785 else if (!n
->outer
|| (!n
->chain_decl
&& !n
->chain_field
))
2787 DECL_STATIC_CHAIN (decl
) = 0;
2788 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2789 fprintf (dump_file
, "Guessing no static-chain for %s\n",
2790 lang_hooks
.decl_printable_name (decl
, 2));
2793 DECL_STATIC_CHAIN (decl
) = 1;
2794 chain_count
+= DECL_STATIC_CHAIN (decl
);
2797 /* Walk the functions and perform transformations. Note that these
2798 transformations can induce new uses of the static chain, which in turn
2799 require re-examining all users of the decl. */
2800 /* ??? It would make sense to try to use the call graph to speed this up,
2801 but the call graph hasn't really been built yet. Even if it did, we
2802 would still need to iterate in this loop since address-of references
2803 wouldn't show up in the callgraph anyway. */
/* Iterate to a fixed point of the DECL_STATIC_CHAIN settings.  */
2807 old_chain_count
= chain_count
;
2811 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2812 fputc ('\n', dump_file
);
2814 FOR_EACH_NEST_INFO (n
, root
)
2816 tree decl
= n
->context
;
2817 walk_function (convert_tramp_reference_stmt
,
2818 convert_tramp_reference_op
, n
);
2819 walk_function (convert_gimple_call
, NULL
, n
);
2820 chain_count
+= DECL_STATIC_CHAIN (decl
);
2823 while (chain_count
!= old_chain_count
);
2825 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2826 fprintf (dump_file
, "convert_all_function_calls iterations: %u\n\n",
/* Context object for nesting_copy_decl: carries the nesting_info whose
   var_map drives the decl remapping.  NOTE(review): the copy_body_data
   base member ('cb', referenced elsewhere as id.cb) is on lines not
   visible in this extract.  */
2830 struct nesting_copy_body_data
2833 struct nesting_info
*root
;
2836 /* A helper subroutine for debug_var_chain type remapping. */
2839 nesting_copy_decl (tree decl
, copy_body_data
*id
)
2841 struct nesting_copy_body_data
*nid
= (struct nesting_copy_body_data
*) id
;
/* A decl already present in the root's var_map maps to its replacement.  */
2842 tree
*slot
= nid
->root
->var_map
->get (decl
);
2845 return (tree
) *slot
;
/* TYPE_DECLs with an original type need that type remapped as well.  */
2847 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (decl
))
2849 tree new_decl
= copy_decl_no_change (decl
, id
);
2850 DECL_ORIGINAL_TYPE (new_decl
)
2851 = remap_type (DECL_ORIGINAL_TYPE (decl
), id
);
2856 || TREE_CODE (decl
) == PARM_DECL
2857 || TREE_CODE (decl
) == RESULT_DECL
)
2860 return copy_decl_no_change (decl
, id
);
2863 /* A helper function for remap_vla_decls. See if *TP contains
2864 some remapped variables. */
2867 contains_remapped_vars (tree
*tp
, int *walk_subtrees
, void *data
)
2869 struct nesting_info
*root
= (struct nesting_info
*) data
;
/* Any tree present in ROOT's var_map counts as remapped.  */
2875 tree
*slot
= root
->var_map
->get (t
);
2883 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2887 remap_vla_decls (tree block
, struct nesting_info
*root
)
2889 tree var
, subblock
, val
, type
;
2890 struct nesting_copy_body_data id
;
/* Depth-first: handle subblocks before this block's own variables.  */
2892 for (subblock
= BLOCK_SUBBLOCKS (block
);
2894 subblock
= BLOCK_CHAIN (subblock
))
2895 remap_vla_decls (subblock
, root
);
/* First pass: look for a VLA-style value expr (INDIRECT_REF of a
   VAR_DECL with variably modified type) that mentions remapped vars.  */
2897 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
2898 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
2900 val
= DECL_VALUE_EXPR (var
);
2901 type
= TREE_TYPE (var
);
2903 if (!(TREE_CODE (val
) == INDIRECT_REF
2904 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
2905 && variably_modified_type_p (type
, NULL
)))
2908 if (root
->var_map
->get (TREE_OPERAND (val
, 0))
2909 || walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
2913 if (var
== NULL_TREE
)
2916 memset (&id
, 0, sizeof (id
));
2917 id
.cb
.copy_decl
= nesting_copy_decl
;
2918 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
/* Second pass: actually remap the value expressions and types.  */
2921 for (; var
; var
= DECL_CHAIN (var
))
2922 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
2924 struct nesting_info
*i
;
2927 val
= DECL_VALUE_EXPR (var
);
2928 type
= TREE_TYPE (var
);
2930 if (!(TREE_CODE (val
) == INDIRECT_REF
2931 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
2932 && variably_modified_type_p (type
, NULL
)))
2935 tree
*slot
= root
->var_map
->get (TREE_OPERAND (val
, 0));
2936 if (!slot
&& !walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
2939 context
= decl_function_context (var
);
2940 for (i
= root
; i
; i
= i
->outer
)
2941 if (i
->context
== context
)
2947 /* Fully expand value expressions. This avoids having debug variables
2948 only referenced from them and that can be swept during GC. */
2951 tree t
= (tree
) *slot
;
2952 gcc_assert (DECL_P (t
) && DECL_HAS_VALUE_EXPR_P (t
));
2953 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
), DECL_VALUE_EXPR (t
));
2956 id
.cb
.src_fn
= i
->context
;
2957 id
.cb
.dst_fn
= i
->context
;
2958 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
2960 TREE_TYPE (var
) = newt
= remap_type (type
, &id
.cb
);
/* Walk down matching pointer layers so the TYPE_NAME fixup below is
   applied at the right level.  */
2961 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
2963 newt
= TREE_TYPE (newt
);
2964 type
= TREE_TYPE (type
);
2966 if (TYPE_NAME (newt
)
2967 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
2968 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
2970 && TYPE_NAME (newt
) == TYPE_NAME (type
))
2971 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
2973 walk_tree (&val
, copy_tree_body_r
, &id
.cb
, NULL
);
2974 if (val
!= DECL_VALUE_EXPR (var
))
2975 SET_DECL_VALUE_EXPR (var
, val
);
2978 delete id
.cb
.decl_map
;
2981 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
2985 fixup_vla_decls (tree block
)
2987 for (tree var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
2988 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
2990 tree val
= DECL_VALUE_EXPR (var
);
/* Only INDIRECT_REFs of a VAR_DECL that itself has a value expr.  */
2992 if (!(TREE_CODE (val
) == INDIRECT_REF
2993 && VAR_P (TREE_OPERAND (val
, 0))
2994 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val
, 0))))
2997 /* Fully expand value expressions. This avoids having debug variables
2998 only referenced from them and that can be swept during GC. */
2999 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
),
3000 DECL_VALUE_EXPR (TREE_OPERAND (val
, 0)));
3001 SET_DECL_VALUE_EXPR (var
, val
);
/* Recurse into subblocks.  */
3004 for (tree sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= BLOCK_CHAIN (sub
))
3005 fixup_vla_decls (sub
);
3008 /* Fold the MEM_REF *E. */
3010 fold_mem_refs (tree
*const &e
, void *data ATTRIBUTE_UNUSED
)
/* Strip the const so the stored tree can be folded in place.  */
3012 tree
*ref_p
= CONST_CAST2 (tree
*, const tree
*, (const tree
*)e
);
3013 *ref_p
= fold (*ref_p
);
3017 /* Given DECL, a nested function, build an initialization call for FIELD,
3018 the trampoline or descriptor for DECL, using FUNC as the function. */
3021 build_init_call_stmt (struct nesting_info
*info
, tree decl
, tree field
,
3024 tree arg1
, arg2
, arg3
, x
;
3026 gcc_assert (DECL_STATIC_CHAIN (decl
));
/* arg3 = &FRAME (the static chain value), arg2 = &DECL (the nested
   function), arg1 = &FRAME.field (where the trampoline/descriptor
   lives).  */
3027 arg3
= build_addr (info
->frame_decl
);
3029 arg2
= build_addr (decl
);
3031 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
3032 info
->frame_decl
, field
, NULL_TREE
);
3033 arg1
= build_addr (x
);
3035 return gimple_build_call (func
, 3, arg1
, arg2
, arg3
);
3038 /* Do "everything else" to clean up or complete state collected by the various
3039 walking passes -- create a field to hold the frame base address, lay out the
3040 types and decls, generate code to initialize the frame decl, store critical
3041 expressions in the struct function for rtl to find. */
3044 finalize_nesting_tree_1 (struct nesting_info
*root
)
3046 gimple_seq stmt_list
;
3048 tree context
= root
->context
;
3049 struct function
*sf
;
3053 /* If we created a non-local frame type or decl, we need to lay them
3054 out at this time. */
3055 if (root
->frame_type
)
3057 /* Debugging information needs to compute the frame base address of the
3058 parent frame out of the static chain from the nested frame.
3060 The static chain is the address of the FRAME record, so one could
3061 imagine it would be possible to compute the frame base address just
3062 adding a constant offset to this address. Unfortunately, this is not
3063 possible: if the FRAME object has alignment constraints that are
3064 stronger than the stack, then the offset between the frame base and
3065 the FRAME object will be dynamic.
3067 What we do instead is to append a field to the FRAME object that holds
3068 the frame base address: then debug info just has to fetch this
3071 /* Debugging information will refer to the CFA as the frame base
3072 address: we will do the same here. */
3073 const tree frame_addr_fndecl
3074 = builtin_decl_explicit (BUILT_IN_DWARF_CFA
);
3076 /* Create a field in the FRAME record to hold the frame base address for
3077 this stack frame. Since it will be used only by the debugger, put it
3078 at the end of the record in order not to shift all other offsets. */
3079 tree fb_decl
= make_node (FIELD_DECL
);
3081 DECL_NAME (fb_decl
) = get_identifier ("FRAME_BASE.PARENT");
3082 TREE_TYPE (fb_decl
) = ptr_type_node
;
3083 TREE_ADDRESSABLE (fb_decl
) = 1;
3084 DECL_CONTEXT (fb_decl
) = root
->frame_type
;
3085 TYPE_FIELDS (root
->frame_type
) = chainon (TYPE_FIELDS (root
->frame_type
),
3088 /* In some cases the frame type will trigger the -Wpadded warning.
3089 This is not helpful; suppress it. */
3090 int save_warn_padded
= warn_padded
;
3092 layout_type (root
->frame_type
);
3093 warn_padded
= save_warn_padded
;
3094 layout_decl (root
->frame_decl
, 0);
3096 /* Initialize the frame base address field. If the builtin we need is
3097 not available, set it to NULL so that debugging information does not
3099 tree fb_ref
= build3 (COMPONENT_REF
, TREE_TYPE (fb_decl
),
3100 root
->frame_decl
, fb_decl
, NULL_TREE
);
/* Use __builtin_dwarf_cfa when available; otherwise store 0.  */
3103 if (frame_addr_fndecl
!= NULL_TREE
)
3105 gcall
*fb_gimple
= gimple_build_call (frame_addr_fndecl
, 1,
3107 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
3109 fb_tmp
= init_tmp_var_with_call (root
, &gsi
, fb_gimple
);
3112 fb_tmp
= build_int_cst (TREE_TYPE (fb_ref
), 0);
3113 gimple_seq_add_stmt (&stmt_list
,
3114 gimple_build_assign (fb_ref
, fb_tmp
));
3116 /* Remove root->frame_decl from root->new_local_var_chain, so
3117 that we can declare it also in the lexical blocks, which
3118 helps ensure virtual regs that end up appearing in its RTL
3119 expression get substituted in instantiate_virtual_regs(). */
3121 for (adjust
= &root
->new_local_var_chain
;
3122 *adjust
!= root
->frame_decl
;
3123 adjust
= &DECL_CHAIN (*adjust
))
3124 gcc_assert (DECL_CHAIN (*adjust
));
3125 *adjust
= DECL_CHAIN (*adjust
);
3127 DECL_CHAIN (root
->frame_decl
) = NULL_TREE
;
3128 declare_vars (root
->frame_decl
,
3129 gimple_seq_first_stmt (gimple_body (context
)), true);
3132 /* If any parameters were referenced non-locally, then we need to insert
3133 a copy or a pointer. */
3134 if (root
->any_parm_remapped
)
3137 for (p
= DECL_ARGUMENTS (context
); p
; p
= DECL_CHAIN (p
))
3141 field
= lookup_field_for_decl (root
, p
, NO_INSERT
);
3145 if (use_pointer_in_frame (p
))
3150 /* If the assignment is from a non-register the stmt is
3151 not valid gimple. Make it so by using a temporary instead. */
3152 if (!is_gimple_reg (x
)
3153 && is_gimple_reg_type (TREE_TYPE (x
)))
3155 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
3156 x
= init_tmp_var (root
, x
, &gsi
);
3159 y
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
3160 root
->frame_decl
, field
, NULL_TREE
);
3161 stmt
= gimple_build_assign (y
, x
);
3162 gimple_seq_add_stmt (&stmt_list
, stmt
);
3166 /* If a chain_field was created, then it needs to be initialized
3168 if (root
->chain_field
)
3170 tree x
= build3 (COMPONENT_REF
, TREE_TYPE (root
->chain_field
),
3171 root
->frame_decl
, root
->chain_field
, NULL_TREE
);
3172 stmt
= gimple_build_assign (x
, get_chain_decl (root
));
3173 gimple_seq_add_stmt (&stmt_list
, stmt
);
3176 /* If trampolines were created, then we need to initialize them. */
3177 if (root
->any_tramp_created
)
3179 struct nesting_info
*i
;
3180 for (i
= root
->inner
; i
; i
= i
->next
)
3184 field
= lookup_tramp_for_decl (root
, i
->context
, NO_INSERT
);
3188 x
= builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE
);
3189 stmt
= build_init_call_stmt (root
, i
->context
, field
, x
);
3190 gimple_seq_add_stmt (&stmt_list
, stmt
);
3194 /* If descriptors were created, then we need to initialize them. */
3195 if (root
->any_descr_created
)
3197 struct nesting_info
*i
;
3198 for (i
= root
->inner
; i
; i
= i
->next
)
3202 field
= lookup_descr_for_decl (root
, i
->context
, NO_INSERT
);
3206 x
= builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR
);
3207 stmt
= build_init_call_stmt (root
, i
->context
, field
, x
);
3208 gimple_seq_add_stmt (&stmt_list
, stmt
);
3212 /* If we created initialization statements, insert them. */
3216 annotate_all_with_location (stmt_list
, DECL_SOURCE_LOCATION (context
));
3217 bind
= gimple_seq_first_stmt_as_a_bind (gimple_body (context
));
3218 gimple_seq_add_seq (&stmt_list
, gimple_bind_body (bind
));
3219 gimple_bind_set_body (bind
, stmt_list
);
3222 /* If a chain_decl was created, then it needs to be registered with
3223 struct function so that it gets initialized from the static chain
3224 register at the beginning of the function. */
3225 sf
= DECL_STRUCT_FUNCTION (root
->context
);
3226 sf
->static_chain_decl
= root
->chain_decl
;
3228 /* Similarly for the non-local goto save area. */
3229 if (root
->nl_goto_field
)
3231 sf
->nonlocal_goto_save_area
3232 = get_frame_field (root
, context
, root
->nl_goto_field
, NULL
);
3233 sf
->has_nonlocal_label
= 1;
3236 /* Make sure all new local variables get inserted into the
3237 proper BIND_EXPR. */
3238 if (root
->new_local_var_chain
)
3239 declare_vars (root
->new_local_var_chain
,
3240 gimple_seq_first_stmt (gimple_body (root
->context
)),
3243 if (root
->debug_var_chain
)
3248 remap_vla_decls (DECL_INITIAL (root
->context
), root
);
3250 for (debug_var
= root
->debug_var_chain
; debug_var
;
3251 debug_var
= DECL_CHAIN (debug_var
))
3252 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3255 /* If there are any debug decls with variable length types,
3256 remap those types using other debug_var_chain variables. */
3259 struct nesting_copy_body_data id
;
3261 memset (&id
, 0, sizeof (id
));
3262 id
.cb
.copy_decl
= nesting_copy_decl
;
3263 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
3266 for (; debug_var
; debug_var
= DECL_CHAIN (debug_var
))
3267 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3269 tree type
= TREE_TYPE (debug_var
);
3270 tree newt
, t
= type
;
3271 struct nesting_info
*i
;
/* Find the innermost context the type actually depends on.  */
3273 for (i
= root
; i
; i
= i
->outer
)
3274 if (variably_modified_type_p (type
, i
->context
))
3280 id
.cb
.src_fn
= i
->context
;
3281 id
.cb
.dst_fn
= i
->context
;
3282 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
3284 TREE_TYPE (debug_var
) = newt
= remap_type (type
, &id
.cb
);
3285 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
3287 newt
= TREE_TYPE (newt
);
3290 if (TYPE_NAME (newt
)
3291 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
3292 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
3294 && TYPE_NAME (newt
) == TYPE_NAME (t
))
3295 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
3298 delete id
.cb
.decl_map
;
/* Attach the debug vars either to the outermost bind's block or,
   failing that, to the function's top-level BLOCK.  */
3301 scope
= gimple_seq_first_stmt_as_a_bind (gimple_body (root
->context
));
3302 if (gimple_bind_block (scope
))
3303 declare_vars (root
->debug_var_chain
, scope
, true);
3305 BLOCK_VARS (DECL_INITIAL (root
->context
))
3306 = chainon (BLOCK_VARS (DECL_INITIAL (root
->context
)),
3307 root
->debug_var_chain
);
3310 fixup_vla_decls (DECL_INITIAL (root
->context
));
3312 /* Fold the rewritten MEM_REF trees. */
3313 root
->mem_refs
->traverse
<void *, fold_mem_refs
> (NULL
);
3315 /* Dump the translated tree function. */
3318 fputs ("\n\n", dump_file
);
3319 dump_function_to_file (root
->context
, dump_file
, dump_flags
);
/* Apply finalize_nesting_tree_1 to ROOT and every function in its
   nesting tree.  */
3324 finalize_nesting_tree (struct nesting_info
*root
)
3326 struct nesting_info
*n
;
3327 FOR_EACH_NEST_INFO (n
, root
)
3328 finalize_nesting_tree_1 (n
);
3331 /* Unnest the nodes and pass them to cgraph. */
/* Unnest a single function: hand it to cgraph for finalization now that
   it no longer depends on being expanded together with its parent.  */
3334 unnest_nesting_tree_1 (struct nesting_info
*root
)
3336 struct cgraph_node
*node
= cgraph_node::get (root
->context
);
3338 /* For nested functions update the cgraph to reflect unnesting.
3339 We also delay finalizing of these functions up to this point. */
3343 cgraph_node::finalize_function (root
->context
, true);
/* Apply unnest_nesting_tree_1 to every function in the nesting tree.  */
3348 unnest_nesting_tree (struct nesting_info
*root
)
3350 struct nesting_info
*n
;
3351 FOR_EACH_NEST_INFO (n
, root
)
3352 unnest_nesting_tree_1 (n
);
3355 /* Free the data structures allocated during this pass. */
3358 free_nesting_tree (struct nesting_info
*root
)
3360 struct nesting_info
*node
, *next
;
/* NEXT is fetched before NODE's maps are deleted, so iteration stays
   safe while the tree is being torn down.  */
3362 node
= iter_nestinfo_start (root
);
3365 next
= iter_nestinfo_next (node
);
3366 delete node
->var_map
;
3367 delete node
->field_map
;
3368 delete node
->mem_refs
;
3375 /* Gimplify a function and all its nested functions. */
3377 gimplify_all_functions (struct cgraph_node
*root
)
3379 struct cgraph_node
*iter
;
/* Gimplify the root only if it has not already been gimplified, then
   recurse over every nested function.  */
3380 if (!gimple_body (root
->decl
))
3381 gimplify_function_tree (root
->decl
);
3382 for (iter
= root
->nested
; iter
; iter
= iter
->next_nested
)
3383 gimplify_all_functions (iter
);
3386 /* Main entry point for this pass. Process FNDECL and all of its nested
3387 subroutines and turn them into something less tightly bound. */
3390 lower_nested_functions (tree fndecl
)
3392 struct cgraph_node
*cgn
;
3393 struct nesting_info
*root
;
3395 /* If there are no nested functions, there's nothing to do. */
3396 cgn
= cgraph_node::get (fndecl
);
3400 gimplify_all_functions (cgn
);
3402 dump_file
= dump_begin (TDI_nested
, &dump_flags
);
3404 fprintf (dump_file
, "\n;; Function %s\n\n",
3405 lang_hooks
.decl_printable_name (fndecl
, 2));
/* All of the nesting analysis and rewriting happens between the bitmap
   obstack initialize/release pair below.  */
3407 bitmap_obstack_initialize (&nesting_info_bitmap_obstack
);
3408 root
= create_nesting_tree (cgn
);
/* Rewriting passes, in order: non-local references, local references,
   non-local gotos and their receivers, then trampoline/call fixup.  */
3410 walk_all_functions (convert_nonlocal_reference_stmt
,
3411 convert_nonlocal_reference_op
,
3413 walk_all_functions (convert_local_reference_stmt
,
3414 convert_local_reference_op
,
3416 walk_all_functions (convert_nl_goto_reference
, NULL
, root
);
3417 walk_all_functions (convert_nl_goto_receiver
, NULL
, root
);
3419 convert_all_function_calls (root
);
3420 finalize_nesting_tree (root
);
3421 unnest_nesting_tree (root
);
3423 free_nesting_tree (root
);
3424 bitmap_obstack_release (&nesting_info_bitmap_obstack
);
3428 dump_end (TDI_nested
, dump_file
);
3433 #include "gt-tree-nested.h"