1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
40 #include "fold-const.h"
45 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
56 #include "omp-general.h"
58 #include "gimple-low.h"
60 #include "gomp-constants.h"
61 #include "splay-tree.h"
62 #include "gimple-walk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
65 #include "stringpool.h"
70 /* Hash set of poisoned variables in a bind expr. */
71 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
73 enum gimplify_omp_var_data
79 GOVD_FIRSTPRIVATE
= 16,
80 GOVD_LASTPRIVATE
= 32,
84 GOVD_DEBUG_PRIVATE
= 512,
85 GOVD_PRIVATE_OUTER_REF
= 1024,
89 /* Flag for GOVD_MAP: don't copy back. */
90 GOVD_MAP_TO_ONLY
= 8192,
92 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
93 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 16384,
95 GOVD_MAP_0LEN_ARRAY
= 32768,
97 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
98 GOVD_MAP_ALWAYS_TO
= 65536,
100 /* Flag for shared vars that are or might be stored to in the region. */
101 GOVD_WRITTEN
= 131072,
103 /* Flag for GOVD_MAP, if it is a forced mapping. */
104 GOVD_MAP_FORCE
= 262144,
106 /* Flag for GOVD_MAP: must be present already. */
107 GOVD_MAP_FORCE_PRESENT
= 524288,
109 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
110 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
117 ORT_WORKSHARE
= 0x00,
121 ORT_COMBINED_PARALLEL
= 0x03,
124 ORT_UNTIED_TASK
= 0x05,
127 ORT_COMBINED_TEAMS
= 0x09,
130 ORT_TARGET_DATA
= 0x10,
132 /* Data region with offloading. */
134 ORT_COMBINED_TARGET
= 0x21,
136 /* OpenACC variants. */
137 ORT_ACC
= 0x40, /* A generic OpenACC region. */
138 ORT_ACC_DATA
= ORT_ACC
| ORT_TARGET_DATA
, /* Data construct. */
139 ORT_ACC_PARALLEL
= ORT_ACC
| ORT_TARGET
, /* Parallel construct */
140 ORT_ACC_KERNELS
= ORT_ACC
| ORT_TARGET
| 0x80, /* Kernels construct. */
141 ORT_ACC_HOST_DATA
= ORT_ACC
| ORT_TARGET_DATA
| 0x80, /* Host data. */
143 /* Dummy OpenMP region, used to disable expansion of
144 DECL_VALUE_EXPRs in taskloop pre body. */
148 /* Gimplify hashtable helper. */
150 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
152 static inline hashval_t
hash (const elt_t
*);
153 static inline bool equal (const elt_t
*, const elt_t
*);
158 struct gimplify_ctx
*prev_context
;
160 vec
<gbind
*> bind_expr_stack
;
162 gimple_seq conditional_cleanups
;
166 vec
<tree
> case_labels
;
167 hash_set
<tree
> *live_switch_vars
;
168 /* The formal temporary table. Should this be persistent? */
169 hash_table
<gimplify_hasher
> *temp_htab
;
172 unsigned into_ssa
: 1;
173 unsigned allow_rhs_cond_expr
: 1;
174 unsigned in_cleanup_point_expr
: 1;
175 unsigned keep_stack
: 1;
176 unsigned save_stack
: 1;
177 unsigned in_switch_expr
: 1;
180 struct gimplify_omp_ctx
182 struct gimplify_omp_ctx
*outer_context
;
183 splay_tree variables
;
184 hash_set
<tree
> *privatized_types
;
185 /* Iteration variables in an OMP_FOR. */
186 vec
<tree
> loop_iter_var
;
188 enum omp_clause_default_kind default_kind
;
189 enum omp_region_type region_type
;
192 bool target_map_scalars_firstprivate
;
193 bool target_map_pointers_as_0len_arrays
;
194 bool target_firstprivatize_array_bases
;
197 static struct gimplify_ctx
*gimplify_ctxp
;
198 static struct gimplify_omp_ctx
*gimplify_omp_ctxp
;
200 /* Forward declaration. */
201 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
202 static hash_map
<tree
, tree
> *oacc_declare_returns
;
203 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
204 bool (*) (tree
), fallback_t
, bool);
206 /* Shorter alias name for the above function for use in gimplify.c
210 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
212 gimple_seq_add_stmt_without_update (seq_p
, gs
);
215 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
216 NULL, a new sequence is allocated. This function is
217 similar to gimple_seq_add_seq, but does not scan the operands.
218 During gimplification, we need to manipulate statement sequences
219 before the def/use vectors have been constructed. */
222 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
224 gimple_stmt_iterator si
;
229 si
= gsi_last (*dst_p
);
230 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
234 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
235 and popping gimplify contexts. */
237 static struct gimplify_ctx
*ctx_pool
= NULL
;
239 /* Return a gimplify context struct from the pool. */
241 static inline struct gimplify_ctx
*
244 struct gimplify_ctx
* c
= ctx_pool
;
247 ctx_pool
= c
->prev_context
;
249 c
= XNEW (struct gimplify_ctx
);
251 memset (c
, '\0', sizeof (*c
));
255 /* Put gimplify context C back into the pool. */
258 ctx_free (struct gimplify_ctx
*c
)
260 c
->prev_context
= ctx_pool
;
264 /* Free allocated ctx stack memory. */
267 free_gimplify_stack (void)
269 struct gimplify_ctx
*c
;
271 while ((c
= ctx_pool
))
273 ctx_pool
= c
->prev_context
;
279 /* Set up a context for the gimplifier. */
282 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
284 struct gimplify_ctx
*c
= ctx_alloc ();
286 c
->prev_context
= gimplify_ctxp
;
288 gimplify_ctxp
->into_ssa
= in_ssa
;
289 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
292 /* Tear down a context for the gimplifier. If BODY is non-null, then
293 put the temporaries into the outer BIND_EXPR. Otherwise, put them
296 BODY is not a sequence, but the first tuple in a sequence. */
299 pop_gimplify_context (gimple
*body
)
301 struct gimplify_ctx
*c
= gimplify_ctxp
;
304 && (!c
->bind_expr_stack
.exists ()
305 || c
->bind_expr_stack
.is_empty ()));
306 c
->bind_expr_stack
.release ();
307 gimplify_ctxp
= c
->prev_context
;
310 declare_vars (c
->temps
, body
, false);
312 record_vars (c
->temps
);
319 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
322 gimple_push_bind_expr (gbind
*bind_stmt
)
324 gimplify_ctxp
->bind_expr_stack
.reserve (8);
325 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
328 /* Pop the first element off the stack of bindings. */
331 gimple_pop_bind_expr (void)
333 gimplify_ctxp
->bind_expr_stack
.pop ();
336 /* Return the first element of the stack of bindings. */
339 gimple_current_bind_expr (void)
341 return gimplify_ctxp
->bind_expr_stack
.last ();
344 /* Return the stack of bindings created during gimplification. */
347 gimple_bind_expr_stack (void)
349 return gimplify_ctxp
->bind_expr_stack
;
352 /* Return true iff there is a COND_EXPR between us and the innermost
353 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
356 gimple_conditional_context (void)
358 return gimplify_ctxp
->conditions
> 0;
361 /* Note that we've entered a COND_EXPR. */
364 gimple_push_condition (void)
366 #ifdef ENABLE_GIMPLE_CHECKING
367 if (gimplify_ctxp
->conditions
== 0)
368 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
370 ++(gimplify_ctxp
->conditions
);
373 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
374 now, add any conditional cleanups we've seen to the prequeue. */
377 gimple_pop_condition (gimple_seq
*pre_p
)
379 int conds
= --(gimplify_ctxp
->conditions
);
381 gcc_assert (conds
>= 0);
384 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
385 gimplify_ctxp
->conditional_cleanups
= NULL
;
389 /* A stable comparison routine for use with splay trees and DECLs. */
392 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
397 return DECL_UID (a
) - DECL_UID (b
);
400 /* Create a new omp construct that deals with variable remapping. */
402 static struct gimplify_omp_ctx
*
403 new_omp_context (enum omp_region_type region_type
)
405 struct gimplify_omp_ctx
*c
;
407 c
= XCNEW (struct gimplify_omp_ctx
);
408 c
->outer_context
= gimplify_omp_ctxp
;
409 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
410 c
->privatized_types
= new hash_set
<tree
>;
411 c
->location
= input_location
;
412 c
->region_type
= region_type
;
413 if ((region_type
& ORT_TASK
) == 0)
414 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
416 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
421 /* Destroy an omp construct that deals with variable remapping. */
424 delete_omp_context (struct gimplify_omp_ctx
*c
)
426 splay_tree_delete (c
->variables
);
427 delete c
->privatized_types
;
428 c
->loop_iter_var
.release ();
432 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
433 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
435 /* Both gimplify the statement T and append it to *SEQ_P. This function
436 behaves exactly as gimplify_stmt, but you don't have to pass T as a
440 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
442 gimplify_stmt (&t
, seq_p
);
445 /* Gimplify statement T into sequence *SEQ_P, and return the first
446 tuple in the sequence of generated tuples for this statement.
447 Return NULL if gimplifying T produced no tuples. */
450 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
452 gimple_stmt_iterator last
= gsi_last (*seq_p
);
454 gimplify_and_add (t
, seq_p
);
456 if (!gsi_end_p (last
))
459 return gsi_stmt (last
);
462 return gimple_seq_first_stmt (*seq_p
);
465 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
466 LHS, or for a call argument. */
469 is_gimple_mem_rhs (tree t
)
471 /* If we're dealing with a renamable type, either source or dest must be
472 a renamed variable. */
473 if (is_gimple_reg_type (TREE_TYPE (t
)))
474 return is_gimple_val (t
);
476 return is_gimple_val (t
) || is_gimple_lvalue (t
);
479 /* Return true if T is a CALL_EXPR or an expression that can be
480 assigned to a temporary. Note that this predicate should only be
481 used during gimplification. See the rationale for this in
482 gimplify_modify_expr. */
485 is_gimple_reg_rhs_or_call (tree t
)
487 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
488 || TREE_CODE (t
) == CALL_EXPR
);
491 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
492 this predicate should only be used during gimplification. See the
493 rationale for this in gimplify_modify_expr. */
496 is_gimple_mem_rhs_or_call (tree t
)
498 /* If we're dealing with a renamable type, either source or dest must be
499 a renamed variable. */
500 if (is_gimple_reg_type (TREE_TYPE (t
)))
501 return is_gimple_val (t
);
503 return (is_gimple_val (t
)
504 || is_gimple_lvalue (t
)
505 || TREE_CLOBBER_P (t
)
506 || TREE_CODE (t
) == CALL_EXPR
);
509 /* Create a temporary with a name derived from VAL. Subroutine of
510 lookup_tmp_var; nobody else should call this function. */
513 create_tmp_from_val (tree val
)
515 /* Drop all qualifiers and address-space information from the value type. */
516 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
517 tree var
= create_tmp_var (type
, get_name (val
));
518 if (TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
519 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
)
520 DECL_GIMPLE_REG_P (var
) = 1;
524 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
525 an existing expression temporary. */
528 lookup_tmp_var (tree val
, bool is_formal
)
532 /* If not optimizing, never really reuse a temporary. local-alloc
533 won't allocate any variable that is used in more than one basic
534 block, which means it will go into memory, causing much extra
535 work in reload and final and poorer code generation, outweighing
536 the extra memory allocation here. */
537 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
538 ret
= create_tmp_from_val (val
);
545 if (!gimplify_ctxp
->temp_htab
)
546 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
547 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
550 elt_p
= XNEW (elt_t
);
552 elt_p
->temp
= ret
= create_tmp_from_val (val
);
565 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
568 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
569 bool is_formal
, bool allow_ssa
)
573 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
574 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
575 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
579 && gimplify_ctxp
->into_ssa
580 && is_gimple_reg_type (TREE_TYPE (val
)))
582 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
583 if (! gimple_in_ssa_p (cfun
))
585 const char *name
= get_name (val
);
587 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
591 t
= lookup_tmp_var (val
, is_formal
);
593 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
595 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
597 /* gimplify_modify_expr might want to reduce this further. */
598 gimplify_and_add (mod
, pre_p
);
604 /* Return a formal temporary variable initialized with VAL. PRE_P is as
605 in gimplify_expr. Only use this function if:
607 1) The value of the unfactored expression represented by VAL will not
608 change between the initialization and use of the temporary, and
609 2) The temporary will not be otherwise modified.
611 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
612 and #2 means it is inappropriate for && temps.
614 For other cases, use get_initialized_tmp_var instead. */
617 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
619 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true);
622 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
623 are as in gimplify_expr. */
626 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
629 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
);
632 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
633 generate debug info for them; otherwise don't. */
636 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
643 gbind
*scope
= as_a
<gbind
*> (gs
);
645 temps
= nreverse (last
);
647 block
= gimple_bind_block (scope
);
648 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
649 if (!block
|| !debug_info
)
651 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
652 gimple_bind_set_vars (scope
, temps
);
656 /* We need to attach the nodes both to the BIND_EXPR and to its
657 associated BLOCK for debugging purposes. The key point here
658 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
659 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
660 if (BLOCK_VARS (block
))
661 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
664 gimple_bind_set_vars (scope
,
665 chainon (gimple_bind_vars (scope
), temps
));
666 BLOCK_VARS (block
) = temps
;
672 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
673 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
674 no such upper bound can be obtained. */
677 force_constant_size (tree var
)
679 /* The only attempt we make is by querying the maximum size of objects
680 of the variable's type. */
682 HOST_WIDE_INT max_size
;
684 gcc_assert (VAR_P (var
));
686 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
688 gcc_assert (max_size
>= 0);
691 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
693 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
696 /* Push the temporary variable TMP into the current binding. */
699 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
701 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
703 /* Later processing assumes that the object size is constant, which might
704 not be true at this point. Force the use of a constant upper bound in
706 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp
)))
707 force_constant_size (tmp
);
709 DECL_CONTEXT (tmp
) = fn
->decl
;
710 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
712 record_vars_into (tmp
, fn
->decl
);
715 /* Push the temporary variable TMP into the current binding. */
718 gimple_add_tmp_var (tree tmp
)
720 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
722 /* Later processing assumes that the object size is constant, which might
723 not be true at this point. Force the use of a constant upper bound in
725 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp
)))
726 force_constant_size (tmp
);
728 DECL_CONTEXT (tmp
) = current_function_decl
;
729 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
733 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
734 gimplify_ctxp
->temps
= tmp
;
736 /* Mark temporaries local within the nearest enclosing parallel. */
737 if (gimplify_omp_ctxp
)
739 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
741 && (ctx
->region_type
== ORT_WORKSHARE
742 || ctx
->region_type
== ORT_SIMD
743 || ctx
->region_type
== ORT_ACC
))
744 ctx
= ctx
->outer_context
;
746 omp_add_variable (ctx
, tmp
, GOVD_LOCAL
| GOVD_SEEN
);
755 /* This case is for nested functions. We need to expose the locals
757 body_seq
= gimple_body (current_function_decl
);
758 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
764 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
765 nodes that are referenced more than once in GENERIC functions. This is
766 necessary because gimplification (translation into GIMPLE) is performed
767 by modifying tree nodes in-place, so gimplification of a shared node in a
768 first context could generate an invalid GIMPLE form in a second context.
770 This is achieved with a simple mark/copy/unmark algorithm that walks the
771 GENERIC representation top-down, marks nodes with TREE_VISITED the first
772 time it encounters them, duplicates them if they already have TREE_VISITED
773 set, and finally removes the TREE_VISITED marks it has set.
775 The algorithm works only at the function level, i.e. it generates a GENERIC
776 representation of a function with no nodes shared within the function when
777 passed a GENERIC function (except for nodes that are allowed to be shared).
779 At the global level, it is also necessary to unshare tree nodes that are
780 referenced in more than one function, for the same aforementioned reason.
781 This requires some cooperation from the front-end. There are 2 strategies:
783 1. Manual unsharing. The front-end needs to call unshare_expr on every
784 expression that might end up being shared across functions.
786 2. Deep unsharing. This is an extension of regular unsharing. Instead
787 of calling unshare_expr on expressions that might be shared across
788 functions, the front-end pre-marks them with TREE_VISITED. This will
789 ensure that they are unshared on the first reference within functions
790 when the regular unsharing algorithm runs. The counterpart is that
791 this algorithm must look deeper than for manual unsharing, which is
792 specified by LANG_HOOKS_DEEP_UNSHARING.
794 If there are only few specific cases of node sharing across functions, it is
795 probably easier for a front-end to unshare the expressions manually. On the
796 contrary, if the expressions generated at the global level are as widespread
797 as expressions generated within functions, deep unsharing is very likely the
800 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
801 These nodes model computations that must be done once. If we were to
802 unshare something like SAVE_EXPR(i++), the gimplification process would
803 create wrong code. However, if DATA is non-null, it must hold a pointer
804 set that is used to unshare the subtrees of these nodes. */
807 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
810 enum tree_code code
= TREE_CODE (t
);
812 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
813 copy their subtrees if we can make sure to do it only once. */
814 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
816 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
822 /* Stop at types, decls, constants like copy_tree_r. */
823 else if (TREE_CODE_CLASS (code
) == tcc_type
824 || TREE_CODE_CLASS (code
) == tcc_declaration
825 || TREE_CODE_CLASS (code
) == tcc_constant
)
828 /* Cope with the statement expression extension. */
829 else if (code
== STATEMENT_LIST
)
832 /* Leave the bulk of the work to copy_tree_r itself. */
834 copy_tree_r (tp
, walk_subtrees
, NULL
);
839 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
840 If *TP has been visited already, then *TP is deeply copied by calling
841 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
844 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
847 enum tree_code code
= TREE_CODE (t
);
849 /* Skip types, decls, and constants. But we do want to look at their
850 types and the bounds of types. Mark them as visited so we properly
851 unmark their subtrees on the unmark pass. If we've already seen them,
852 don't look down further. */
853 if (TREE_CODE_CLASS (code
) == tcc_type
854 || TREE_CODE_CLASS (code
) == tcc_declaration
855 || TREE_CODE_CLASS (code
) == tcc_constant
)
857 if (TREE_VISITED (t
))
860 TREE_VISITED (t
) = 1;
863 /* If this node has been visited already, unshare it and don't look
865 else if (TREE_VISITED (t
))
867 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
871 /* Otherwise, mark the node as visited and keep looking. */
873 TREE_VISITED (t
) = 1;
878 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
879 copy_if_shared_r callback unmodified. */
882 copy_if_shared (tree
*tp
, void *data
)
884 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
887 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
888 any nested functions. */
891 unshare_body (tree fndecl
)
893 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
894 /* If the language requires deep unsharing, we need a pointer set to make
895 sure we don't repeatedly unshare subtrees of unshareable nodes. */
896 hash_set
<tree
> *visited
897 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
899 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
900 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
901 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
906 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
907 unshare_body (cgn
->decl
);
910 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
911 Subtrees are walked until the first unvisited node is encountered. */
914 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
918 /* If this node has been visited, unmark it and keep looking. */
919 if (TREE_VISITED (t
))
920 TREE_VISITED (t
) = 0;
922 /* Otherwise, don't look any deeper. */
929 /* Unmark the visited trees rooted at *TP. */
932 unmark_visited (tree
*tp
)
934 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
937 /* Likewise, but mark all trees as not visited. */
940 unvisit_body (tree fndecl
)
942 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
944 unmark_visited (&DECL_SAVED_TREE (fndecl
));
945 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
946 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
949 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
950 unvisit_body (cgn
->decl
);
953 /* Unconditionally make an unshared copy of EXPR. This is used when using
954 stored expressions which span multiple functions, such as BINFO_VTABLE,
955 as the normal unsharing process can't tell that they're shared. */
958 unshare_expr (tree expr
)
960 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
964 /* Worker for unshare_expr_without_location. */
967 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
970 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
976 /* Similar to unshare_expr but also prune all expression locations
980 unshare_expr_without_location (tree expr
)
982 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
984 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
988 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
989 contain statements and have a value. Assign its value to a temporary
990 and give it void_type_node. Return the temporary, or NULL_TREE if
991 WRAPPER was already void. */
994 voidify_wrapper_expr (tree wrapper
, tree temp
)
996 tree type
= TREE_TYPE (wrapper
);
997 if (type
&& !VOID_TYPE_P (type
))
1001 /* Set p to point to the body of the wrapper. Loop until we find
1002 something that isn't a wrapper. */
1003 for (p
= &wrapper
; p
&& *p
; )
1005 switch (TREE_CODE (*p
))
1008 TREE_SIDE_EFFECTS (*p
) = 1;
1009 TREE_TYPE (*p
) = void_type_node
;
1010 /* For a BIND_EXPR, the body is operand 1. */
1011 p
= &BIND_EXPR_BODY (*p
);
1014 case CLEANUP_POINT_EXPR
:
1015 case TRY_FINALLY_EXPR
:
1016 case TRY_CATCH_EXPR
:
1017 TREE_SIDE_EFFECTS (*p
) = 1;
1018 TREE_TYPE (*p
) = void_type_node
;
1019 p
= &TREE_OPERAND (*p
, 0);
1022 case STATEMENT_LIST
:
1024 tree_stmt_iterator i
= tsi_last (*p
);
1025 TREE_SIDE_EFFECTS (*p
) = 1;
1026 TREE_TYPE (*p
) = void_type_node
;
1027 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1032 /* Advance to the last statement. Set all container types to
1034 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1036 TREE_SIDE_EFFECTS (*p
) = 1;
1037 TREE_TYPE (*p
) = void_type_node
;
1041 case TRANSACTION_EXPR
:
1042 TREE_SIDE_EFFECTS (*p
) = 1;
1043 TREE_TYPE (*p
) = void_type_node
;
1044 p
= &TRANSACTION_EXPR_BODY (*p
);
1048 /* Assume that any tree upon which voidify_wrapper_expr is
1049 directly called is a wrapper, and that its body is op0. */
1052 TREE_SIDE_EFFECTS (*p
) = 1;
1053 TREE_TYPE (*p
) = void_type_node
;
1054 p
= &TREE_OPERAND (*p
, 0);
1062 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1066 /* The wrapper is on the RHS of an assignment that we're pushing
1068 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1069 || TREE_CODE (temp
) == MODIFY_EXPR
);
1070 TREE_OPERAND (temp
, 1) = *p
;
1075 temp
= create_tmp_var (type
, "retval");
1076 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1085 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1086 a temporary through which they communicate. */
1089 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1093 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1094 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1095 gimple_call_set_lhs (*save
, tmp_var
);
1098 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1102 /* Generate IFN_ASAN_MARK call that poisons the shadow memory of a DECL variable. */
1105 build_asan_poison_call_expr (tree decl
)
1107 /* Do not poison variables that have size equal to zero. */
1108 tree unit_size
= DECL_SIZE_UNIT (decl
);
1109 if (zerop (unit_size
))
1112 tree base
= build_fold_addr_expr (decl
);
1114 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1116 build_int_cst (integer_type_node
,
1121 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1122 on POISON flag, shadow memory of a DECL variable. The call will be
1123 put on location identified by IT iterator, where BEFORE flag drives
1124 position where the stmt will be put. */
1127 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1130 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1131 if (gimplify_omp_ctxp
)
1134 tree unit_size
= DECL_SIZE_UNIT (decl
);
1135 tree base
= build_fold_addr_expr (decl
);
1137 /* Do not poison variables that have size equal to zero. */
1138 if (zerop (unit_size
))
1141 /* It's necessary to have all stack variables aligned to ASAN granularity
1143 if (DECL_ALIGN_UNIT (decl
) <= ASAN_SHADOW_GRANULARITY
)
1144 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* ASAN_SHADOW_GRANULARITY
);
1146 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1149 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1150 build_int_cst (integer_type_node
, flags
),
1154 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1156 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1159 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1160 either poisons or unpoisons a DECL. Created statement is appended
1161 to SEQ_P gimple sequence. */
1164 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1166 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1167 bool before
= false;
1172 asan_poison_variable (decl
, poison
, &it
, before
);
1175 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1178 sort_by_decl_uid (const void *a
, const void *b
)
1180 const tree
*t1
= (const tree
*)a
;
1181 const tree
*t2
= (const tree
*)b
;
1183 int uid1
= DECL_UID (*t1
);
1184 int uid2
= DECL_UID (*t2
);
1188 else if (uid1
> uid2
)
1194 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1195 depending on POISON flag. Created statement is appended
1196 to SEQ_P gimple sequence. */
1199 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1201 unsigned c
= variables
->elements ();
1205 auto_vec
<tree
> sorted_variables (c
);
1207 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1208 it
!= variables
->end (); ++it
)
1209 sorted_variables
.safe_push (*it
);
1211 sorted_variables
.qsort (sort_by_decl_uid
);
1215 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1217 asan_poison_variable (var
, poison
, seq_p
);
1219 /* Add use_after_scope_memory attribute for the variable in order
1220 to prevent re-written into SSA. */
1221 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1222 DECL_ATTRIBUTES (var
)))
1223 DECL_ATTRIBUTES (var
)
1224 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1226 DECL_ATTRIBUTES (var
));
1230 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1232 static enum gimplify_status
1233 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1235 tree bind_expr
= *expr_p
;
1236 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1237 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1240 gimple_seq body
, cleanup
;
1242 location_t start_locus
= 0, end_locus
= 0;
1243 tree ret_clauses
= NULL
;
1245 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1247 /* Mark variables seen in this bind expr. */
1248 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1252 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1254 /* Mark variable as local. */
1255 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
)
1256 && (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1257 || splay_tree_lookup (ctx
->variables
,
1258 (splay_tree_key
) t
) == NULL
))
1260 if (ctx
->region_type
== ORT_SIMD
1261 && TREE_ADDRESSABLE (t
)
1262 && !TREE_STATIC (t
))
1263 omp_add_variable (ctx
, t
, GOVD_PRIVATE
| GOVD_SEEN
);
1265 omp_add_variable (ctx
, t
, GOVD_LOCAL
| GOVD_SEEN
);
1268 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1270 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1271 cfun
->has_local_explicit_reg_vars
= true;
1274 /* Preliminarily mark non-addressed complex variables as eligible
1275 for promotion to gimple registers. We'll transform their uses
1277 if ((TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
1278 || TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
1279 && !TREE_THIS_VOLATILE (t
)
1280 && (VAR_P (t
) && !DECL_HARD_REGISTER (t
))
1281 && !needs_to_live_in_memory (t
))
1282 DECL_GIMPLE_REG_P (t
) = 1;
1285 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1286 BIND_EXPR_BLOCK (bind_expr
));
1287 gimple_push_bind_expr (bind_stmt
);
1289 gimplify_ctxp
->keep_stack
= false;
1290 gimplify_ctxp
->save_stack
= false;
1292 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1294 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1295 gimple_bind_set_body (bind_stmt
, body
);
1297 /* Source location wise, the cleanup code (stack_restore and clobbers)
1298 belongs to the end of the block, so propagate what we have. The
1299 stack_save operation belongs to the beginning of block, which we can
1300 infer from the bind_expr directly if the block has no explicit
1302 if (BIND_EXPR_BLOCK (bind_expr
))
1304 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1305 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1307 if (start_locus
== 0)
1308 start_locus
= EXPR_LOCATION (bind_expr
);
1313 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1314 the stack space allocated to the VLAs. */
1315 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1317 gcall
*stack_restore
;
1319 /* Save stack on entry and restore it on exit. Add a try_finally
1320 block to achieve this. */
1321 build_stack_save_restore (&stack_save
, &stack_restore
);
1323 gimple_set_location (stack_save
, start_locus
);
1324 gimple_set_location (stack_restore
, end_locus
);
1326 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1329 /* Add clobbers for all variables that go out of scope. */
1330 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1333 && !is_global_var (t
)
1334 && DECL_CONTEXT (t
) == current_function_decl
)
1336 if (!DECL_HARD_REGISTER (t
)
1337 && !TREE_THIS_VOLATILE (t
)
1338 && !DECL_HAS_VALUE_EXPR_P (t
)
1339 /* Only care for variables that have to be in memory. Others
1340 will be rewritten into SSA names, hence moved to the
1342 && !is_gimple_reg (t
)
1343 && flag_stack_reuse
!= SR_NONE
)
1345 tree clobber
= build_constructor (TREE_TYPE (t
), NULL
);
1346 gimple
*clobber_stmt
;
1347 TREE_THIS_VOLATILE (clobber
) = 1;
1348 clobber_stmt
= gimple_build_assign (t
, clobber
);
1349 gimple_set_location (clobber_stmt
, end_locus
);
1350 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1353 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1355 tree
*c
= oacc_declare_returns
->get (t
);
1359 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1363 oacc_declare_returns
->remove (t
);
1365 if (oacc_declare_returns
->elements () == 0)
1367 delete oacc_declare_returns
;
1368 oacc_declare_returns
= NULL
;
1374 if (asan_poisoned_variables
!= NULL
1375 && asan_poisoned_variables
->contains (t
))
1377 asan_poisoned_variables
->remove (t
);
1378 asan_poison_variable (t
, true, &cleanup
);
1381 if (gimplify_ctxp
->live_switch_vars
!= NULL
1382 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1383 gimplify_ctxp
->live_switch_vars
->remove (t
);
1389 gimple_stmt_iterator si
= gsi_start (cleanup
);
1391 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1393 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1399 gimple_seq new_body
;
1402 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1403 GIMPLE_TRY_FINALLY
);
1406 gimplify_seq_add_stmt (&new_body
, stack_save
);
1407 gimplify_seq_add_stmt (&new_body
, gs
);
1408 gimple_bind_set_body (bind_stmt
, new_body
);
1411 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1412 if (!gimplify_ctxp
->keep_stack
)
1413 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1414 gimplify_ctxp
->save_stack
= old_save_stack
;
1416 gimple_pop_bind_expr ();
1418 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1426 *expr_p
= NULL_TREE
;
1430 /* Maybe add early return predict statement to PRE_P sequence. */
1433 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1435 /* If we are not in a conditional context, add PREDICT statement. */
1436 if (gimple_conditional_context ())
1438 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1440 gimplify_seq_add_stmt (pre_p
, predict
);
1444 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1445 GIMPLE value, it is assigned to a new temporary and the statement is
1446 re-written to return the temporary.
1448 PRE_P points to the sequence where side effects that must happen before
1449 STMT should be stored. */
1451 static enum gimplify_status
1452 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1455 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1456 tree result_decl
, result
;
1458 if (ret_expr
== error_mark_node
)
1461 /* Implicit _Cilk_sync must be inserted right before any return statement
1462 if there is a _Cilk_spawn in the function. If the user has provided a
1463 _Cilk_sync, the optimizer should remove this duplicate one. */
1464 if (fn_contains_cilk_spawn_p (cfun
))
1466 tree impl_sync
= build0 (CILK_SYNC_STMT
, void_type_node
);
1467 gimplify_and_add (impl_sync
, pre_p
);
1471 || TREE_CODE (ret_expr
) == RESULT_DECL
1472 || ret_expr
== error_mark_node
)
1474 maybe_add_early_return_predict_stmt (pre_p
);
1475 greturn
*ret
= gimple_build_return (ret_expr
);
1476 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1477 gimplify_seq_add_stmt (pre_p
, ret
);
1481 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1482 result_decl
= NULL_TREE
;
1485 result_decl
= TREE_OPERAND (ret_expr
, 0);
1487 /* See through a return by reference. */
1488 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1489 result_decl
= TREE_OPERAND (result_decl
, 0);
1491 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1492 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1493 && TREE_CODE (result_decl
) == RESULT_DECL
);
1496 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1497 Recall that aggregate_value_p is FALSE for any aggregate type that is
1498 returned in registers. If we're returning values in registers, then
1499 we don't want to extend the lifetime of the RESULT_DECL, particularly
1500 across another call. In addition, for those aggregates for which
1501 hard_function_value generates a PARALLEL, we'll die during normal
1502 expansion of structure assignments; there's special code in expand_return
1503 to handle this case that does not exist in expand_expr. */
1506 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1508 if (TREE_CODE (DECL_SIZE (result_decl
)) != INTEGER_CST
)
1510 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1511 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1512 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1513 should be effectively allocated by the caller, i.e. all calls to
1514 this function must be subject to the Return Slot Optimization. */
1515 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1516 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1518 result
= result_decl
;
1520 else if (gimplify_ctxp
->return_temp
)
1521 result
= gimplify_ctxp
->return_temp
;
1524 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1526 /* ??? With complex control flow (usually involving abnormal edges),
1527 we can wind up warning about an uninitialized value for this. Due
1528 to how this variable is constructed and initialized, this is never
1529 true. Give up and never warn. */
1530 TREE_NO_WARNING (result
) = 1;
1532 gimplify_ctxp
->return_temp
= result
;
1535 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1536 Then gimplify the whole thing. */
1537 if (result
!= result_decl
)
1538 TREE_OPERAND (ret_expr
, 0) = result
;
1540 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1542 maybe_add_early_return_predict_stmt (pre_p
);
1543 ret
= gimple_build_return (result
);
1544 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1545 gimplify_seq_add_stmt (pre_p
, ret
);
1550 /* Gimplify a variable-length array DECL. */
1553 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1555 /* This is a variable-sized decl. Simplify its size and mark it
1556 for deferred expansion. */
1557 tree t
, addr
, ptr_type
;
1559 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1560 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1562 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1563 if (DECL_HAS_VALUE_EXPR_P (decl
))
1566 /* All occurrences of this decl in final gimplified code will be
1567 replaced by indirection. Setting DECL_VALUE_EXPR does two
1568 things: First, it lets the rest of the gimplifier know what
1569 replacement to use. Second, it lets the debug info know
1570 where to find the value. */
1571 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1572 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1573 DECL_IGNORED_P (addr
) = 0;
1574 t
= build_fold_indirect_ref (addr
);
1575 TREE_THIS_NOTRAP (t
) = 1;
1576 SET_DECL_VALUE_EXPR (decl
, t
);
1577 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1579 t
= build_alloca_call_expr (DECL_SIZE_UNIT (decl
), DECL_ALIGN (decl
),
1580 max_int_size_in_bytes (TREE_TYPE (decl
)));
1581 /* The call has been built for a variable-sized object. */
1582 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1583 t
= fold_convert (ptr_type
, t
);
1584 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1586 gimplify_and_add (t
, seq_p
);
1589 /* A helper function to be called via walk_tree. Mark all labels under *TP
1590 as being forced. To be called for DECL_INITIAL of static variables. */
1593 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1597 if (TREE_CODE (*tp
) == LABEL_DECL
)
1599 FORCED_LABEL (*tp
) = 1;
1600 cfun
->has_forced_label_in_static
= 1;
1606 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1607 and initialization explicit. */
1609 static enum gimplify_status
1610 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1612 tree stmt
= *stmt_p
;
1613 tree decl
= DECL_EXPR_DECL (stmt
);
1615 *stmt_p
= NULL_TREE
;
1617 if (TREE_TYPE (decl
) == error_mark_node
)
1620 if ((TREE_CODE (decl
) == TYPE_DECL
1622 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1624 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1625 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1626 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
1629 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1630 in case its size expressions contain problematic nodes like CALL_EXPR. */
1631 if (TREE_CODE (decl
) == TYPE_DECL
1632 && DECL_ORIGINAL_TYPE (decl
)
1633 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1635 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1636 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
1637 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
1640 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
1642 tree init
= DECL_INITIAL (decl
);
1643 bool is_vla
= false;
1645 if (TREE_CODE (DECL_SIZE_UNIT (decl
)) != INTEGER_CST
1646 || (!TREE_STATIC (decl
)
1647 && flag_stack_check
== GENERIC_STACK_CHECK
1648 && compare_tree_int (DECL_SIZE_UNIT (decl
),
1649 STACK_CHECK_MAX_VAR_SIZE
) > 0))
1651 gimplify_vla_decl (decl
, seq_p
);
1655 if (asan_poisoned_variables
1657 && TREE_ADDRESSABLE (decl
)
1658 && !TREE_STATIC (decl
)
1659 && !DECL_HAS_VALUE_EXPR_P (decl
)
1660 && DECL_ALIGN (decl
) <= MAX_SUPPORTED_STACK_ALIGNMENT
1661 && dbg_cnt (asan_use_after_scope
))
1663 asan_poisoned_variables
->add (decl
);
1664 asan_poison_variable (decl
, false, seq_p
);
1665 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
1666 gimplify_ctxp
->live_switch_vars
->add (decl
);
1669 /* Some front ends do not explicitly declare all anonymous
1670 artificial variables. We compensate here by declaring the
1671 variables, though it would be better if the front ends would
1672 explicitly declare them. */
1673 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1674 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1675 gimple_add_tmp_var (decl
);
1677 if (init
&& init
!= error_mark_node
)
1679 if (!TREE_STATIC (decl
))
1681 DECL_INITIAL (decl
) = NULL_TREE
;
1682 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1683 gimplify_and_add (init
, seq_p
);
1687 /* We must still examine initializers for static variables
1688 as they may contain a label address. */
1689 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1696 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1697 and replacing the LOOP_EXPR with goto, but if the loop contains an
1698 EXIT_EXPR, we need to append a label for it to jump to. */
1700 static enum gimplify_status
1701 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1703 tree saved_label
= gimplify_ctxp
->exit_label
;
1704 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
1706 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
1708 gimplify_ctxp
->exit_label
= NULL_TREE
;
1710 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
1712 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
1714 if (gimplify_ctxp
->exit_label
)
1715 gimplify_seq_add_stmt (pre_p
,
1716 gimple_build_label (gimplify_ctxp
->exit_label
));
1718 gimplify_ctxp
->exit_label
= saved_label
;
1724 /* Gimplify a statement list onto a sequence. These may be created either
1725 by an enlightened front-end, or by shortcut_cond_expr. */
1727 static enum gimplify_status
1728 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
1730 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
1732 tree_stmt_iterator i
= tsi_start (*expr_p
);
1734 while (!tsi_end_p (i
))
1736 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
1749 /* Callback for walk_gimple_seq. */
1752 warn_switch_unreachable_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
1753 struct walk_stmt_info
*wi
)
1755 gimple
*stmt
= gsi_stmt (*gsi_p
);
1757 *handled_ops_p
= true;
1758 switch (gimple_code (stmt
))
1761 /* A compiler-generated cleanup or a user-written try block.
1762 If it's empty, don't dive into it--that would result in
1763 worse location info. */
1764 if (gimple_try_eval (stmt
) == NULL
)
1767 return integer_zero_node
;
1772 case GIMPLE_EH_FILTER
:
1773 case GIMPLE_TRANSACTION
:
1774 /* Walk the sub-statements. */
1775 *handled_ops_p
= false;
1778 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
1780 *handled_ops_p
= false;
1785 /* Save the first "real" statement (not a decl/lexical scope/...). */
1787 return integer_zero_node
;
1792 /* Possibly warn about unreachable statements between switch's controlling
1793 expression and the first case. SEQ is the body of a switch expression. */
1796 maybe_warn_switch_unreachable (gimple_seq seq
)
1798 if (!warn_switch_unreachable
1799 /* This warning doesn't play well with Fortran when optimizations
1801 || lang_GNU_Fortran ()
1805 struct walk_stmt_info wi
;
1806 memset (&wi
, 0, sizeof (wi
));
1807 walk_gimple_seq (seq
, warn_switch_unreachable_r
, NULL
, &wi
);
1808 gimple
*stmt
= (gimple
*) wi
.info
;
1810 if (stmt
&& gimple_code (stmt
) != GIMPLE_LABEL
)
1812 if (gimple_code (stmt
) == GIMPLE_GOTO
1813 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
1814 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
1815 /* Don't warn for compiler-generated gotos. These occur
1816 in Duff's devices, for example. */;
1818 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
1819 "statement will never be executed");
1824 /* A label entry that pairs label and a location. */
1831 /* Find LABEL in vector of label entries VEC. */
1833 static struct label_entry
*
1834 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
1837 struct label_entry
*l
;
1839 FOR_EACH_VEC_ELT (*vec
, i
, l
)
1840 if (l
->label
== label
)
1845 /* Return true if LABEL, a LABEL_DECL, represents a case label
1846 in a vector of labels CASES. */
1849 case_label_p (const vec
<tree
> *cases
, tree label
)
1854 FOR_EACH_VEC_ELT (*cases
, i
, l
)
1855 if (CASE_LABEL (l
) == label
)
1860 /* Find the last statement in a scope STMT. */
1863 last_stmt_in_scope (gimple
*stmt
)
1868 switch (gimple_code (stmt
))
1872 gbind
*bind
= as_a
<gbind
*> (stmt
);
1873 stmt
= gimple_seq_last_stmt (gimple_bind_body (bind
));
1874 return last_stmt_in_scope (stmt
);
1879 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
1880 stmt
= gimple_seq_last_stmt (gimple_try_eval (try_stmt
));
1881 gimple
*last_eval
= last_stmt_in_scope (stmt
);
1882 if (gimple_stmt_may_fallthru (last_eval
)
1883 && (last_eval
== NULL
1884 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
1885 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
1887 stmt
= gimple_seq_last_stmt (gimple_try_cleanup (try_stmt
));
1888 return last_stmt_in_scope (stmt
);
1899 /* Collect interesting labels in LABELS and return the statement preceding
1900 another case label, or a user-defined label. */
1903 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
1904 auto_vec
<struct label_entry
> *labels
)
1906 gimple
*prev
= NULL
;
1910 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
1911 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
1913 /* Nested scope. Only look at the last statement of
1914 the innermost scope. */
1915 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
1916 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
1920 /* It might be a label without a location. Use the
1921 location of the scope then. */
1922 if (!gimple_has_location (prev
))
1923 gimple_set_location (prev
, bind_loc
);
1929 /* Ifs are tricky. */
1930 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
1932 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
1933 tree false_lab
= gimple_cond_false_label (cond_stmt
);
1934 location_t if_loc
= gimple_location (cond_stmt
);
1937 if (i > 1) goto <D.2259>; else goto D;
1938 we can't do much with the else-branch. */
1939 if (!DECL_ARTIFICIAL (false_lab
))
1942 /* Go on until the false label, then one step back. */
1943 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
1945 gimple
*stmt
= gsi_stmt (*gsi_p
);
1946 if (gimple_code (stmt
) == GIMPLE_LABEL
1947 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
1951 /* Not found? Oops. */
1952 if (gsi_end_p (*gsi_p
))
1955 struct label_entry l
= { false_lab
, if_loc
};
1956 labels
->safe_push (l
);
1958 /* Go to the last statement of the then branch. */
1961 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1967 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
1968 && !gimple_has_location (gsi_stmt (*gsi_p
)))
1970 /* Look at the statement before, it might be
1971 attribute fallthrough, in which case don't warn. */
1973 bool fallthru_before_dest
1974 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
1976 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
1977 if (!fallthru_before_dest
)
1979 struct label_entry l
= { goto_dest
, if_loc
};
1980 labels
->safe_push (l
);
1983 /* And move back. */
1987 /* Remember the last statement. Skip labels that are of no interest
1989 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
1991 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
1992 if (find_label_entry (labels
, label
))
1993 prev
= gsi_stmt (*gsi_p
);
1995 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
1998 prev
= gsi_stmt (*gsi_p
);
2001 while (!gsi_end_p (*gsi_p
)
2002 /* Stop if we find a case or a user-defined label. */
2003 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2004 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2009 /* Return true if the switch fallthough warning should occur. LABEL is
2010 the label statement that we're falling through to. */
2013 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2015 gimple_stmt_iterator gsi
= *gsi_p
;
2017 /* Don't warn if the label is marked with a "falls through" comment. */
2018 if (FALLTHROUGH_LABEL_P (label
))
2021 /* Don't warn for non-case labels followed by a statement:
2026 as these are likely intentional. */
2027 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
2030 while (!gsi_end_p (gsi
)
2031 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2032 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2033 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2035 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2039 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2040 immediately breaks. */
2043 /* Skip all immediately following labels. */
2044 while (!gsi_end_p (gsi
)
2045 && (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2046 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_PREDICT
))
2049 /* { ... something; default:; } */
2051 /* { ... something; default: break; } or
2052 { ... something; default: goto L; } */
2053 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2054 /* { ... something; default: return; } */
2055 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2061 /* Callback for walk_gimple_seq. */
2064 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2065 struct walk_stmt_info
*)
2067 gimple
*stmt
= gsi_stmt (*gsi_p
);
2069 *handled_ops_p
= true;
2070 switch (gimple_code (stmt
))
2075 case GIMPLE_EH_FILTER
:
2076 case GIMPLE_TRANSACTION
:
2077 /* Walk the sub-statements. */
2078 *handled_ops_p
= false;
2081 /* Find a sequence of form:
2088 and possibly warn. */
2091 /* Found a label. Skip all immediately following labels. */
2092 while (!gsi_end_p (*gsi_p
)
2093 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2096 /* There might be no more statements. */
2097 if (gsi_end_p (*gsi_p
))
2098 return integer_zero_node
;
2100 /* Vector of labels that fall through. */
2101 auto_vec
<struct label_entry
> labels
;
2102 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
);
2104 /* There might be no more statements. */
2105 if (gsi_end_p (*gsi_p
))
2106 return integer_zero_node
;
2108 gimple
*next
= gsi_stmt (*gsi_p
);
2110 /* If what follows is a label, then we may have a fallthrough. */
2111 if (gimple_code (next
) == GIMPLE_LABEL
2112 && gimple_has_location (next
)
2113 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2116 struct label_entry
*l
;
2117 bool warned_p
= false;
2118 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2120 else if (gimple_code (prev
) == GIMPLE_LABEL
2121 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2122 && (l
= find_label_entry (&labels
, label
)))
2123 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2124 "this statement may fall through");
2125 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2126 /* Try to be clever and don't warn when the statement
2127 can't actually fall through. */
2128 && gimple_stmt_may_fallthru (prev
)
2129 && gimple_has_location (prev
))
2130 warned_p
= warning_at (gimple_location (prev
),
2131 OPT_Wimplicit_fallthrough_
,
2132 "this statement may fall through");
2134 inform (gimple_location (next
), "here");
2136 /* Mark this label as processed so as to prevent multiple
2137 warnings in nested switches. */
2138 FALLTHROUGH_LABEL_P (label
) = true;
2140 /* So that next warn_implicit_fallthrough_r will start looking for
2141 a new sequence starting with this label. */
2152 /* Warn when a switch case falls through. */
2155 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2157 if (!warn_implicit_fallthrough
)
2160 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2163 || lang_GNU_OBJC ()))
2166 struct walk_stmt_info wi
;
2167 memset (&wi
, 0, sizeof (wi
));
2168 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2171 /* Callback for walk_gimple_seq. */
2174 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2175 struct walk_stmt_info
*)
2177 gimple
*stmt
= gsi_stmt (*gsi_p
);
2179 *handled_ops_p
= true;
2180 switch (gimple_code (stmt
))
2185 case GIMPLE_EH_FILTER
:
2186 case GIMPLE_TRANSACTION
:
2187 /* Walk the sub-statements. */
2188 *handled_ops_p
= false;
2191 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2193 gsi_remove (gsi_p
, true);
2194 if (gsi_end_p (*gsi_p
))
2195 return integer_zero_node
;
2198 location_t loc
= gimple_location (stmt
);
2200 gimple_stmt_iterator gsi2
= *gsi_p
;
2201 stmt
= gsi_stmt (gsi2
);
2202 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2204 /* Go on until the artificial label. */
2205 tree goto_dest
= gimple_goto_dest (stmt
);
2206 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2208 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2209 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2214 /* Not found? Stop. */
2215 if (gsi_end_p (gsi2
))
2218 /* Look one past it. */
2222 /* We're looking for a case label or default label here. */
2223 while (!gsi_end_p (gsi2
))
2225 stmt
= gsi_stmt (gsi2
);
2226 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2228 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2229 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2235 else if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2238 /* Something other is not expected. */
2243 warning_at (loc
, 0, "attribute %<fallthrough%> not preceding "
2244 "a case label or default label");
2253 /* Expand all FALLTHROUGH () calls in SEQ. */
2256 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2258 struct walk_stmt_info wi
;
2259 memset (&wi
, 0, sizeof (wi
));
2260 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2264 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2267 static enum gimplify_status
2268 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2270 tree switch_expr
= *expr_p
;
2271 gimple_seq switch_body_seq
= NULL
;
2272 enum gimplify_status ret
;
2273 tree index_type
= TREE_TYPE (switch_expr
);
2274 if (index_type
== NULL_TREE
)
2275 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2277 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2279 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2282 if (SWITCH_BODY (switch_expr
))
2285 vec
<tree
> saved_labels
;
2286 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2287 tree default_case
= NULL_TREE
;
2288 gswitch
*switch_stmt
;
2290 /* If someone can be bothered to fill in the labels, they can
2291 be bothered to null out the body too. */
2292 gcc_assert (!SWITCH_LABELS (switch_expr
));
2294 /* Save old labels, get new ones from body, then restore the old
2295 labels. Save all the things from the switch body to append after. */
2296 saved_labels
= gimplify_ctxp
->case_labels
;
2297 gimplify_ctxp
->case_labels
.create (8);
2299 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2300 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2301 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2302 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2303 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2305 gimplify_ctxp
->live_switch_vars
= NULL
;
2307 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2308 gimplify_ctxp
->in_switch_expr
= true;
2310 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2312 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2313 maybe_warn_switch_unreachable (switch_body_seq
);
2314 maybe_warn_implicit_fallthrough (switch_body_seq
);
2315 /* Only do this for the outermost GIMPLE_SWITCH. */
2316 if (!gimplify_ctxp
->in_switch_expr
)
2317 expand_FALLTHROUGH (&switch_body_seq
);
2319 labels
= gimplify_ctxp
->case_labels
;
2320 gimplify_ctxp
->case_labels
= saved_labels
;
2322 if (gimplify_ctxp
->live_switch_vars
)
2324 gcc_assert (gimplify_ctxp
->live_switch_vars
->elements () == 0);
2325 delete gimplify_ctxp
->live_switch_vars
;
2327 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2329 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2334 glabel
*new_default
;
2337 = build_case_label (NULL_TREE
, NULL_TREE
,
2338 create_artificial_label (UNKNOWN_LOCATION
));
2339 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2340 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2343 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2344 default_case
, labels
);
2345 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
2346 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
2350 gcc_assert (SWITCH_LABELS (switch_expr
));
2355 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2357 static enum gimplify_status
2358 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2360 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
2361 == current_function_decl
);
2363 tree label
= LABEL_EXPR_LABEL (*expr_p
);
2364 glabel
*label_stmt
= gimple_build_label (label
);
2365 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2366 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2368 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2369 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2371 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2372 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2378 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2380 static enum gimplify_status
2381 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2383 struct gimplify_ctx
*ctxp
;
2386 /* Invalid programs can play Duff's Device type games with, for example,
2387 #pragma omp parallel. At least in the C front end, we don't
2388 detect such invalid branches until after gimplification, in the
2389 diagnose_omp_blocks pass. */
2390 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
2391 if (ctxp
->case_labels
.exists ())
2394 label_stmt
= gimple_build_label (CASE_LABEL (*expr_p
));
2395 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2396 ctxp
->case_labels
.safe_push (*expr_p
);
2397 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2402 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2406 build_and_jump (tree
*label_p
)
2408 if (label_p
== NULL
)
2409 /* If there's nowhere to jump, just fall through. */
2412 if (*label_p
== NULL_TREE
)
2414 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
2418 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
2421 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2422 This also involves building a label to jump to and communicating it to
2423 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2425 static enum gimplify_status
2426 gimplify_exit_expr (tree
*expr_p
)
2428 tree cond
= TREE_OPERAND (*expr_p
, 0);
2431 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
2432 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
2438 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2439 different from its canonical type, wrap the whole thing inside a
2440 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2443 The canonical type of a COMPONENT_REF is the type of the field being
2444 referenced--unless the field is a bit-field which can be read directly
2445 in a smaller mode, in which case the canonical type is the
2446 sign-appropriate type corresponding to that mode. */
2449 canonicalize_component_ref (tree
*expr_p
)
2451 tree expr
= *expr_p
;
2454 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
2456 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
2457 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
2459 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
2461 /* One could argue that all the stuff below is not necessary for
2462 the non-bitfield case and declare it a FE error if type
2463 adjustment would be needed. */
2464 if (TREE_TYPE (expr
) != type
)
2466 #ifdef ENABLE_TYPES_CHECKING
2467 tree old_type
= TREE_TYPE (expr
);
2471 /* We need to preserve qualifiers and propagate them from
2473 type_quals
= TYPE_QUALS (type
)
2474 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2475 if (TYPE_QUALS (type
) != type_quals
)
2476 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
2478 /* Set the type of the COMPONENT_REF to the underlying type. */
2479 TREE_TYPE (expr
) = type
;
2481 #ifdef ENABLE_TYPES_CHECKING
2482 /* It is now a FE error, if the conversion from the canonical
2483 type to the original expression type is not useless. */
2484 gcc_assert (useless_type_conversion_p (old_type
, type
));
2489 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2490 to foo, embed that change in the ADDR_EXPR by converting
2495 where L is the lower bound. For simplicity, only do this for constant
2497 The constraint is that the type of &array[L] is trivially convertible
2501 canonicalize_addr_expr (tree
*expr_p
)
2503 tree expr
= *expr_p
;
2504 tree addr_expr
= TREE_OPERAND (expr
, 0);
2505 tree datype
, ddatype
, pddatype
;
2507 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2508 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
2509 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
2512 /* The addr_expr type should be a pointer to an array. */
2513 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
2514 if (TREE_CODE (datype
) != ARRAY_TYPE
)
2517 /* The pointer to element type shall be trivially convertible to
2518 the expression pointer type. */
2519 ddatype
= TREE_TYPE (datype
);
2520 pddatype
= build_pointer_type (ddatype
);
2521 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
2525 /* The lower bound and element sizes must be constant. */
2526 if (!TYPE_SIZE_UNIT (ddatype
)
2527 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
2528 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
2529 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
2532 /* All checks succeeded. Build a new node to merge the cast. */
2533 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
2534 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
2535 NULL_TREE
, NULL_TREE
);
2536 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
2538 /* We can have stripped a required restrict qualifier above. */
2539 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
2540 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
2543 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2544 underneath as appropriate. */
2546 static enum gimplify_status
2547 gimplify_conversion (tree
*expr_p
)
2549 location_t loc
= EXPR_LOCATION (*expr_p
);
2550 gcc_assert (CONVERT_EXPR_P (*expr_p
));
2552 /* Then strip away all but the outermost conversion. */
2553 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
2555 /* And remove the outermost conversion if it's useless. */
2556 if (tree_ssa_useless_type_conversion (*expr_p
))
2557 *expr_p
= TREE_OPERAND (*expr_p
, 0);
2559 /* If we still have a conversion at the toplevel,
2560 then canonicalize some constructs. */
2561 if (CONVERT_EXPR_P (*expr_p
))
2563 tree sub
= TREE_OPERAND (*expr_p
, 0);
2565 /* If a NOP conversion is changing the type of a COMPONENT_REF
2566 expression, then canonicalize its type now in order to expose more
2567 redundant conversions. */
2568 if (TREE_CODE (sub
) == COMPONENT_REF
)
2569 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
2571 /* If a NOP conversion is changing a pointer to array of foo
2572 to a pointer to foo, embed that change in the ADDR_EXPR. */
2573 else if (TREE_CODE (sub
) == ADDR_EXPR
)
2574 canonicalize_addr_expr (expr_p
);
2577 /* If we have a conversion to a non-register type force the
2578 use of a VIEW_CONVERT_EXPR instead. */
2579 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
2580 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
2581 TREE_OPERAND (*expr_p
, 0));
2583 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2584 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
2585 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
2590 /* Nonlocal VLAs seen in the current function. */
2591 static hash_set
<tree
> *nonlocal_vlas
;
2593 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2594 static tree nonlocal_vla_vars
;
2596 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2597 DECL_VALUE_EXPR, and it's worth re-examining things. */
2599 static enum gimplify_status
2600 gimplify_var_or_parm_decl (tree
*expr_p
)
2602 tree decl
= *expr_p
;
2604 /* ??? If this is a local variable, and it has not been seen in any
2605 outer BIND_EXPR, then it's probably the result of a duplicate
2606 declaration, for which we've already issued an error. It would
2607 be really nice if the front end wouldn't leak these at all.
2608 Currently the only known culprit is C++ destructors, as seen
2609 in g++.old-deja/g++.jason/binding.C. */
2611 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
2612 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
2613 && decl_function_context (decl
) == current_function_decl
)
2615 gcc_assert (seen_error ());
2619 /* When within an OMP context, notice uses of variables. */
2620 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
2623 /* If the decl is an alias for another expression, substitute it now. */
2624 if (DECL_HAS_VALUE_EXPR_P (decl
))
2626 tree value_expr
= DECL_VALUE_EXPR (decl
);
2628 /* For referenced nonlocal VLAs add a decl for debugging purposes
2629 to the current function. */
2631 && TREE_CODE (DECL_SIZE_UNIT (decl
)) != INTEGER_CST
2632 && nonlocal_vlas
!= NULL
2633 && TREE_CODE (value_expr
) == INDIRECT_REF
2634 && TREE_CODE (TREE_OPERAND (value_expr
, 0)) == VAR_DECL
2635 && decl_function_context (decl
) != current_function_decl
)
2637 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
2639 && (ctx
->region_type
== ORT_WORKSHARE
2640 || ctx
->region_type
== ORT_SIMD
2641 || ctx
->region_type
== ORT_ACC
))
2642 ctx
= ctx
->outer_context
;
2643 if (!ctx
&& !nonlocal_vlas
->add (decl
))
2645 tree copy
= copy_node (decl
);
2647 lang_hooks
.dup_lang_specific_decl (copy
);
2648 SET_DECL_RTL (copy
, 0);
2649 TREE_USED (copy
) = 1;
2650 DECL_CHAIN (copy
) = nonlocal_vla_vars
;
2651 nonlocal_vla_vars
= copy
;
2652 SET_DECL_VALUE_EXPR (copy
, unshare_expr (value_expr
));
2653 DECL_HAS_VALUE_EXPR_P (copy
) = 1;
2657 *expr_p
= unshare_expr (value_expr
);
2664 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2667 recalculate_side_effects (tree t
)
2669 enum tree_code code
= TREE_CODE (t
);
2670 int len
= TREE_OPERAND_LENGTH (t
);
2673 switch (TREE_CODE_CLASS (code
))
2675 case tcc_expression
:
2681 case PREDECREMENT_EXPR
:
2682 case PREINCREMENT_EXPR
:
2683 case POSTDECREMENT_EXPR
:
2684 case POSTINCREMENT_EXPR
:
2685 /* All of these have side-effects, no matter what their
2694 case tcc_comparison
: /* a comparison expression */
2695 case tcc_unary
: /* a unary arithmetic expression */
2696 case tcc_binary
: /* a binary arithmetic expression */
2697 case tcc_reference
: /* a reference */
2698 case tcc_vl_exp
: /* a function call */
2699 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
2700 for (i
= 0; i
< len
; ++i
)
2702 tree op
= TREE_OPERAND (t
, i
);
2703 if (op
&& TREE_SIDE_EFFECTS (op
))
2704 TREE_SIDE_EFFECTS (t
) = 1;
2709 /* No side-effects. */
2717 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2721 : min_lval '[' val ']'
2723 | compound_lval '[' val ']'
2724 | compound_lval '.' ID
2726 This is not part of the original SIMPLE definition, which separates
2727 array and member references, but it seems reasonable to handle them
2728 together. Also, this way we don't run into problems with union
2729 aliasing; gcc requires that for accesses through a union to alias, the
2730 union reference must be explicit, which was not always the case when we
2731 were splitting up array and member refs.
2733 PRE_P points to the sequence where side effects that must happen before
2734 *EXPR_P should be stored.
2736 POST_P points to the sequence where side effects that must happen after
2737 *EXPR_P should be stored. */
2739 static enum gimplify_status
2740 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2741 fallback_t fallback
)
2744 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
2746 location_t loc
= EXPR_LOCATION (*expr_p
);
2747 tree expr
= *expr_p
;
2749 /* Create a stack of the subexpressions so later we can walk them in
2750 order from inner to outer. */
2751 auto_vec
<tree
, 10> expr_stack
;
2753 /* We can handle anything that get_inner_reference can deal with. */
2754 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
2757 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2758 if (TREE_CODE (*p
) == INDIRECT_REF
)
2759 *p
= fold_indirect_ref_loc (loc
, *p
);
2761 if (handled_component_p (*p
))
2763 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2764 additional COMPONENT_REFs. */
2765 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
2766 && gimplify_var_or_parm_decl (p
) == GS_OK
)
2771 expr_stack
.safe_push (*p
);
2774 gcc_assert (expr_stack
.length ());
2776 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2777 walked through and P points to the innermost expression.
2779 Java requires that we elaborated nodes in source order. That
2780 means we must gimplify the inner expression followed by each of
2781 the indices, in order. But we can't gimplify the inner
2782 expression until we deal with any variable bounds, sizes, or
2783 positions in order to deal with PLACEHOLDER_EXPRs.
2785 So we do this in three steps. First we deal with the annotations
2786 for any variables in the components, then we gimplify the base,
2787 then we gimplify any indices, from left to right. */
2788 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
2790 tree t
= expr_stack
[i
];
2792 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2794 /* Gimplify the low bound and element type size and put them into
2795 the ARRAY_REF. If these values are set, they have already been
2797 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2799 tree low
= unshare_expr (array_ref_low_bound (t
));
2800 if (!is_gimple_min_invariant (low
))
2802 TREE_OPERAND (t
, 2) = low
;
2803 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2804 post_p
, is_gimple_reg
,
2806 ret
= MIN (ret
, tret
);
2811 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
2812 is_gimple_reg
, fb_rvalue
);
2813 ret
= MIN (ret
, tret
);
2816 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
2818 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
2819 tree elmt_size
= unshare_expr (array_ref_element_size (t
));
2820 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
2822 /* Divide the element size by the alignment of the element
2825 = size_binop_loc (loc
, EXACT_DIV_EXPR
, elmt_size
, factor
);
2827 if (!is_gimple_min_invariant (elmt_size
))
2829 TREE_OPERAND (t
, 3) = elmt_size
;
2830 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
2831 post_p
, is_gimple_reg
,
2833 ret
= MIN (ret
, tret
);
2838 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
2839 is_gimple_reg
, fb_rvalue
);
2840 ret
= MIN (ret
, tret
);
2843 else if (TREE_CODE (t
) == COMPONENT_REF
)
2845 /* Set the field offset into T and gimplify it. */
2846 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2848 tree offset
= unshare_expr (component_ref_field_offset (t
));
2849 tree field
= TREE_OPERAND (t
, 1);
2851 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
2853 /* Divide the offset by its alignment. */
2854 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
, offset
, factor
);
2856 if (!is_gimple_min_invariant (offset
))
2858 TREE_OPERAND (t
, 2) = offset
;
2859 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2860 post_p
, is_gimple_reg
,
2862 ret
= MIN (ret
, tret
);
2867 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
2868 is_gimple_reg
, fb_rvalue
);
2869 ret
= MIN (ret
, tret
);
2874 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2875 so as to match the min_lval predicate. Failure to do so may result
2876 in the creation of large aggregate temporaries. */
2877 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
2878 fallback
| fb_lvalue
);
2879 ret
= MIN (ret
, tret
);
2881 /* And finally, the indices and operands of ARRAY_REF. During this
2882 loop we also remove any useless conversions. */
2883 for (; expr_stack
.length () > 0; )
2885 tree t
= expr_stack
.pop ();
2887 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2889 /* Gimplify the dimension. */
2890 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
2892 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
2893 is_gimple_val
, fb_rvalue
);
2894 ret
= MIN (ret
, tret
);
2898 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
2900 /* The innermost expression P may have originally had
2901 TREE_SIDE_EFFECTS set which would have caused all the outer
2902 expressions in *EXPR_P leading to P to also have had
2903 TREE_SIDE_EFFECTS set. */
2904 recalculate_side_effects (t
);
2907 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2908 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
2910 canonicalize_component_ref (expr_p
);
2913 expr_stack
.release ();
2915 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
2920 /* Gimplify the self modifying expression pointed to by EXPR_P
2923 PRE_P points to the list where side effects that must happen before
2924 *EXPR_P should be stored.
2926 POST_P points to the list where side effects that must happen after
2927 *EXPR_P should be stored.
2929 WANT_VALUE is nonzero iff we want to use the value of this expression
2930 in another expression.
2932 ARITH_TYPE is the type the computation should be performed in. */
2934 enum gimplify_status
2935 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2936 bool want_value
, tree arith_type
)
2938 enum tree_code code
;
2939 tree lhs
, lvalue
, rhs
, t1
;
2940 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
2942 enum tree_code arith_code
;
2943 enum gimplify_status ret
;
2944 location_t loc
= EXPR_LOCATION (*expr_p
);
2946 code
= TREE_CODE (*expr_p
);
2948 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
2949 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
2951 /* Prefix or postfix? */
2952 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
2953 /* Faster to treat as prefix if result is not used. */
2954 postfix
= want_value
;
2958 /* For postfix, make sure the inner expression's post side effects
2959 are executed after side effects from this expression. */
2963 /* Add or subtract? */
2964 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
2965 arith_code
= PLUS_EXPR
;
2967 arith_code
= MINUS_EXPR
;
2969 /* Gimplify the LHS into a GIMPLE lvalue. */
2970 lvalue
= TREE_OPERAND (*expr_p
, 0);
2971 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
2972 if (ret
== GS_ERROR
)
2975 /* Extract the operands to the arithmetic operation. */
2977 rhs
= TREE_OPERAND (*expr_p
, 1);
2979 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2980 that as the result value and in the postqueue operation. */
2983 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
2984 if (ret
== GS_ERROR
)
2987 lhs
= get_initialized_tmp_var (lhs
, pre_p
, NULL
);
2990 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2991 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
2993 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
2994 if (arith_code
== MINUS_EXPR
)
2995 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
2996 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
2999 t1
= fold_convert (TREE_TYPE (*expr_p
),
3000 fold_build2 (arith_code
, arith_type
,
3001 fold_convert (arith_type
, lhs
),
3002 fold_convert (arith_type
, rhs
)));
3006 gimplify_assign (lvalue
, t1
, pre_p
);
3007 gimplify_seq_add_seq (orig_post_p
, post
);
3013 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
3018 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3021 maybe_with_size_expr (tree
*expr_p
)
3023 tree expr
= *expr_p
;
3024 tree type
= TREE_TYPE (expr
);
3027 /* If we've already wrapped this or the type is error_mark_node, we can't do
3029 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3030 || type
== error_mark_node
)
3033 /* If the size isn't known or is a constant, we have nothing to do. */
3034 size
= TYPE_SIZE_UNIT (type
);
3035 if (!size
|| TREE_CODE (size
) == INTEGER_CST
)
3038 /* Otherwise, make a WITH_SIZE_EXPR. */
3039 size
= unshare_expr (size
);
3040 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3041 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
3044 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3045 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3046 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3047 gimplified to an SSA name. */
3049 enum gimplify_status
3050 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
3053 bool (*test
) (tree
);
3056 /* In general, we allow lvalues for function arguments to avoid
3057 extra overhead of copying large aggregates out of even larger
3058 aggregates into temporaries only to copy the temporaries to
3059 the argument list. Make optimizers happy by pulling out to
3060 temporaries those types that fit in registers. */
3061 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3062 test
= is_gimple_val
, fb
= fb_rvalue
;
3065 test
= is_gimple_lvalue
, fb
= fb_either
;
3066 /* Also strip a TARGET_EXPR that would force an extra copy. */
3067 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3069 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3071 && !VOID_TYPE_P (TREE_TYPE (init
)))
3076 /* If this is a variable sized type, we must remember the size. */
3077 maybe_with_size_expr (arg_p
);
3079 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3080 /* Make sure arguments have the same location as the function call
3082 protected_set_expr_location (*arg_p
, call_location
);
3084 /* There is a sequence point before a function call. Side effects in
3085 the argument list must occur before the actual call. So, when
3086 gimplifying arguments, force gimplify_expr to use an internal
3087 post queue which is then appended to the end of PRE_P. */
3088 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
3091 /* Don't fold inside offloading or taskreg regions: it can break code by
3092 adding decl references that weren't in the source. We'll do it during
3093 omplower pass instead. */
3096 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3098 struct gimplify_omp_ctx
*ctx
;
3099 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3100 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3102 return fold_stmt (gsi
);
3105 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3106 with the pointer to the proper cilk frame. */
3108 gimplify_cilk_detach (gimple_seq
*pre_p
)
3110 tree frame
= cfun
->cilk_frame_decl
;
3111 tree ptrf
= build1 (ADDR_EXPR
, cilk_frame_ptr_type_decl
,
3113 gcall
*detach
= gimple_build_call (cilk_detach_fndecl
, 1,
3115 gimplify_seq_add_stmt(pre_p
, detach
);
3118 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3119 WANT_VALUE is true if the result of the call is desired. */
3121 static enum gimplify_status
3122 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3124 tree fndecl
, parms
, p
, fnptrtype
;
3125 enum gimplify_status ret
;
3128 bool builtin_va_start_p
= false;
3129 location_t loc
= EXPR_LOCATION (*expr_p
);
3131 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3133 /* For reliable diagnostics during inlining, it is necessary that
3134 every call_expr be annotated with file and line. */
3135 if (! EXPR_HAS_LOCATION (*expr_p
))
3136 SET_EXPR_LOCATION (*expr_p
, input_location
);
3138 /* Gimplify internal functions created in the FEs. */
3139 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3144 nargs
= call_expr_nargs (*expr_p
);
3145 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3146 auto_vec
<tree
> vargs (nargs
);
3148 for (i
= 0; i
< nargs
; i
++)
3150 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3151 EXPR_LOCATION (*expr_p
));
3152 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3155 if (EXPR_CILK_SPAWN (*expr_p
))
3156 gimplify_cilk_detach (pre_p
);
3157 gcall
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3158 gimple_call_set_nothrow (call
, TREE_NOTHROW (*expr_p
));
3159 gimplify_seq_add_stmt (pre_p
, call
);
3163 /* This may be a call to a builtin function.
3165 Builtin function calls may be transformed into different
3166 (and more efficient) builtin function calls under certain
3167 circumstances. Unfortunately, gimplification can muck things
3168 up enough that the builtin expanders are not aware that certain
3169 transformations are still valid.
3171 So we attempt transformation/gimplification of the call before
3172 we gimplify the CALL_EXPR. At this time we do not manage to
3173 transform all calls in the same manner as the expanders do, but
3174 we do transform most of them. */
3175 fndecl
= get_callee_fndecl (*expr_p
);
3177 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3178 switch (DECL_FUNCTION_CODE (fndecl
))
3180 CASE_BUILT_IN_ALLOCA
:
3181 /* If the call has been built for a variable-sized object, then we
3182 want to restore the stack level when the enclosing BIND_EXPR is
3183 exited to reclaim the allocated space; otherwise, we precisely
3184 need to do the opposite and preserve the latest stack level. */
3185 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3186 gimplify_ctxp
->save_stack
= true;
3188 gimplify_ctxp
->keep_stack
= true;
3191 case BUILT_IN_VA_START
:
3193 builtin_va_start_p
= TRUE
;
3194 if (call_expr_nargs (*expr_p
) < 2)
3196 error ("too few arguments to function %<va_start%>");
3197 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3201 if (fold_builtin_next_arg (*expr_p
, true))
3203 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3212 if (fndecl
&& DECL_BUILT_IN (fndecl
))
3214 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3215 if (new_tree
&& new_tree
!= *expr_p
)
3217 /* There was a transformation of this call which computes the
3218 same value, but in a more efficient way. Return and try
3225 /* Remember the original function pointer type. */
3226 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3228 /* There is a sequence point before the call, so any side effects in
3229 the calling expression must occur before the actual call. Force
3230 gimplify_expr to use an internal post queue. */
3231 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3232 is_gimple_call_addr
, fb_rvalue
);
3234 nargs
= call_expr_nargs (*expr_p
);
3236 /* Get argument types for verification. */
3237 fndecl
= get_callee_fndecl (*expr_p
);
3240 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3242 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3244 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3245 p
= DECL_ARGUMENTS (fndecl
);
3250 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3253 /* If the last argument is __builtin_va_arg_pack () and it is not
3254 passed as a named argument, decrease the number of CALL_EXPR
3255 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3258 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3260 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3261 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3264 && TREE_CODE (last_arg_fndecl
) == FUNCTION_DECL
3265 && DECL_BUILT_IN_CLASS (last_arg_fndecl
) == BUILT_IN_NORMAL
3266 && DECL_FUNCTION_CODE (last_arg_fndecl
) == BUILT_IN_VA_ARG_PACK
)
3268 tree call
= *expr_p
;
3271 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3272 CALL_EXPR_FN (call
),
3273 nargs
, CALL_EXPR_ARGP (call
));
3275 /* Copy all CALL_EXPR flags, location and block, except
3276 CALL_EXPR_VA_ARG_PACK flag. */
3277 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
3278 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
3279 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
3280 = CALL_EXPR_RETURN_SLOT_OPT (call
);
3281 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
3282 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
3284 /* Set CALL_EXPR_VA_ARG_PACK. */
3285 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
3289 /* If the call returns twice then after building the CFG the call
3290 argument computations will no longer dominate the call because
3291 we add an abnormal incoming edge to the call. So do not use SSA
3293 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
3295 /* Gimplify the function arguments. */
3298 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
3299 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
3300 PUSH_ARGS_REVERSED
? i
-- : i
++)
3302 enum gimplify_status t
;
3304 /* Avoid gimplifying the second argument to va_start, which needs to
3305 be the plain PARM_DECL. */
3306 if ((i
!= 1) || !builtin_va_start_p
)
3308 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3309 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3317 /* Gimplify the static chain. */
3318 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
3320 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
3321 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
3324 enum gimplify_status t
;
3325 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
3326 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3332 /* Verify the function result. */
3333 if (want_value
&& fndecl
3334 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
3336 error_at (loc
, "using result of function returning %<void%>");
3340 /* Try this again in case gimplification exposed something. */
3341 if (ret
!= GS_ERROR
)
3343 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3345 if (new_tree
&& new_tree
!= *expr_p
)
3347 /* There was a transformation of this call which computes the
3348 same value, but in a more efficient way. Return and try
3356 *expr_p
= error_mark_node
;
3360 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3361 decl. This allows us to eliminate redundant or useless
3362 calls to "const" functions. */
3363 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
3365 int flags
= call_expr_flags (*expr_p
);
3366 if (flags
& (ECF_CONST
| ECF_PURE
)
3367 /* An infinite loop is considered a side effect. */
3368 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
3369 TREE_SIDE_EFFECTS (*expr_p
) = 0;
3372 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3373 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3374 form and delegate the creation of a GIMPLE_CALL to
3375 gimplify_modify_expr. This is always possible because when
3376 WANT_VALUE is true, the caller wants the result of this call into
3377 a temporary, which means that we will emit an INIT_EXPR in
3378 internal_get_tmp_var which will then be handled by
3379 gimplify_modify_expr. */
3382 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3383 have to do is replicate it as a GIMPLE_CALL tuple. */
3384 gimple_stmt_iterator gsi
;
3385 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
3386 notice_special_calls (call
);
3387 if (EXPR_CILK_SPAWN (*expr_p
))
3388 gimplify_cilk_detach (pre_p
);
3389 gimplify_seq_add_stmt (pre_p
, call
);
3390 gsi
= gsi_last (*pre_p
);
3391 maybe_fold_stmt (&gsi
);
3392 *expr_p
= NULL_TREE
;
3395 /* Remember the original function type. */
3396 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
3397 CALL_EXPR_FN (*expr_p
));
3402 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3403 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3405 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3406 condition is true or false, respectively. If null, we should generate
3407 our own to skip over the evaluation of this specific expression.
3409 LOCUS is the source location of the COND_EXPR.
3411 This function is the tree equivalent of do_jump.
3413 shortcut_cond_r should only be called by shortcut_cond_expr. */
3416 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
3419 tree local_label
= NULL_TREE
;
3420 tree t
, expr
= NULL
;
3422 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3423 retain the shortcut semantics. Just insert the gotos here;
3424 shortcut_cond_expr will append the real blocks later. */
3425 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3427 location_t new_locus
;
3429 /* Turn if (a && b) into
3431 if (a); else goto no;
3432 if (b) goto yes; else goto no;
3435 if (false_label_p
== NULL
)
3436 false_label_p
= &local_label
;
3438 /* Keep the original source location on the first 'if'. */
3439 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
3440 append_to_statement_list (t
, &expr
);
3442 /* Set the source location of the && on the second 'if'. */
3443 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
3444 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3446 append_to_statement_list (t
, &expr
);
3448 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3450 location_t new_locus
;
3452 /* Turn if (a || b) into
3455 if (b) goto yes; else goto no;
3458 if (true_label_p
== NULL
)
3459 true_label_p
= &local_label
;
3461 /* Keep the original source location on the first 'if'. */
3462 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
3463 append_to_statement_list (t
, &expr
);
3465 /* Set the source location of the || on the second 'if'. */
3466 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
3467 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3469 append_to_statement_list (t
, &expr
);
3471 else if (TREE_CODE (pred
) == COND_EXPR
3472 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
3473 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
3475 location_t new_locus
;
3477 /* As long as we're messing with gotos, turn if (a ? b : c) into
3479 if (b) goto yes; else goto no;
3481 if (c) goto yes; else goto no;
3483 Don't do this if one of the arms has void type, which can happen
3484 in C++ when the arm is throw. */
3486 /* Keep the original source location on the first 'if'. Set the source
3487 location of the ? on the second 'if'. */
3488 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
3489 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
3490 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
3491 false_label_p
, locus
),
3492 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
3493 false_label_p
, new_locus
));
3497 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
3498 build_and_jump (true_label_p
),
3499 build_and_jump (false_label_p
));
3500 SET_EXPR_LOCATION (expr
, locus
);
3505 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
3506 append_to_statement_list (t
, &expr
);
3512 /* Given a conditional expression EXPR with short-circuit boolean
3513 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3514 predicate apart into the equivalent sequence of conditionals. */
3517 shortcut_cond_expr (tree expr
)
3519 tree pred
= TREE_OPERAND (expr
, 0);
3520 tree then_
= TREE_OPERAND (expr
, 1);
3521 tree else_
= TREE_OPERAND (expr
, 2);
3522 tree true_label
, false_label
, end_label
, t
;
3524 tree
*false_label_p
;
3525 bool emit_end
, emit_false
, jump_over_else
;
3526 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3527 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3529 /* First do simple transformations. */
3532 /* If there is no 'else', turn
3535 if (a) if (b) then c. */
3536 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3538 /* Keep the original source location on the first 'if'. */
3539 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3540 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3541 /* Set the source location of the && on the second 'if'. */
3542 if (EXPR_HAS_LOCATION (pred
))
3543 SET_EXPR_LOCATION (expr
, EXPR_LOCATION (pred
));
3544 then_
= shortcut_cond_expr (expr
);
3545 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3546 pred
= TREE_OPERAND (pred
, 0);
3547 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
3548 SET_EXPR_LOCATION (expr
, locus
);
3554 /* If there is no 'then', turn
3557 if (a); else if (b); else d. */
3558 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3560 /* Keep the original source location on the first 'if'. */
3561 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3562 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3563 /* Set the source location of the || on the second 'if'. */
3564 if (EXPR_HAS_LOCATION (pred
))
3565 SET_EXPR_LOCATION (expr
, EXPR_LOCATION (pred
));
3566 else_
= shortcut_cond_expr (expr
);
3567 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3568 pred
= TREE_OPERAND (pred
, 0);
3569 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
3570 SET_EXPR_LOCATION (expr
, locus
);
3574 /* If we're done, great. */
3575 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
3576 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
3579 /* Otherwise we need to mess with gotos. Change
3582 if (a); else goto no;
3585 and recursively gimplify the condition. */
3587 true_label
= false_label
= end_label
= NULL_TREE
;
3589 /* If our arms just jump somewhere, hijack those labels so we don't
3590 generate jumps to jumps. */
3593 && TREE_CODE (then_
) == GOTO_EXPR
3594 && TREE_CODE (GOTO_DESTINATION (then_
)) == LABEL_DECL
)
3596 true_label
= GOTO_DESTINATION (then_
);
3602 && TREE_CODE (else_
) == GOTO_EXPR
3603 && TREE_CODE (GOTO_DESTINATION (else_
)) == LABEL_DECL
)
3605 false_label
= GOTO_DESTINATION (else_
);
3610 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3612 true_label_p
= &true_label
;
3614 true_label_p
= NULL
;
3616 /* The 'else' branch also needs a label if it contains interesting code. */
3617 if (false_label
|| else_se
)
3618 false_label_p
= &false_label
;
3620 false_label_p
= NULL
;
3622 /* If there was nothing else in our arms, just forward the label(s). */
3623 if (!then_se
&& !else_se
)
3624 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3625 EXPR_LOC_OR_LOC (expr
, input_location
));
3627 /* If our last subexpression already has a terminal label, reuse it. */
3629 t
= expr_last (else_
);
3631 t
= expr_last (then_
);
3634 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
3635 end_label
= LABEL_EXPR_LABEL (t
);
3637 /* If we don't care about jumping to the 'else' branch, jump to the end
3638 if the condition is false. */
3640 false_label_p
= &end_label
;
3642 /* We only want to emit these labels if we aren't hijacking them. */
3643 emit_end
= (end_label
== NULL_TREE
);
3644 emit_false
= (false_label
== NULL_TREE
);
3646 /* We only emit the jump over the else clause if we have to--if the
3647 then clause may fall through. Otherwise we can wind up with a
3648 useless jump and a useless label at the end of gimplified code,
3649 which will cause us to think that this conditional as a whole
3650 falls through even if it doesn't. If we then inline a function
3651 which ends with such a condition, that can cause us to issue an
3652 inappropriate warning about control reaching the end of a
3653 non-void function. */
3654 jump_over_else
= block_may_fallthru (then_
);
3656 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3657 EXPR_LOC_OR_LOC (expr
, input_location
));
3660 append_to_statement_list (pred
, &expr
);
3662 append_to_statement_list (then_
, &expr
);
3667 tree last
= expr_last (expr
);
3668 t
= build_and_jump (&end_label
);
3669 if (EXPR_HAS_LOCATION (last
))
3670 SET_EXPR_LOCATION (t
, EXPR_LOCATION (last
));
3671 append_to_statement_list (t
, &expr
);
3675 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
3676 append_to_statement_list (t
, &expr
);
3678 append_to_statement_list (else_
, &expr
);
3680 if (emit_end
&& end_label
)
3682 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
3683 append_to_statement_list (t
, &expr
);
3689 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3692 gimple_boolify (tree expr
)
3694 tree type
= TREE_TYPE (expr
);
3695 location_t loc
= EXPR_LOCATION (expr
);
3697 if (TREE_CODE (expr
) == NE_EXPR
3698 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
3699 && integer_zerop (TREE_OPERAND (expr
, 1)))
3701 tree call
= TREE_OPERAND (expr
, 0);
3702 tree fn
= get_callee_fndecl (call
);
3704 /* For __builtin_expect ((long) (x), y) recurse into x as well
3705 if x is truth_value_p. */
3707 && DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
3708 && DECL_FUNCTION_CODE (fn
) == BUILT_IN_EXPECT
3709 && call_expr_nargs (call
) == 2)
3711 tree arg
= CALL_EXPR_ARG (call
, 0);
3714 if (TREE_CODE (arg
) == NOP_EXPR
3715 && TREE_TYPE (arg
) == TREE_TYPE (call
))
3716 arg
= TREE_OPERAND (arg
, 0);
3717 if (truth_value_p (TREE_CODE (arg
)))
3719 arg
= gimple_boolify (arg
);
3720 CALL_EXPR_ARG (call
, 0)
3721 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
3727 switch (TREE_CODE (expr
))
3729 case TRUTH_AND_EXPR
:
3731 case TRUTH_XOR_EXPR
:
3732 case TRUTH_ANDIF_EXPR
:
3733 case TRUTH_ORIF_EXPR
:
3734 /* Also boolify the arguments of truth exprs. */
3735 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
3738 case TRUTH_NOT_EXPR
:
3739 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3741 /* These expressions always produce boolean results. */
3742 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3743 TREE_TYPE (expr
) = boolean_type_node
;
3747 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
3749 case annot_expr_ivdep_kind
:
3750 case annot_expr_no_vector_kind
:
3751 case annot_expr_vector_kind
:
3752 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3753 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3754 TREE_TYPE (expr
) = boolean_type_node
;
3761 if (COMPARISON_CLASS_P (expr
))
3763 /* There expressions always prduce boolean results. */
3764 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3765 TREE_TYPE (expr
) = boolean_type_node
;
3768 /* Other expressions that get here must have boolean values, but
3769 might need to be converted to the appropriate mode. */
3770 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
3772 return fold_convert_loc (loc
, boolean_type_node
, expr
);
3776 /* Given a conditional expression *EXPR_P without side effects, gimplify
3777 its operands. New statements are inserted to PRE_P. */
3779 static enum gimplify_status
3780 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
3782 tree expr
= *expr_p
, cond
;
3783 enum gimplify_status ret
, tret
;
3784 enum tree_code code
;
3786 cond
= gimple_boolify (COND_EXPR_COND (expr
));
3788 /* We need to handle && and || specially, as their gimplification
3789 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3790 code
= TREE_CODE (cond
);
3791 if (code
== TRUTH_ANDIF_EXPR
)
3792 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
3793 else if (code
== TRUTH_ORIF_EXPR
)
3794 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
3795 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_condexpr
, fb_rvalue
);
3796 COND_EXPR_COND (*expr_p
) = cond
;
3798 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
3799 is_gimple_val
, fb_rvalue
);
3800 ret
= MIN (ret
, tret
);
3801 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
3802 is_gimple_val
, fb_rvalue
);
3804 return MIN (ret
, tret
);
3807 /* Return true if evaluating EXPR could trap.
3808 EXPR is GENERIC, while tree_could_trap_p can be called
3812 generic_expr_could_trap_p (tree expr
)
3816 if (!expr
|| is_gimple_val (expr
))
3819 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
3822 n
= TREE_OPERAND_LENGTH (expr
);
3823 for (i
= 0; i
< n
; i
++)
3824 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
3830 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3839 The second form is used when *EXPR_P is of type void.
3841 PRE_P points to the list where side effects that must happen before
3842 *EXPR_P should be stored. */
3844 static enum gimplify_status
3845 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
3847 tree expr
= *expr_p
;
3848 tree type
= TREE_TYPE (expr
);
3849 location_t loc
= EXPR_LOCATION (expr
);
3850 tree tmp
, arm1
, arm2
;
3851 enum gimplify_status ret
;
3852 tree label_true
, label_false
, label_cont
;
3853 bool have_then_clause_p
, have_else_clause_p
;
3855 enum tree_code pred_code
;
3856 gimple_seq seq
= NULL
;
3858 /* If this COND_EXPR has a value, copy the values into a temporary within
3860 if (!VOID_TYPE_P (type
))
3862 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
3865 /* If either an rvalue is ok or we do not require an lvalue, create the
3866 temporary. But we cannot do that if the type is addressable. */
3867 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
3868 && !TREE_ADDRESSABLE (type
))
3870 if (gimplify_ctxp
->allow_rhs_cond_expr
3871 /* If either branch has side effects or could trap, it can't be
3872 evaluated unconditionally. */
3873 && !TREE_SIDE_EFFECTS (then_
)
3874 && !generic_expr_could_trap_p (then_
)
3875 && !TREE_SIDE_EFFECTS (else_
)
3876 && !generic_expr_could_trap_p (else_
))
3877 return gimplify_pure_cond_expr (expr_p
, pre_p
);
3879 tmp
= create_tmp_var (type
, "iftmp");
3883 /* Otherwise, only create and copy references to the values. */
3886 type
= build_pointer_type (type
);
3888 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
3889 then_
= build_fold_addr_expr_loc (loc
, then_
);
3891 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
3892 else_
= build_fold_addr_expr_loc (loc
, else_
);
3895 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
3897 tmp
= create_tmp_var (type
, "iftmp");
3898 result
= build_simple_mem_ref_loc (loc
, tmp
);
3901 /* Build the new then clause, `tmp = then_;'. But don't build the
3902 assignment if the value is void; in C++ it can be if it's a throw. */
3903 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
3904 TREE_OPERAND (expr
, 1) = build2 (MODIFY_EXPR
, type
, tmp
, then_
);
3906 /* Similarly, build the new else clause, `tmp = else_;'. */
3907 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
3908 TREE_OPERAND (expr
, 2) = build2 (MODIFY_EXPR
, type
, tmp
, else_
);
3910 TREE_TYPE (expr
) = void_type_node
;
3911 recalculate_side_effects (expr
);
3913 /* Move the COND_EXPR to the prequeue. */
3914 gimplify_stmt (&expr
, pre_p
);
3920 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3921 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
3922 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
3923 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
3925 /* Make sure the condition has BOOLEAN_TYPE. */
3926 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3928 /* Break apart && and || conditions. */
3929 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
3930 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
3932 expr
= shortcut_cond_expr (expr
);
3934 if (expr
!= *expr_p
)
3938 /* We can't rely on gimplify_expr to re-gimplify the expanded
3939 form properly, as cleanups might cause the target labels to be
3940 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3941 set up a conditional context. */
3942 gimple_push_condition ();
3943 gimplify_stmt (expr_p
, &seq
);
3944 gimple_pop_condition (pre_p
);
3945 gimple_seq_add_seq (pre_p
, seq
);
3951 /* Now do the normal gimplification. */
3953 /* Gimplify condition. */
3954 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
, is_gimple_condexpr
,
3956 if (ret
== GS_ERROR
)
3958 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
3960 gimple_push_condition ();
3962 have_then_clause_p
= have_else_clause_p
= false;
3963 if (TREE_OPERAND (expr
, 1) != NULL
3964 && TREE_CODE (TREE_OPERAND (expr
, 1)) == GOTO_EXPR
3965 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr
, 1))) == LABEL_DECL
3966 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr
, 1)))
3967 == current_function_decl
)
3968 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3969 have different locations, otherwise we end up with incorrect
3970 location information on the branches. */
3972 || !EXPR_HAS_LOCATION (expr
)
3973 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr
, 1))
3974 || EXPR_LOCATION (expr
) == EXPR_LOCATION (TREE_OPERAND (expr
, 1))))
3976 label_true
= GOTO_DESTINATION (TREE_OPERAND (expr
, 1));
3977 have_then_clause_p
= true;
3980 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
3981 if (TREE_OPERAND (expr
, 2) != NULL
3982 && TREE_CODE (TREE_OPERAND (expr
, 2)) == GOTO_EXPR
3983 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr
, 2))) == LABEL_DECL
3984 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr
, 2)))
3985 == current_function_decl
)
3986 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3987 have different locations, otherwise we end up with incorrect
3988 location information on the branches. */
3990 || !EXPR_HAS_LOCATION (expr
)
3991 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr
, 2))
3992 || EXPR_LOCATION (expr
) == EXPR_LOCATION (TREE_OPERAND (expr
, 2))))
3994 label_false
= GOTO_DESTINATION (TREE_OPERAND (expr
, 2));
3995 have_else_clause_p
= true;
3998 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
4000 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
4002 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
4004 gimple_set_no_warning (cond_stmt
, TREE_NO_WARNING (COND_EXPR_COND (expr
)));
4005 gimplify_seq_add_stmt (&seq
, cond_stmt
);
4006 gimple_stmt_iterator gsi
= gsi_last (seq
);
4007 maybe_fold_stmt (&gsi
);
4009 label_cont
= NULL_TREE
;
4010 if (!have_then_clause_p
)
4012 /* For if (...) {} else { code; } put label_true after
4014 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4015 && !have_else_clause_p
4016 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4017 label_cont
= label_true
;
4020 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4021 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4022 /* For if (...) { code; } else {} or
4023 if (...) { code; } else goto label; or
4024 if (...) { code; return; } else { ... }
4025 label_cont isn't needed. */
4026 if (!have_else_clause_p
4027 && TREE_OPERAND (expr
, 2) != NULL_TREE
4028 && gimple_seq_may_fallthru (seq
))
4031 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4033 g
= gimple_build_goto (label_cont
);
4035 /* GIMPLE_COND's are very low level; they have embedded
4036 gotos. This particular embedded goto should not be marked
4037 with the location of the original COND_EXPR, as it would
4038 correspond to the COND_EXPR's condition, not the ELSE or the
4039 THEN arms. To avoid marking it with the wrong location, flag
4040 it as "no location". */
4041 gimple_set_do_not_emit_location (g
);
4043 gimplify_seq_add_stmt (&seq
, g
);
4047 if (!have_else_clause_p
)
4049 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4050 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
4053 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4055 gimple_pop_condition (pre_p
);
4056 gimple_seq_add_seq (pre_p
, seq
);
4058 if (ret
== GS_ERROR
)
4060 else if (have_then_clause_p
|| have_else_clause_p
)
4064 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4065 expr
= TREE_OPERAND (expr
, 0);
4066 gimplify_stmt (&expr
, pre_p
);
4073 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4074 to be marked addressable.
4076 We cannot rely on such an expression being directly markable if a temporary
4077 has been created by the gimplification. In this case, we create another
4078 temporary and initialize it with a copy, which will become a store after we
4079 mark it addressable. This can happen if the front-end passed us something
4080 that it could not mark addressable yet, like a Fortran pass-by-reference
4081 parameter (int) floatvar. */
4084 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4086 while (handled_component_p (*expr_p
))
4087 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4088 if (is_gimple_reg (*expr_p
))
4090 /* Do not allow an SSA name as the temporary. */
4091 tree var
= get_initialized_tmp_var (*expr_p
, seq_p
, NULL
, false);
4092 DECL_GIMPLE_REG_P (var
) = 0;
4097 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4098 a call to __builtin_memcpy. */
4100 static enum gimplify_status
4101 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4104 tree t
, to
, to_ptr
, from
, from_ptr
;
4106 location_t loc
= EXPR_LOCATION (*expr_p
);
4108 to
= TREE_OPERAND (*expr_p
, 0);
4109 from
= TREE_OPERAND (*expr_p
, 1);
4111 /* Mark the RHS addressable. Beware that it may not be possible to do so
4112 directly if a temporary has been created by the gimplification. */
4113 prepare_gimple_addressable (&from
, seq_p
);
4115 mark_addressable (from
);
4116 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4117 gimplify_arg (&from_ptr
, seq_p
, loc
);
4119 mark_addressable (to
);
4120 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4121 gimplify_arg (&to_ptr
, seq_p
, loc
);
4123 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4125 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4129 /* tmp = memcpy() */
4130 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4131 gimple_call_set_lhs (gs
, t
);
4132 gimplify_seq_add_stmt (seq_p
, gs
);
4134 *expr_p
= build_simple_mem_ref (t
);
4138 gimplify_seq_add_stmt (seq_p
, gs
);
4143 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4144 a call to __builtin_memset. In this case we know that the RHS is
4145 a CONSTRUCTOR with an empty element list. */
4147 static enum gimplify_status
4148 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4151 tree t
, from
, to
, to_ptr
;
4153 location_t loc
= EXPR_LOCATION (*expr_p
);
4155 /* Assert our assumptions, to abort instead of producing wrong code
4156 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4157 not be immediately exposed. */
4158 from
= TREE_OPERAND (*expr_p
, 1);
4159 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4160 from
= TREE_OPERAND (from
, 0);
4162 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4163 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4166 to
= TREE_OPERAND (*expr_p
, 0);
4168 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4169 gimplify_arg (&to_ptr
, seq_p
, loc
);
4170 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4172 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4176 /* tmp = memset() */
4177 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4178 gimple_call_set_lhs (gs
, t
);
4179 gimplify_seq_add_stmt (seq_p
, gs
);
4181 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4185 gimplify_seq_add_stmt (seq_p
, gs
);
4190 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4191 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4192 assignment. Return non-null if we detect a potential overlap. */
4194 struct gimplify_init_ctor_preeval_data
4196 /* The base decl of the lhs object. May be NULL, in which case we
4197 have to assume the lhs is indirect. */
4200 /* The alias set of the lhs object. */
4201 alias_set_type lhs_alias_set
;
4205 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4207 struct gimplify_init_ctor_preeval_data
*data
4208 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4211 /* If we find the base object, obviously we have overlap. */
4212 if (data
->lhs_base_decl
== t
)
4215 /* If the constructor component is indirect, determine if we have a
4216 potential overlap with the lhs. The only bits of information we
4217 have to go on at this point are addressability and alias sets. */
4218 if ((INDIRECT_REF_P (t
)
4219 || TREE_CODE (t
) == MEM_REF
)
4220 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4221 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
4224 /* If the constructor component is a call, determine if it can hide a
4225 potential overlap with the lhs through an INDIRECT_REF like above.
4226 ??? Ugh - this is completely broken. In fact this whole analysis
4227 doesn't look conservative. */
4228 if (TREE_CODE (t
) == CALL_EXPR
)
4230 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
4232 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
4233 if (POINTER_TYPE_P (TREE_VALUE (type
))
4234 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4235 && alias_sets_conflict_p (data
->lhs_alias_set
,
4237 (TREE_TYPE (TREE_VALUE (type
)))))
4241 if (IS_TYPE_OR_DECL_P (t
))
4246 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4247 force values that overlap with the lhs (as described by *DATA)
4248 into temporaries. */
4251 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4252 struct gimplify_init_ctor_preeval_data
*data
)
4254 enum gimplify_status one
;
4256 /* If the value is constant, then there's nothing to pre-evaluate. */
4257 if (TREE_CONSTANT (*expr_p
))
4259 /* Ensure it does not have side effects, it might contain a reference to
4260 the object we're initializing. */
4261 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
4265 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4266 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
4269 /* Recurse for nested constructors. */
4270 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
4272 unsigned HOST_WIDE_INT ix
;
4273 constructor_elt
*ce
;
4274 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
4276 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
4277 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
4282 /* If this is a variable sized type, we must remember the size. */
4283 maybe_with_size_expr (expr_p
);
4285 /* Gimplify the constructor element to something appropriate for the rhs
4286 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4287 the gimplifier will consider this a store to memory. Doing this
4288 gimplification now means that we won't have to deal with complicated
4289 language-specific trees, nor trees like SAVE_EXPR that can induce
4290 exponential search behavior. */
4291 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
4292 if (one
== GS_ERROR
)
4298 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4299 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4300 always be true for all scalars, since is_gimple_mem_rhs insists on a
4301 temporary variable for them. */
4302 if (DECL_P (*expr_p
))
4305 /* If this is of variable size, we have no choice but to assume it doesn't
4306 overlap since we can't make a temporary for it. */
4307 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
4310 /* Otherwise, we must search for overlap ... */
4311 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
4314 /* ... and if found, force the value into a temporary. */
4315 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
4318 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4319 a RANGE_EXPR in a CONSTRUCTOR for an array.
4323 object[var] = value;
4330 We increment var _after_ the loop exit check because we might otherwise
4331 fail if upper == TYPE_MAX_VALUE (type for upper).
4333 Note that we never have to deal with SAVE_EXPRs here, because this has
4334 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4336 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
4337 gimple_seq
*, bool);
4340 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
4341 tree value
, tree array_elt_type
,
4342 gimple_seq
*pre_p
, bool cleared
)
4344 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
4345 tree var
, var_type
, cref
, tmp
;
4347 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
4348 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
4349 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
4351 /* Create and initialize the index variable. */
4352 var_type
= TREE_TYPE (upper
);
4353 var
= create_tmp_var (var_type
);
4354 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
4356 /* Add the loop entry label. */
4357 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
4359 /* Build the reference. */
4360 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4361 var
, NULL_TREE
, NULL_TREE
);
4363 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4364 the store. Otherwise just assign value to the reference. */
4366 if (TREE_CODE (value
) == CONSTRUCTOR
)
4367 /* NB we might have to call ourself recursively through
4368 gimplify_init_ctor_eval if the value is a constructor. */
4369 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4372 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
4374 /* We exit the loop when the index var is equal to the upper bound. */
4375 gimplify_seq_add_stmt (pre_p
,
4376 gimple_build_cond (EQ_EXPR
, var
, upper
,
4377 loop_exit_label
, fall_thru_label
));
4379 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
4381 /* Otherwise, increment the index var... */
4382 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
4383 fold_convert (var_type
, integer_one_node
));
4384 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
4386 /* ...and jump back to the loop entry. */
4387 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
4389 /* Add the loop exit label. */
4390 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
4393 /* Return true if FDECL is accessing a field that is zero sized. */
4396 zero_sized_field_decl (const_tree fdecl
)
4398 if (TREE_CODE (fdecl
) == FIELD_DECL
&& DECL_SIZE (fdecl
)
4399 && integer_zerop (DECL_SIZE (fdecl
)))
4404 /* Return true if TYPE is zero sized. */
4407 zero_sized_type (const_tree type
)
4409 if (AGGREGATE_TYPE_P (type
) && TYPE_SIZE (type
)
4410 && integer_zerop (TYPE_SIZE (type
)))
4415 /* A subroutine of gimplify_init_constructor. Generate individual
4416 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4417 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4418 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4422 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
4423 gimple_seq
*pre_p
, bool cleared
)
4425 tree array_elt_type
= NULL
;
4426 unsigned HOST_WIDE_INT ix
;
4427 tree purpose
, value
;
4429 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
4430 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
4432 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
4436 /* NULL values are created above for gimplification errors. */
4440 if (cleared
&& initializer_zerop (value
))
4443 /* ??? Here's to hoping the front end fills in all of the indices,
4444 so we don't have to figure out what's missing ourselves. */
4445 gcc_assert (purpose
);
4447 /* Skip zero-sized fields, unless value has side-effects. This can
4448 happen with calls to functions returning a zero-sized type, which
4449 we shouldn't discard. As a number of downstream passes don't
4450 expect sets of zero-sized fields, we rely on the gimplification of
4451 the MODIFY_EXPR we make below to drop the assignment statement. */
4452 if (! TREE_SIDE_EFFECTS (value
) && zero_sized_field_decl (purpose
))
4455 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4457 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4459 tree lower
= TREE_OPERAND (purpose
, 0);
4460 tree upper
= TREE_OPERAND (purpose
, 1);
4462 /* If the lower bound is equal to upper, just treat it as if
4463 upper was the index. */
4464 if (simple_cst_equal (lower
, upper
))
4468 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
4469 array_elt_type
, pre_p
, cleared
);
4476 /* Do not use bitsizetype for ARRAY_REF indices. */
4477 if (TYPE_DOMAIN (TREE_TYPE (object
)))
4479 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
4481 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4482 purpose
, NULL_TREE
, NULL_TREE
);
4486 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
4487 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
4488 unshare_expr (object
), purpose
, NULL_TREE
);
4491 if (TREE_CODE (value
) == CONSTRUCTOR
4492 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
4493 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4497 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
4498 gimplify_and_add (init
, pre_p
);
4504 /* Return the appropriate RHS predicate for this LHS. */
4507 rhs_predicate_for (tree lhs
)
4509 if (is_gimple_reg (lhs
))
4510 return is_gimple_reg_rhs_or_call
;
4512 return is_gimple_mem_rhs_or_call
;
4515 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4516 before the LHS has been gimplified. */
4518 static gimple_predicate
4519 initial_rhs_predicate_for (tree lhs
)
4521 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
4522 return is_gimple_reg_rhs_or_call
;
4524 return is_gimple_mem_rhs_or_call
;
4527 /* Gimplify a C99 compound literal expression. This just means adding
4528 the DECL_EXPR before the current statement and using its anonymous
4531 static enum gimplify_status
4532 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
4533 bool (*gimple_test_f
) (tree
),
4534 fallback_t fallback
)
4536 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
4537 tree decl
= DECL_EXPR_DECL (decl_s
);
4538 tree init
= DECL_INITIAL (decl
);
4539 /* Mark the decl as addressable if the compound literal
4540 expression is addressable now, otherwise it is marked too late
4541 after we gimplify the initialization expression. */
4542 if (TREE_ADDRESSABLE (*expr_p
))
4543 TREE_ADDRESSABLE (decl
) = 1;
4544 /* Otherwise, if we don't need an lvalue and have a literal directly
4545 substitute it. Check if it matches the gimple predicate, as
4546 otherwise we'd generate a new temporary, and we can as well just
4547 use the decl we already have. */
4548 else if (!TREE_ADDRESSABLE (decl
)
4550 && (fallback
& fb_lvalue
) == 0
4551 && gimple_test_f (init
))
4557 /* Preliminarily mark non-addressed complex variables as eligible
4558 for promotion to gimple registers. We'll transform their uses
4560 if ((TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
4561 || TREE_CODE (TREE_TYPE (decl
)) == VECTOR_TYPE
)
4562 && !TREE_THIS_VOLATILE (decl
)
4563 && !needs_to_live_in_memory (decl
))
4564 DECL_GIMPLE_REG_P (decl
) = 1;
4566 /* If the decl is not addressable, then it is being used in some
4567 expression or on the right hand side of a statement, and it can
4568 be put into a readonly data section. */
4569 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
4570 TREE_READONLY (decl
) = 1;
4572 /* This decl isn't mentioned in the enclosing block, so add it to the
4573 list of temps. FIXME it seems a bit of a kludge to say that
4574 anonymous artificial vars aren't pushed, but everything else is. */
4575 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
4576 gimple_add_tmp_var (decl
);
4578 gimplify_and_add (decl_s
, pre_p
);
4583 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4584 return a new CONSTRUCTOR if something changed. */
4587 optimize_compound_literals_in_ctor (tree orig_ctor
)
4589 tree ctor
= orig_ctor
;
4590 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
4591 unsigned int idx
, num
= vec_safe_length (elts
);
4593 for (idx
= 0; idx
< num
; idx
++)
4595 tree value
= (*elts
)[idx
].value
;
4596 tree newval
= value
;
4597 if (TREE_CODE (value
) == CONSTRUCTOR
)
4598 newval
= optimize_compound_literals_in_ctor (value
);
4599 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
4601 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
4602 tree decl
= DECL_EXPR_DECL (decl_s
);
4603 tree init
= DECL_INITIAL (decl
);
4605 if (!TREE_ADDRESSABLE (value
)
4606 && !TREE_ADDRESSABLE (decl
)
4608 && TREE_CODE (init
) == CONSTRUCTOR
)
4609 newval
= optimize_compound_literals_in_ctor (init
);
4611 if (newval
== value
)
4614 if (ctor
== orig_ctor
)
4616 ctor
= copy_node (orig_ctor
);
4617 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
4618 elts
= CONSTRUCTOR_ELTS (ctor
);
4620 (*elts
)[idx
].value
= newval
;
4625 /* A subroutine of gimplify_modify_expr. Break out elements of a
4626 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4628 Note that we still need to clear any elements that don't have explicit
4629 initializers, so if not all elements are initialized we keep the
4630 original MODIFY_EXPR, we just remove all of the constructor elements.
4632 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4633 GS_ERROR if we would have to create a temporary when gimplifying
4634 this constructor. Otherwise, return GS_OK.
4636 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4638 static enum gimplify_status
4639 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4640 bool want_value
, bool notify_temp_creation
)
4642 tree object
, ctor
, type
;
4643 enum gimplify_status ret
;
4644 vec
<constructor_elt
, va_gc
> *elts
;
4646 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
4648 if (!notify_temp_creation
)
4650 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
4651 is_gimple_lvalue
, fb_lvalue
);
4652 if (ret
== GS_ERROR
)
4656 object
= TREE_OPERAND (*expr_p
, 0);
4657 ctor
= TREE_OPERAND (*expr_p
, 1)
4658 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
4659 type
= TREE_TYPE (ctor
);
4660 elts
= CONSTRUCTOR_ELTS (ctor
);
4663 switch (TREE_CODE (type
))
4667 case QUAL_UNION_TYPE
:
4670 struct gimplify_init_ctor_preeval_data preeval_data
;
4671 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
4672 bool cleared
, complete_p
, valid_const_initializer
;
4674 /* Aggregate types must lower constructors to initialization of
4675 individual elements. The exception is that a CONSTRUCTOR node
4676 with no elements indicates zero-initialization of the whole. */
4677 if (vec_safe_is_empty (elts
))
4679 if (notify_temp_creation
)
4684 /* Fetch information about the constructor to direct later processing.
4685 We might want to make static versions of it in various cases, and
4686 can only do so if it known to be a valid constant initializer. */
4687 valid_const_initializer
4688 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
4689 &num_ctor_elements
, &complete_p
);
4691 /* If a const aggregate variable is being initialized, then it
4692 should never be a lose to promote the variable to be static. */
4693 if (valid_const_initializer
4694 && num_nonzero_elements
> 1
4695 && TREE_READONLY (object
)
4697 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
)))
4699 if (notify_temp_creation
)
4701 DECL_INITIAL (object
) = ctor
;
4702 TREE_STATIC (object
) = 1;
4703 if (!DECL_NAME (object
))
4704 DECL_NAME (object
) = create_tmp_var_name ("C");
4705 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
4707 /* ??? C++ doesn't automatically append a .<number> to the
4708 assembler name, and even when it does, it looks at FE private
4709 data structures to figure out what that number should be,
4710 which are not set for this variable. I suppose this is
4711 important for local statics for inline functions, which aren't
4712 "local" in the object file sense. So in order to get a unique
4713 TU-local symbol, we must invoke the lhd version now. */
4714 lhd_set_decl_assembler_name (object
);
4716 *expr_p
= NULL_TREE
;
4720 /* If there are "lots" of initialized elements, even discounting
4721 those that are not address constants (and thus *must* be
4722 computed at runtime), then partition the constructor into
4723 constant and non-constant parts. Block copy the constant
4724 parts in, then generate code for the non-constant parts. */
4725 /* TODO. There's code in cp/typeck.c to do this. */
4727 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
4728 /* store_constructor will ignore the clearing of variable-sized
4729 objects. Initializers for such objects must explicitly set
4730 every field that needs to be set. */
4732 else if (!complete_p
&& !CONSTRUCTOR_NO_CLEARING (ctor
))
4733 /* If the constructor isn't complete, clear the whole object
4734 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4736 ??? This ought not to be needed. For any element not present
4737 in the initializer, we should simply set them to zero. Except
4738 we'd need to *find* the elements that are not present, and that
4739 requires trickery to avoid quadratic compile-time behavior in
4740 large cases or excessive memory use in small cases. */
4742 else if (num_ctor_elements
- num_nonzero_elements
4743 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
4744 && num_nonzero_elements
< num_ctor_elements
/ 4)
4745 /* If there are "lots" of zeros, it's more efficient to clear
4746 the memory and then set the nonzero elements. */
4751 /* If there are "lots" of initialized elements, and all of them
4752 are valid address constants, then the entire initializer can
4753 be dropped to memory, and then memcpy'd out. Don't do this
4754 for sparse arrays, though, as it's more efficient to follow
4755 the standard CONSTRUCTOR behavior of memset followed by
4756 individual element initialization. Also don't do this for small
4757 all-zero initializers (which aren't big enough to merit
4758 clearing), and don't try to make bitwise copies of
4759 TREE_ADDRESSABLE types.
4761 We cannot apply such transformation when compiling chkp static
4762 initializer because creation of initializer image in the memory
4763 will require static initialization of bounds for it. It should
4764 result in another gimplification of similar initializer and we
4765 may fall into infinite loop. */
4766 if (valid_const_initializer
4767 && !(cleared
|| num_nonzero_elements
== 0)
4768 && !TREE_ADDRESSABLE (type
)
4769 && (!current_function_decl
4770 || !lookup_attribute ("chkp ctor",
4771 DECL_ATTRIBUTES (current_function_decl
))))
4773 HOST_WIDE_INT size
= int_size_in_bytes (type
);
4776 /* ??? We can still get unbounded array types, at least
4777 from the C++ front end. This seems wrong, but attempt
4778 to work around it for now. */
4781 size
= int_size_in_bytes (TREE_TYPE (object
));
4783 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
4786 /* Find the maximum alignment we can assume for the object. */
4787 /* ??? Make use of DECL_OFFSET_ALIGN. */
4788 if (DECL_P (object
))
4789 align
= DECL_ALIGN (object
);
4791 align
= TYPE_ALIGN (type
);
4793 /* Do a block move either if the size is so small as to make
4794 each individual move a sub-unit move on average, or if it
4795 is so large as to make individual moves inefficient. */
4797 && num_nonzero_elements
> 1
4798 && (size
< num_nonzero_elements
4799 || !can_move_by_pieces (size
, align
)))
4801 if (notify_temp_creation
)
4804 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
4805 ctor
= tree_output_constant_def (ctor
);
4806 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
4807 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
4808 TREE_OPERAND (*expr_p
, 1) = ctor
;
4810 /* This is no longer an assignment of a CONSTRUCTOR, but
4811 we still may have processing to do on the LHS. So
4812 pretend we didn't do anything here to let that happen. */
4813 return GS_UNHANDLED
;
4817 /* If the target is volatile, we have non-zero elements and more than
4818 one field to assign, initialize the target from a temporary. */
4819 if (TREE_THIS_VOLATILE (object
)
4820 && !TREE_ADDRESSABLE (type
)
4821 && num_nonzero_elements
> 0
4822 && vec_safe_length (elts
) > 1)
4824 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
4825 TREE_OPERAND (*expr_p
, 0) = temp
;
4826 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
4828 build2 (MODIFY_EXPR
, void_type_node
,
4833 if (notify_temp_creation
)
4836 /* If there are nonzero elements and if needed, pre-evaluate to capture
4837 elements overlapping with the lhs into temporaries. We must do this
4838 before clearing to fetch the values before they are zeroed-out. */
4839 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
4841 preeval_data
.lhs_base_decl
= get_base_address (object
);
4842 if (!DECL_P (preeval_data
.lhs_base_decl
))
4843 preeval_data
.lhs_base_decl
= NULL
;
4844 preeval_data
.lhs_alias_set
= get_alias_set (object
);
4846 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
4847 pre_p
, post_p
, &preeval_data
);
4850 bool ctor_has_side_effects_p
4851 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
4855 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4856 Note that we still have to gimplify, in order to handle the
4857 case of variable sized types. Avoid shared tree structures. */
4858 CONSTRUCTOR_ELTS (ctor
) = NULL
;
4859 TREE_SIDE_EFFECTS (ctor
) = 0;
4860 object
= unshare_expr (object
);
4861 gimplify_stmt (expr_p
, pre_p
);
4864 /* If we have not block cleared the object, or if there are nonzero
4865 elements in the constructor, or if the constructor has side effects,
4866 add assignments to the individual scalar fields of the object. */
4868 || num_nonzero_elements
> 0
4869 || ctor_has_side_effects_p
)
4870 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
4872 *expr_p
= NULL_TREE
;
4880 if (notify_temp_creation
)
4883 /* Extract the real and imaginary parts out of the ctor. */
4884 gcc_assert (elts
->length () == 2);
4885 r
= (*elts
)[0].value
;
4886 i
= (*elts
)[1].value
;
4887 if (r
== NULL
|| i
== NULL
)
4889 tree zero
= build_zero_cst (TREE_TYPE (type
));
4896 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4897 represent creation of a complex value. */
4898 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
4900 ctor
= build_complex (type
, r
, i
);
4901 TREE_OPERAND (*expr_p
, 1) = ctor
;
4905 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
4906 TREE_OPERAND (*expr_p
, 1) = ctor
;
4907 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
4910 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
4918 unsigned HOST_WIDE_INT ix
;
4919 constructor_elt
*ce
;
4921 if (notify_temp_creation
)
4924 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4925 if (TREE_CONSTANT (ctor
))
4927 bool constant_p
= true;
4930 /* Even when ctor is constant, it might contain non-*_CST
4931 elements, such as addresses or trapping values like
4932 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4933 in VECTOR_CST nodes. */
4934 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
4935 if (!CONSTANT_CLASS_P (value
))
4943 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
4947 TREE_CONSTANT (ctor
) = 0;
4950 /* Vector types use CONSTRUCTOR all the way through gimple
4951 compilation as a general initializer. */
4952 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
4954 enum gimplify_status tret
;
4955 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
4957 if (tret
== GS_ERROR
)
4959 else if (TREE_STATIC (ctor
)
4960 && !initializer_constant_valid_p (ce
->value
,
4961 TREE_TYPE (ce
->value
)))
4962 TREE_STATIC (ctor
) = 0;
4964 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
4965 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
4970 /* So how did we get a CONSTRUCTOR for a scalar type? */
4974 if (ret
== GS_ERROR
)
4976 /* If we have gimplified both sides of the initializer but have
4977 not emitted an assignment, do so now. */
4980 tree lhs
= TREE_OPERAND (*expr_p
, 0);
4981 tree rhs
= TREE_OPERAND (*expr_p
, 1);
4982 if (want_value
&& object
== lhs
)
4983 lhs
= unshare_expr (lhs
);
4984 gassign
*init
= gimple_build_assign (lhs
, rhs
);
4985 gimplify_seq_add_stmt (pre_p
, init
);
4999 /* Given a pointer value OP0, return a simplified version of an
5000 indirection through OP0, or NULL_TREE if no simplification is
5001 possible. This may only be applied to a rhs of an expression.
5002 Note that the resulting type may be different from the type pointed
5003 to in the sense that it is still compatible from the langhooks
5007 gimple_fold_indirect_ref_rhs (tree t
)
5009 return gimple_fold_indirect_ref (t
);
5012 /* Subroutine of gimplify_modify_expr to do simplifications of
5013 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5014 something changes. */
5016 static enum gimplify_status
5017 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
5018 gimple_seq
*pre_p
, gimple_seq
*post_p
,
5021 enum gimplify_status ret
= GS_UNHANDLED
;
5027 switch (TREE_CODE (*from_p
))
5030 /* If we're assigning from a read-only variable initialized with
5031 a constructor, do the direct assignment from the constructor,
5032 but only if neither source nor target are volatile since this
5033 latter assignment might end up being done on a per-field basis. */
5034 if (DECL_INITIAL (*from_p
)
5035 && TREE_READONLY (*from_p
)
5036 && !TREE_THIS_VOLATILE (*from_p
)
5037 && !TREE_THIS_VOLATILE (*to_p
)
5038 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
)
5040 tree old_from
= *from_p
;
5041 enum gimplify_status subret
;
5043 /* Move the constructor into the RHS. */
5044 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
5046 /* Let's see if gimplify_init_constructor will need to put
5048 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
5050 if (subret
== GS_ERROR
)
5052 /* If so, revert the change. */
5064 /* If we have code like
5068 where the type of "x" is a (possibly cv-qualified variant
5069 of "A"), treat the entire expression as identical to "x".
5070 This kind of code arises in C++ when an object is bound
5071 to a const reference, and if "x" is a TARGET_EXPR we want
5072 to take advantage of the optimization below. */
5073 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
5074 tree t
= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0));
5077 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
5080 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
5081 build_fold_addr_expr (t
));
5082 if (REFERENCE_CLASS_P (t
))
5083 TREE_THIS_VOLATILE (t
) = volatile_p
;
5094 /* If we are initializing something from a TARGET_EXPR, strip the
5095 TARGET_EXPR and initialize it directly, if possible. This can't
5096 be done if the initializer is void, since that implies that the
5097 temporary is set in some non-trivial way.
5099 ??? What about code that pulls out the temp and uses it
5100 elsewhere? I think that such code never uses the TARGET_EXPR as
5101 an initializer. If I'm wrong, we'll die because the temp won't
5102 have any RTL. In that case, I guess we'll need to replace
5103 references somehow. */
5104 tree init
= TARGET_EXPR_INITIAL (*from_p
);
5107 && !VOID_TYPE_P (TREE_TYPE (init
)))
5117 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5119 gimplify_compound_expr (from_p
, pre_p
, true);
5125 /* If we already made some changes, let the front end have a
5126 crack at this before we break it down. */
5127 if (ret
!= GS_UNHANDLED
)
5129 /* If we're initializing from a CONSTRUCTOR, break this into
5130 individual MODIFY_EXPRs. */
5131 return gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
5135 /* If we're assigning to a non-register type, push the assignment
5136 down into the branches. This is mandatory for ADDRESSABLE types,
5137 since we cannot generate temporaries for such, but it saves a
5138 copy in other cases as well. */
5139 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
5141 /* This code should mirror the code in gimplify_cond_expr. */
5142 enum tree_code code
= TREE_CODE (*expr_p
);
5143 tree cond
= *from_p
;
5144 tree result
= *to_p
;
5146 ret
= gimplify_expr (&result
, pre_p
, post_p
,
5147 is_gimple_lvalue
, fb_lvalue
);
5148 if (ret
!= GS_ERROR
)
5151 /* If we are going to write RESULT more than once, clear
5152 TREE_READONLY flag, otherwise we might incorrectly promote
5153 the variable to static const and initialize it at compile
5154 time in one of the branches. */
5156 && TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
5157 && TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5158 TREE_READONLY (result
) = 0;
5159 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
5160 TREE_OPERAND (cond
, 1)
5161 = build2 (code
, void_type_node
, result
,
5162 TREE_OPERAND (cond
, 1));
5163 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5164 TREE_OPERAND (cond
, 2)
5165 = build2 (code
, void_type_node
, unshare_expr (result
),
5166 TREE_OPERAND (cond
, 2));
5168 TREE_TYPE (cond
) = void_type_node
;
5169 recalculate_side_effects (cond
);
5173 gimplify_and_add (cond
, pre_p
);
5174 *expr_p
= unshare_expr (result
);
5183 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5184 return slot so that we don't generate a temporary. */
5185 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
5186 && aggregate_value_p (*from_p
, *from_p
))
5190 if (!(rhs_predicate_for (*to_p
))(*from_p
))
5191 /* If we need a temporary, *to_p isn't accurate. */
5193 /* It's OK to use the return slot directly unless it's an NRV. */
5194 else if (TREE_CODE (*to_p
) == RESULT_DECL
5195 && DECL_NAME (*to_p
) == NULL_TREE
5196 && needs_to_live_in_memory (*to_p
))
5198 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
5199 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
5200 /* Don't force regs into memory. */
5202 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
5203 /* It's OK to use the target directly if it's being
5206 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p
)))
5208 /* Always use the target and thus RSO for variable-sized types.
5209 GIMPLE cannot deal with a variable-sized assignment
5210 embedded in a call statement. */
5212 else if (TREE_CODE (*to_p
) != SSA_NAME
5213 && (!is_gimple_variable (*to_p
)
5214 || needs_to_live_in_memory (*to_p
)))
5215 /* Don't use the original target if it's already addressable;
5216 if its address escapes, and the called function uses the
5217 NRV optimization, a conforming program could see *to_p
5218 change before the called function returns; see c++/19317.
5219 When optimizing, the return_slot pass marks more functions
5220 as safe after we have escape info. */
5227 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
5228 mark_addressable (*to_p
);
5233 case WITH_SIZE_EXPR
:
5234 /* Likewise for calls that return an aggregate of non-constant size,
5235 since we would not be able to generate a temporary at all. */
5236 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
5238 *from_p
= TREE_OPERAND (*from_p
, 0);
5239 /* We don't change ret in this case because the
5240 WITH_SIZE_EXPR might have been added in
5241 gimplify_modify_expr, so returning GS_OK would lead to an
5247 /* If we're initializing from a container, push the initialization
5249 case CLEANUP_POINT_EXPR
:
5251 case STATEMENT_LIST
:
5253 tree wrap
= *from_p
;
5256 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
5258 if (ret
!= GS_ERROR
)
5261 t
= voidify_wrapper_expr (wrap
, *expr_p
);
5262 gcc_assert (t
== *expr_p
);
5266 gimplify_and_add (wrap
, pre_p
);
5267 *expr_p
= unshare_expr (*to_p
);
5274 case COMPOUND_LITERAL_EXPR
:
5276 tree complit
= TREE_OPERAND (*expr_p
, 1);
5277 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
5278 tree decl
= DECL_EXPR_DECL (decl_s
);
5279 tree init
= DECL_INITIAL (decl
);
5281 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5282 into struct T x = { 0, 1, 2 } if the address of the
5283 compound literal has never been taken. */
5284 if (!TREE_ADDRESSABLE (complit
)
5285 && !TREE_ADDRESSABLE (decl
)
5288 *expr_p
= copy_node (*expr_p
);
5289 TREE_OPERAND (*expr_p
, 1) = init
;
5304 /* Return true if T looks like a valid GIMPLE statement. */
5307 is_gimple_stmt (tree t
)
5309 const enum tree_code code
= TREE_CODE (t
);
5314 /* The only valid NOP_EXPR is the empty statement. */
5315 return IS_EMPTY_STMT (t
);
5319 /* These are only valid if they're void. */
5320 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
5326 case CASE_LABEL_EXPR
:
5327 case TRY_CATCH_EXPR
:
5328 case TRY_FINALLY_EXPR
:
5329 case EH_FILTER_EXPR
:
5332 case STATEMENT_LIST
:
5336 case OACC_HOST_DATA
:
5339 case OACC_ENTER_DATA
:
5340 case OACC_EXIT_DATA
:
5346 case OMP_DISTRIBUTE
:
5357 case OMP_TARGET_DATA
:
5358 case OMP_TARGET_UPDATE
:
5359 case OMP_TARGET_ENTER_DATA
:
5360 case OMP_TARGET_EXIT_DATA
:
5363 /* These are always void. */
5369 /* These are valid regardless of their type. */
5378 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5379 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5380 DECL_GIMPLE_REG_P set.
5382 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5383 other, unmodified part of the complex object just before the total store.
5384 As a consequence, if the object is still uninitialized, an undefined value
5385 will be loaded into a register, which may result in a spurious exception
5386 if the register is floating-point and the value happens to be a signaling
5387 NaN for example. Then the fully-fledged complex operations lowering pass
5388 followed by a DCE pass are necessary in order to fix things up. */
5390 static enum gimplify_status
5391 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
5394 enum tree_code code
, ocode
;
5395 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
5397 lhs
= TREE_OPERAND (*expr_p
, 0);
5398 rhs
= TREE_OPERAND (*expr_p
, 1);
5399 code
= TREE_CODE (lhs
);
5400 lhs
= TREE_OPERAND (lhs
, 0);
5402 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
5403 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
5404 TREE_NO_WARNING (other
) = 1;
5405 other
= get_formal_tmp_var (other
, pre_p
);
5407 realpart
= code
== REALPART_EXPR
? rhs
: other
;
5408 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
5410 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
5411 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
5413 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
5415 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
5416 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
5421 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5427 PRE_P points to the list where side effects that must happen before
5428 *EXPR_P should be stored.
5430 POST_P points to the list where side effects that must happen after
5431 *EXPR_P should be stored.
5433 WANT_VALUE is nonzero iff we want to use the value of this expression
5434 in another expression. */
5436 static enum gimplify_status
5437 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5440 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
5441 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
5442 enum gimplify_status ret
= GS_UNHANDLED
;
5444 location_t loc
= EXPR_LOCATION (*expr_p
);
5445 gimple_stmt_iterator gsi
;
5447 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
5448 || TREE_CODE (*expr_p
) == INIT_EXPR
);
5450 /* Trying to simplify a clobber using normal logic doesn't work,
5451 so handle it here. */
5452 if (TREE_CLOBBER_P (*from_p
))
5454 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5455 if (ret
== GS_ERROR
)
5457 gcc_assert (!want_value
5458 && (VAR_P (*to_p
) || TREE_CODE (*to_p
) == MEM_REF
));
5459 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
5464 /* Insert pointer conversions required by the middle-end that are not
5465 required by the frontend. This fixes middle-end type checking for
5466 for example gcc.dg/redecl-6.c. */
5467 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
5469 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
5470 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
5471 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
5474 /* See if any simplifications can be done based on what the RHS is. */
5475 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5477 if (ret
!= GS_UNHANDLED
)
5480 /* For zero sized types only gimplify the left hand side and right hand
5481 side as statements and throw away the assignment. Do this after
5482 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5484 if (zero_sized_type (TREE_TYPE (*from_p
))
5486 /* Don't do this for calls that return addressable types, expand_call
5487 relies on those having a lhs. */
5488 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p
))
5489 && TREE_CODE (*from_p
) == CALL_EXPR
))
5491 gimplify_stmt (from_p
, pre_p
);
5492 gimplify_stmt (to_p
, pre_p
);
5493 *expr_p
= NULL_TREE
;
5497 /* If the value being copied is of variable width, compute the length
5498 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5499 before gimplifying any of the operands so that we can resolve any
5500 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5501 the size of the expression to be copied, not of the destination, so
5502 that is what we must do here. */
5503 maybe_with_size_expr (from_p
);
5505 /* As a special case, we have to temporarily allow for assignments
5506 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5507 a toplevel statement, when gimplifying the GENERIC expression
5508 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5509 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5511 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5512 prevent gimplify_expr from trying to create a new temporary for
5513 foo's LHS, we tell it that it should only gimplify until it
5514 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5515 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5516 and all we need to do here is set 'a' to be its LHS. */
5518 /* Gimplify the RHS first for C++17 and bug 71104. */
5519 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
5520 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
5521 if (ret
== GS_ERROR
)
5524 /* Then gimplify the LHS. */
5525 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5526 twice we have to make sure to gimplify into non-SSA as otherwise
5527 the abnormal edge added later will make those defs not dominate
5529 ??? Technically this applies only to the registers used in the
5530 resulting non-register *TO_P. */
5531 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
5533 && TREE_CODE (*from_p
) == CALL_EXPR
5534 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
5535 gimplify_ctxp
->into_ssa
= false;
5536 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5537 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
5538 if (ret
== GS_ERROR
)
5541 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5542 guess for the predicate was wrong. */
5543 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
5544 if (final_pred
!= initial_pred
)
5546 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
5547 if (ret
== GS_ERROR
)
5551 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5552 size as argument to the call. */
5553 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5555 tree call
= TREE_OPERAND (*from_p
, 0);
5556 tree vlasize
= TREE_OPERAND (*from_p
, 1);
5558 if (TREE_CODE (call
) == CALL_EXPR
5559 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
5561 int nargs
= call_expr_nargs (call
);
5562 tree type
= TREE_TYPE (call
);
5563 tree ap
= CALL_EXPR_ARG (call
, 0);
5564 tree tag
= CALL_EXPR_ARG (call
, 1);
5565 tree aptag
= CALL_EXPR_ARG (call
, 2);
5566 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
5570 TREE_OPERAND (*from_p
, 0) = newcall
;
5574 /* Now see if the above changed *from_p to something we handle specially. */
5575 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5577 if (ret
!= GS_UNHANDLED
)
5580 /* If we've got a variable sized assignment between two lvalues (i.e. does
5581 not involve a call), then we can make things a bit more straightforward
5582 by converting the assignment to memcpy or memset. */
5583 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5585 tree from
= TREE_OPERAND (*from_p
, 0);
5586 tree size
= TREE_OPERAND (*from_p
, 1);
5588 if (TREE_CODE (from
) == CONSTRUCTOR
)
5589 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
5591 if (is_gimple_addressable (from
))
5594 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
5599 /* Transform partial stores to non-addressable complex variables into
5600 total stores. This allows us to use real instead of virtual operands
5601 for these variables, which improves optimization. */
5602 if ((TREE_CODE (*to_p
) == REALPART_EXPR
5603 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
5604 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
5605 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
5607 /* Try to alleviate the effects of the gimplification creating artificial
5608 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5609 make sure not to create DECL_DEBUG_EXPR links across functions. */
5610 if (!gimplify_ctxp
->into_ssa
5612 && DECL_IGNORED_P (*from_p
)
5614 && !DECL_IGNORED_P (*to_p
)
5615 && decl_function_context (*to_p
) == current_function_decl
5616 && decl_function_context (*from_p
) == current_function_decl
)
5618 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
5620 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
5621 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
5622 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
5625 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
5626 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
5628 if (TREE_CODE (*from_p
) == CALL_EXPR
)
5630 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5631 instead of a GIMPLE_ASSIGN. */
5633 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
5635 /* Gimplify internal functions created in the FEs. */
5636 int nargs
= call_expr_nargs (*from_p
), i
;
5637 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
5638 auto_vec
<tree
> vargs (nargs
);
5640 for (i
= 0; i
< nargs
; i
++)
5642 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
5643 EXPR_LOCATION (*from_p
));
5644 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
5646 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
5647 gimple_call_set_nothrow (call_stmt
, TREE_NOTHROW (*from_p
));
5648 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
5652 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
5653 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
5654 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
5655 tree fndecl
= get_callee_fndecl (*from_p
);
5657 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
5658 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
5659 && call_expr_nargs (*from_p
) == 3)
5660 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
5661 CALL_EXPR_ARG (*from_p
, 0),
5662 CALL_EXPR_ARG (*from_p
, 1),
5663 CALL_EXPR_ARG (*from_p
, 2));
5666 call_stmt
= gimple_build_call_from_tree (*from_p
, fnptrtype
);
5669 notice_special_calls (call_stmt
);
5670 if (!gimple_call_noreturn_p (call_stmt
) || !should_remove_lhs_p (*to_p
))
5671 gimple_call_set_lhs (call_stmt
, *to_p
);
5672 else if (TREE_CODE (*to_p
) == SSA_NAME
)
5673 /* The above is somewhat premature, avoid ICEing later for a
5674 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5675 ??? This doesn't make it a default-def. */
5676 SSA_NAME_DEF_STMT (*to_p
) = gimple_build_nop ();
5678 if (EXPR_CILK_SPAWN (*from_p
))
5679 gimplify_cilk_detach (pre_p
);
5684 assign
= gimple_build_assign (*to_p
, *from_p
);
5685 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
5686 if (COMPARISON_CLASS_P (*from_p
))
5687 gimple_set_no_warning (assign
, TREE_NO_WARNING (*from_p
));
5690 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
5692 /* We should have got an SSA name from the start. */
5693 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
5694 || ! gimple_in_ssa_p (cfun
));
5697 gimplify_seq_add_stmt (pre_p
, assign
);
5698 gsi
= gsi_last (*pre_p
);
5699 maybe_fold_stmt (&gsi
);
5703 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
5712 /* Gimplify a comparison between two variable-sized objects. Do this
5713 with a call to BUILT_IN_MEMCMP. */
5715 static enum gimplify_status
5716 gimplify_variable_sized_compare (tree
*expr_p
)
5718 location_t loc
= EXPR_LOCATION (*expr_p
);
5719 tree op0
= TREE_OPERAND (*expr_p
, 0);
5720 tree op1
= TREE_OPERAND (*expr_p
, 1);
5721 tree t
, arg
, dest
, src
, expr
;
5723 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
5724 arg
= unshare_expr (arg
);
5725 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
5726 src
= build_fold_addr_expr_loc (loc
, op1
);
5727 dest
= build_fold_addr_expr_loc (loc
, op0
);
5728 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
5729 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
5732 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
5733 SET_EXPR_LOCATION (expr
, loc
);
5739 /* Gimplify a comparison between two aggregate objects of integral scalar
5740 mode as a comparison between the bitwise equivalent scalar values. */
5742 static enum gimplify_status
5743 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
5745 location_t loc
= EXPR_LOCATION (*expr_p
);
5746 tree op0
= TREE_OPERAND (*expr_p
, 0);
5747 tree op1
= TREE_OPERAND (*expr_p
, 1);
5749 tree type
= TREE_TYPE (op0
);
5750 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
5752 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
5753 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
5756 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
5761 /* Gimplify an expression sequence. This function gimplifies each
5762 expression and rewrites the original expression with the last
5763 expression of the sequence in GIMPLE form.
5765 PRE_P points to the list where the side effects for all the
5766 expressions in the sequence will be emitted.
5768 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5770 static enum gimplify_status
5771 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
5777 tree
*sub_p
= &TREE_OPERAND (t
, 0);
5779 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
5780 gimplify_compound_expr (sub_p
, pre_p
, false);
5782 gimplify_stmt (sub_p
, pre_p
);
5784 t
= TREE_OPERAND (t
, 1);
5786 while (TREE_CODE (t
) == COMPOUND_EXPR
);
5793 gimplify_stmt (expr_p
, pre_p
);
5798 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5799 gimplify. After gimplification, EXPR_P will point to a new temporary
5800 that holds the original value of the SAVE_EXPR node.
5802 PRE_P points to the list where side effects that must happen before
5803 *EXPR_P should be stored. */
5805 static enum gimplify_status
5806 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5808 enum gimplify_status ret
= GS_ALL_DONE
;
5811 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
5812 val
= TREE_OPERAND (*expr_p
, 0);
5814 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5815 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
5817 /* The operand may be a void-valued expression. It is
5818 being executed only for its side-effects. */
5819 if (TREE_TYPE (val
) == void_type_node
)
5821 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
5822 is_gimple_stmt
, fb_none
);
5826 /* The temporary may not be an SSA name as later abnormal and EH
5827 control flow may invalidate use/def domination. */
5828 val
= get_initialized_tmp_var (val
, pre_p
, post_p
, false);
5830 TREE_OPERAND (*expr_p
, 0) = val
;
5831 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
5839 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5846 PRE_P points to the list where side effects that must happen before
5847 *EXPR_P should be stored.
5849 POST_P points to the list where side effects that must happen after
5850 *EXPR_P should be stored. */
5852 static enum gimplify_status
5853 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5855 tree expr
= *expr_p
;
5856 tree op0
= TREE_OPERAND (expr
, 0);
5857 enum gimplify_status ret
;
5858 location_t loc
= EXPR_LOCATION (*expr_p
);
5860 switch (TREE_CODE (op0
))
5864 /* Check if we are dealing with an expression of the form '&*ptr'.
5865 While the front end folds away '&*ptr' into 'ptr', these
5866 expressions may be generated internally by the compiler (e.g.,
5867 builtins like __builtin_va_end). */
5868 /* Caution: the silent array decomposition semantics we allow for
5869 ADDR_EXPR means we can't always discard the pair. */
5870 /* Gimplification of the ADDR_EXPR operand may drop
5871 cv-qualification conversions, so make sure we add them if
5874 tree op00
= TREE_OPERAND (op0
, 0);
5875 tree t_expr
= TREE_TYPE (expr
);
5876 tree t_op00
= TREE_TYPE (op00
);
5878 if (!useless_type_conversion_p (t_expr
, t_op00
))
5879 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
5885 case VIEW_CONVERT_EXPR
:
5886 /* Take the address of our operand and then convert it to the type of
5889 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5890 all clear. The impact of this transformation is even less clear. */
5892 /* If the operand is a useless conversion, look through it. Doing so
5893 guarantees that the ADDR_EXPR and its operand will remain of the
5895 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
5896 op0
= TREE_OPERAND (op0
, 0);
5898 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
5899 build_fold_addr_expr_loc (loc
,
5900 TREE_OPERAND (op0
, 0)));
5905 if (integer_zerop (TREE_OPERAND (op0
, 1)))
5906 goto do_indirect_ref
;
5911 /* If we see a call to a declared builtin or see its address
5912 being taken (we can unify those cases here) then we can mark
5913 the builtin for implicit generation by GCC. */
5914 if (TREE_CODE (op0
) == FUNCTION_DECL
5915 && DECL_BUILT_IN_CLASS (op0
) == BUILT_IN_NORMAL
5916 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
5917 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
5919 /* We use fb_either here because the C frontend sometimes takes
5920 the address of a call that returns a struct; see
5921 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5922 the implied temporary explicit. */
5924 /* Make the operand addressable. */
5925 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
5926 is_gimple_addressable
, fb_either
);
5927 if (ret
== GS_ERROR
)
5930 /* Then mark it. Beware that it may not be possible to do so directly
5931 if a temporary has been created by the gimplification. */
5932 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
5934 op0
= TREE_OPERAND (expr
, 0);
5936 /* For various reasons, the gimplification of the expression
5937 may have made a new INDIRECT_REF. */
5938 if (TREE_CODE (op0
) == INDIRECT_REF
)
5939 goto do_indirect_ref
;
5941 mark_addressable (TREE_OPERAND (expr
, 0));
5943 /* The FEs may end up building ADDR_EXPRs early on a decl with
5944 an incomplete type. Re-build ADDR_EXPRs in canonical form
5946 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
5947 *expr_p
= build_fold_addr_expr (op0
);
5949 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5950 recompute_tree_invariant_for_addr_expr (*expr_p
);
5952 /* If we re-built the ADDR_EXPR add a conversion to the original type
5954 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
5955 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
5963 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5964 value; output operands should be a gimple lvalue. */
5966 static enum gimplify_status
5967 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5971 const char **oconstraints
;
5974 const char *constraint
;
5975 bool allows_mem
, allows_reg
, is_inout
;
5976 enum gimplify_status ret
, tret
;
5978 vec
<tree
, va_gc
> *inputs
;
5979 vec
<tree
, va_gc
> *outputs
;
5980 vec
<tree
, va_gc
> *clobbers
;
5981 vec
<tree
, va_gc
> *labels
;
5985 noutputs
= list_length (ASM_OUTPUTS (expr
));
5986 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
5994 link_next
= NULL_TREE
;
5995 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
5998 size_t constraint_len
;
6000 link_next
= TREE_CHAIN (link
);
6004 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6005 constraint_len
= strlen (constraint
);
6006 if (constraint_len
== 0)
6009 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6010 &allows_mem
, &allows_reg
, &is_inout
);
6017 if (!allows_reg
&& allows_mem
)
6018 mark_addressable (TREE_VALUE (link
));
6020 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6021 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6022 fb_lvalue
| fb_mayfail
);
6023 if (tret
== GS_ERROR
)
6025 error ("invalid lvalue in asm output %d", i
);
6029 /* If the constraint does not allow memory make sure we gimplify
6030 it to a register if it is not already but its base is. This
6031 happens for complex and vector components. */
6034 tree op
= TREE_VALUE (link
);
6035 if (! is_gimple_val (op
)
6036 && is_gimple_reg_type (TREE_TYPE (op
))
6037 && is_gimple_reg (get_base_address (op
)))
6039 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6043 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6044 tem
, unshare_expr (op
));
6045 gimplify_and_add (ass
, pre_p
);
6047 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6048 gimplify_and_add (ass
, post_p
);
6050 TREE_VALUE (link
) = tem
;
6055 vec_safe_push (outputs
, link
);
6056 TREE_CHAIN (link
) = NULL_TREE
;
6060 /* An input/output operand. To give the optimizers more
6061 flexibility, split it into separate input and output
6064 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6067 /* Turn the in/out constraint into an output constraint. */
6068 char *p
= xstrdup (constraint
);
6070 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6072 /* And add a matching input constraint. */
6075 sprintf (buf
, "%u", i
);
6077 /* If there are multiple alternatives in the constraint,
6078 handle each of them individually. Those that allow register
6079 will be replaced with operand number, the others will stay
6081 if (strchr (p
, ',') != NULL
)
6083 size_t len
= 0, buflen
= strlen (buf
);
6084 char *beg
, *end
, *str
, *dst
;
6088 end
= strchr (beg
, ',');
6090 end
= strchr (beg
, '\0');
6091 if ((size_t) (end
- beg
) < buflen
)
6094 len
+= end
- beg
+ 1;
6101 str
= (char *) alloca (len
);
6102 for (beg
= p
+ 1, dst
= str
;;)
6105 bool mem_p
, reg_p
, inout_p
;
6107 end
= strchr (beg
, ',');
6112 parse_output_constraint (&tem
, i
, 0, 0,
6113 &mem_p
, ®_p
, &inout_p
);
6118 memcpy (dst
, buf
, buflen
);
6127 memcpy (dst
, beg
, len
);
6136 input
= build_string (dst
- str
, str
);
6139 input
= build_string (strlen (buf
), buf
);
6142 input
= build_string (constraint_len
- 1, constraint
+ 1);
6146 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
6147 unshare_expr (TREE_VALUE (link
)));
6148 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
6152 link_next
= NULL_TREE
;
6153 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
6155 link_next
= TREE_CHAIN (link
);
6156 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6157 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6158 oconstraints
, &allows_mem
, &allows_reg
);
6160 /* If we can't make copies, we can only accept memory. */
6161 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link
))))
6167 error ("impossible constraint in %<asm%>");
6168 error ("non-memory input %d must stay in memory", i
);
6173 /* If the operand is a memory input, it should be an lvalue. */
6174 if (!allows_reg
&& allows_mem
)
6176 tree inputv
= TREE_VALUE (link
);
6177 STRIP_NOPS (inputv
);
6178 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
6179 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
6180 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
6181 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
6182 || TREE_CODE (inputv
) == MODIFY_EXPR
)
6183 TREE_VALUE (link
) = error_mark_node
;
6184 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6185 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
6186 if (tret
!= GS_ERROR
)
6188 /* Unlike output operands, memory inputs are not guaranteed
6189 to be lvalues by the FE, and while the expressions are
6190 marked addressable there, if it is e.g. a statement
6191 expression, temporaries in it might not end up being
6192 addressable. They might be already used in the IL and thus
6193 it is too late to make them addressable now though. */
6194 tree x
= TREE_VALUE (link
);
6195 while (handled_component_p (x
))
6196 x
= TREE_OPERAND (x
, 0);
6197 if (TREE_CODE (x
) == MEM_REF
6198 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
6199 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
6201 || TREE_CODE (x
) == PARM_DECL
6202 || TREE_CODE (x
) == RESULT_DECL
)
6203 && !TREE_ADDRESSABLE (x
)
6204 && is_gimple_reg (x
))
6206 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
6208 "memory input %d is not directly addressable",
6210 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
6213 mark_addressable (TREE_VALUE (link
));
6214 if (tret
== GS_ERROR
)
6216 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
6217 "memory input %d is not directly addressable", i
);
6223 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6224 is_gimple_asm_val
, fb_rvalue
);
6225 if (tret
== GS_ERROR
)
6229 TREE_CHAIN (link
) = NULL_TREE
;
6230 vec_safe_push (inputs
, link
);
6233 link_next
= NULL_TREE
;
6234 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
6236 link_next
= TREE_CHAIN (link
);
6237 TREE_CHAIN (link
) = NULL_TREE
;
6238 vec_safe_push (clobbers
, link
);
6241 link_next
= NULL_TREE
;
6242 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
6244 link_next
= TREE_CHAIN (link
);
6245 TREE_CHAIN (link
) = NULL_TREE
;
6246 vec_safe_push (labels
, link
);
6249 /* Do not add ASMs with errors to the gimple IL stream. */
6250 if (ret
!= GS_ERROR
)
6252 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
6253 inputs
, outputs
, clobbers
, labels
);
6255 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
6256 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
6258 gimplify_seq_add_stmt (pre_p
, stmt
);
6264 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6265 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6266 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6267 return to this function.
6269 FIXME should we complexify the prequeue handling instead? Or use flags
6270 for all the cleanups and let the optimizer tighten them up? The current
6271 code seems pretty fragile; it will break on a cleanup within any
6272 non-conditional nesting. But any such nesting would be broken, anyway;
6273 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6274 and continues out of it. We can do that at the RTL level, though, so
6275 having an optimizer to tighten up try/finally regions would be a Good
6278 static enum gimplify_status
6279 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
6281 gimple_stmt_iterator iter
;
6282 gimple_seq body_sequence
= NULL
;
6284 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
6286 /* We only care about the number of conditions between the innermost
6287 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6288 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6289 int old_conds
= gimplify_ctxp
->conditions
;
6290 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
6291 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
6292 gimplify_ctxp
->conditions
= 0;
6293 gimplify_ctxp
->conditional_cleanups
= NULL
;
6294 gimplify_ctxp
->in_cleanup_point_expr
= true;
6296 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
6298 gimplify_ctxp
->conditions
= old_conds
;
6299 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
6300 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
6302 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
6304 gimple
*wce
= gsi_stmt (iter
);
6306 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
6308 if (gsi_one_before_end_p (iter
))
6310 /* Note that gsi_insert_seq_before and gsi_remove do not
6311 scan operands, unlike some other sequence mutators. */
6312 if (!gimple_wce_cleanup_eh_only (wce
))
6313 gsi_insert_seq_before_without_update (&iter
,
6314 gimple_wce_cleanup (wce
),
6316 gsi_remove (&iter
, true);
6323 enum gimple_try_flags kind
;
6325 if (gimple_wce_cleanup_eh_only (wce
))
6326 kind
= GIMPLE_TRY_CATCH
;
6328 kind
= GIMPLE_TRY_FINALLY
;
6329 seq
= gsi_split_seq_after (iter
);
6331 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
6332 /* Do not use gsi_replace here, as it may scan operands.
6333 We want to do a simple structural modification only. */
6334 gsi_set_stmt (&iter
, gtry
);
6335 iter
= gsi_start (gtry
->eval
);
6342 gimplify_seq_add_seq (pre_p
, body_sequence
);
6355 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6356 is the cleanup action required. EH_ONLY is true if the cleanup should
6357 only be executed if an exception is thrown, not on normal exit.
6358 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6359 only valid for clobbers. */
6362 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
6363 bool force_uncond
= false)
6366 gimple_seq cleanup_stmts
= NULL
;
6368 /* Errors can result in improperly nested cleanups. Which results in
6369 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6373 if (gimple_conditional_context ())
6375 /* If we're in a conditional context, this is more complex. We only
6376 want to run the cleanup if we actually ran the initialization that
6377 necessitates it, but we want to run it after the end of the
6378 conditional context. So we wrap the try/finally around the
6379 condition and use a flag to determine whether or not to actually
6380 run the destructor. Thus
6384 becomes (approximately)
6388 if (test) { A::A(temp); flag = 1; val = f(temp); }
6391 if (flag) A::~A(temp);
6397 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6398 wce
= gimple_build_wce (cleanup_stmts
);
6399 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6403 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
6404 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
6405 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
6407 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
6408 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6409 wce
= gimple_build_wce (cleanup_stmts
);
6411 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
6412 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6413 gimplify_seq_add_stmt (pre_p
, ftrue
);
6415 /* Because of this manipulation, and the EH edges that jump
6416 threading cannot redirect, the temporary (VAR) will appear
6417 to be used uninitialized. Don't warn. */
6418 TREE_NO_WARNING (var
) = 1;
6423 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6424 wce
= gimple_build_wce (cleanup_stmts
);
6425 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
6426 gimplify_seq_add_stmt (pre_p
, wce
);
6430 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6432 static enum gimplify_status
6433 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6435 tree targ
= *expr_p
;
6436 tree temp
= TARGET_EXPR_SLOT (targ
);
6437 tree init
= TARGET_EXPR_INITIAL (targ
);
6438 enum gimplify_status ret
;
6440 bool unpoison_empty_seq
= false;
6441 gimple_stmt_iterator unpoison_it
;
6445 tree cleanup
= NULL_TREE
;
6447 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6448 to the temps list. Handle also variable length TARGET_EXPRs. */
6449 if (TREE_CODE (DECL_SIZE (temp
)) != INTEGER_CST
)
6451 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
6452 gimplify_type_sizes (TREE_TYPE (temp
), pre_p
);
6453 gimplify_vla_decl (temp
, pre_p
);
6457 /* Save location where we need to place unpoisoning. It's possible
6458 that a variable will be converted to needs_to_live_in_memory. */
6459 unpoison_it
= gsi_last (*pre_p
);
6460 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
6462 gimple_add_tmp_var (temp
);
6465 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6466 expression is supposed to initialize the slot. */
6467 if (VOID_TYPE_P (TREE_TYPE (init
)))
6468 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6471 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
6473 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6475 ggc_free (init_expr
);
6477 if (ret
== GS_ERROR
)
6479 /* PR c++/28266 Make sure this is expanded only once. */
6480 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6484 gimplify_and_add (init
, pre_p
);
6486 /* If needed, push the cleanup for the temp. */
6487 if (TARGET_EXPR_CLEANUP (targ
))
6489 if (CLEANUP_EH_ONLY (targ
))
6490 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
6491 CLEANUP_EH_ONLY (targ
), pre_p
);
6493 cleanup
= TARGET_EXPR_CLEANUP (targ
);
6496 /* Add a clobber for the temporary going out of scope, like
6497 gimplify_bind_expr. */
6498 if (gimplify_ctxp
->in_cleanup_point_expr
6499 && needs_to_live_in_memory (temp
))
6501 if (flag_stack_reuse
== SR_ALL
)
6503 tree clobber
= build_constructor (TREE_TYPE (temp
),
6505 TREE_THIS_VOLATILE (clobber
) = true;
6506 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
6507 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
6509 if (asan_poisoned_variables
6510 && DECL_ALIGN (temp
) <= MAX_SUPPORTED_STACK_ALIGNMENT
6511 && dbg_cnt (asan_use_after_scope
))
6513 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
6516 if (unpoison_empty_seq
)
6517 unpoison_it
= gsi_start (*pre_p
);
6519 asan_poison_variable (temp
, false, &unpoison_it
,
6520 unpoison_empty_seq
);
6521 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
6526 gimple_push_cleanup (temp
, cleanup
, false, pre_p
);
6528 /* Only expand this once. */
6529 TREE_OPERAND (targ
, 3) = init
;
6530 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6533 /* We should have expanded this before. */
6534 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
6540 /* Gimplification of expression trees. */
6542 /* Gimplify an expression which appears at statement context. The
6543 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6544 NULL, a new sequence is allocated.
6546 Return true if we actually added a statement to the queue. */
6549 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
6551 gimple_seq_node last
;
6553 last
= gimple_seq_last (*seq_p
);
6554 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
6555 return last
!= gimple_seq_last (*seq_p
);
6558 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6559 to CTX. If entries already exist, force them to be some flavor of private.
6560 If there is no enclosing parallel, do nothing. */
6563 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
6567 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
6572 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6575 if (n
->value
& GOVD_SHARED
)
6576 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
6577 else if (n
->value
& GOVD_MAP
)
6578 n
->value
|= GOVD_MAP_TO_ONLY
;
6582 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
6584 if (ctx
->target_map_scalars_firstprivate
)
6585 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6587 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
6589 else if (ctx
->region_type
!= ORT_WORKSHARE
6590 && ctx
->region_type
!= ORT_SIMD
6591 && ctx
->region_type
!= ORT_ACC
6592 && !(ctx
->region_type
& ORT_TARGET_DATA
))
6593 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6595 ctx
= ctx
->outer_context
;
6600 /* Similarly for each of the type sizes of TYPE. */
6603 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
6605 if (type
== NULL
|| type
== error_mark_node
)
6607 type
= TYPE_MAIN_VARIANT (type
);
6609 if (ctx
->privatized_types
->add (type
))
6612 switch (TREE_CODE (type
))
6618 case FIXED_POINT_TYPE
:
6619 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
6620 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
6624 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6625 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
6630 case QUAL_UNION_TYPE
:
6633 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
6634 if (TREE_CODE (field
) == FIELD_DECL
)
6636 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
6637 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
6643 case REFERENCE_TYPE
:
6644 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6651 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
6652 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
6653 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
6656 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6659 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
6662 unsigned int nflags
;
6665 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
6668 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6669 there are constructors involved somewhere. Exception is a shared clause,
6670 there is nothing privatized in that case. */
6671 if ((flags
& GOVD_SHARED
) == 0
6672 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
6673 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
6676 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6677 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
6679 /* We shouldn't be re-adding the decl with the same data
6681 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
6682 nflags
= n
->value
| flags
;
6683 /* The only combination of data sharing classes we should see is
6684 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6685 reduction variables to be used in data sharing clauses. */
6686 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
6687 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
6688 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
6689 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
6694 /* When adding a variable-sized variable, we have to handle all sorts
6695 of additional bits of data: the pointer replacement variable, and
6696 the parameters of the type. */
6697 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
6699 /* Add the pointer replacement variable as PRIVATE if the variable
6700 replacement is private, else FIRSTPRIVATE since we'll need the
6701 address of the original variable either for SHARED, or for the
6702 copy into or out of the context. */
6703 if (!(flags
& GOVD_LOCAL
))
6705 if (flags
& GOVD_MAP
)
6706 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
6707 else if (flags
& GOVD_PRIVATE
)
6708 nflags
= GOVD_PRIVATE
;
6709 else if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
6710 && (flags
& GOVD_FIRSTPRIVATE
))
6711 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
6713 nflags
= GOVD_FIRSTPRIVATE
;
6714 nflags
|= flags
& GOVD_SEEN
;
6715 t
= DECL_VALUE_EXPR (decl
);
6716 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
6717 t
= TREE_OPERAND (t
, 0);
6718 gcc_assert (DECL_P (t
));
6719 omp_add_variable (ctx
, t
, nflags
);
6722 /* Add all of the variable and type parameters (which should have
6723 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6724 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
6725 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
6726 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
6728 /* The variable-sized variable itself is never SHARED, only some form
6729 of PRIVATE. The sharing would take place via the pointer variable
6730 which we remapped above. */
6731 if (flags
& GOVD_SHARED
)
6732 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
6733 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
6735 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6736 alloca statement we generate for the variable, so make sure it
6737 is available. This isn't automatically needed for the SHARED
6738 case, since we won't be allocating local storage then.
6739 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6740 in this case omp_notice_variable will be called later
6741 on when it is gimplified. */
6742 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
6743 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
6744 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
6746 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
6747 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
6749 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
6751 /* Similar to the direct variable sized case above, we'll need the
6752 size of references being privatized. */
6753 if ((flags
& GOVD_SHARED
) == 0)
6755 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
6757 omp_notice_variable (ctx
, t
, true);
6764 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
6766 /* For reductions clauses in OpenACC loop directives, by default create a
6767 copy clause on the enclosing parallel construct for carrying back the
6769 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
6771 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
6774 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
6777 /* Ignore local variables and explicitly declared clauses. */
6778 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
6780 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
6782 /* According to the OpenACC spec, such a reduction variable
6783 should already have a copy map on a kernels construct,
6784 verify that here. */
6785 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
6786 && (n
->value
& GOVD_MAP
));
6788 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
6790 /* Remove firstprivate and make it a copy map. */
6791 n
->value
&= ~GOVD_FIRSTPRIVATE
;
6792 n
->value
|= GOVD_MAP
;
6795 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
6797 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
6798 GOVD_MAP
| GOVD_SEEN
);
6801 outer_ctx
= outer_ctx
->outer_context
;
6806 /* Notice a threadprivate variable DECL used in OMP context CTX.
6807 This just prints out diagnostics about threadprivate variable uses
6808 in untied tasks. If DECL2 is non-NULL, prevent this warning
6809 on that variable. */
6812 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
6816 struct gimplify_omp_ctx
*octx
;
6818 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
6819 if ((octx
->region_type
& ORT_TARGET
) != 0)
6821 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
6824 error ("threadprivate variable %qE used in target region",
6826 error_at (octx
->location
, "enclosing target region");
6827 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
6830 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
6833 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
6835 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6838 error ("threadprivate variable %qE used in untied task",
6840 error_at (ctx
->location
, "enclosing task");
6841 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
6844 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
6848 /* Return true if global var DECL is device resident. */
6851 device_resident_p (tree decl
)
6853 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
6858 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
6860 tree c
= TREE_VALUE (t
);
6861 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
6868 /* Return true if DECL has an ACC DECLARE attribute. */
6871 is_oacc_declared (tree decl
)
6873 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
6874 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
6875 return declared
!= NULL_TREE
;
6878 /* Determine outer default flags for DECL mentioned in an OMP region
6879 but not declared in an enclosing clause.
6881 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6882 remapped firstprivate instead of shared. To some extent this is
6883 addressed in omp_firstprivatize_type_sizes, but not
6887 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
6888 bool in_code
, unsigned flags
)
6890 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
6891 enum omp_clause_default_kind kind
;
6893 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
6894 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
6895 default_kind
= kind
;
6897 switch (default_kind
)
6899 case OMP_CLAUSE_DEFAULT_NONE
:
6903 if (ctx
->region_type
& ORT_PARALLEL
)
6905 else if (ctx
->region_type
& ORT_TASK
)
6907 else if (ctx
->region_type
& ORT_TEAMS
)
6912 error ("%qE not specified in enclosing %qs",
6913 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
6914 error_at (ctx
->location
, "enclosing %qs", rtype
);
6917 case OMP_CLAUSE_DEFAULT_SHARED
:
6918 flags
|= GOVD_SHARED
;
6920 case OMP_CLAUSE_DEFAULT_PRIVATE
:
6921 flags
|= GOVD_PRIVATE
;
6923 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
6924 flags
|= GOVD_FIRSTPRIVATE
;
6926 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
6927 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6928 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
6929 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
6931 omp_notice_variable (octx
, decl
, in_code
);
6932 for (; octx
; octx
= octx
->outer_context
)
6936 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
6937 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
6938 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
6940 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
6942 flags
|= GOVD_FIRSTPRIVATE
;
6945 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
6947 flags
|= GOVD_SHARED
;
6953 if (TREE_CODE (decl
) == PARM_DECL
6954 || (!is_global_var (decl
)
6955 && DECL_CONTEXT (decl
) == current_function_decl
))
6956 flags
|= GOVD_FIRSTPRIVATE
;
6958 flags
|= GOVD_SHARED
;
6970 /* Determine outer default flags for DECL mentioned in an OACC region
6971 but not declared in an enclosing clause. */
6974 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
6977 bool on_device
= false;
6978 bool declared
= is_oacc_declared (decl
);
6979 tree type
= TREE_TYPE (decl
);
6981 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
6982 type
= TREE_TYPE (type
);
6984 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
6985 && is_global_var (decl
)
6986 && device_resident_p (decl
))
6989 flags
|= GOVD_MAP_TO_ONLY
;
6992 switch (ctx
->region_type
)
6994 case ORT_ACC_KERNELS
:
6997 if (AGGREGATE_TYPE_P (type
))
6999 /* Aggregates default to 'present_or_copy', or 'present'. */
7000 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7003 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7006 /* Scalars default to 'copy'. */
7007 flags
|= GOVD_MAP
| GOVD_MAP_FORCE
;
7011 case ORT_ACC_PARALLEL
:
7014 if (on_device
|| declared
)
7016 else if (AGGREGATE_TYPE_P (type
))
7018 /* Aggregates default to 'present_or_copy', or 'present'. */
7019 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7022 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7025 /* Scalars default to 'firstprivate'. */
7026 flags
|= GOVD_FIRSTPRIVATE
;
7034 if (DECL_ARTIFICIAL (decl
))
7035 ; /* We can get compiler-generated decls, and should not complain
7037 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
7039 error ("%qE not specified in enclosing OpenACC %qs construct",
7040 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rkind
);
7041 inform (ctx
->location
, "enclosing OpenACC %qs construct", rkind
);
7043 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_PRESENT
)
7044 ; /* Handled above. */
7046 gcc_checking_assert (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
7051 /* Record the fact that DECL was used within the OMP context CTX.
7052 IN_CODE is true when real code uses DECL, and false when we should
7053 merely emit default(none) errors. Return true if DECL is going to
7054 be remapped and thus DECL shouldn't be gimplified into its
7055 DECL_VALUE_EXPR (if any). */
7058 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
7061 unsigned flags
= in_code
? GOVD_SEEN
: 0;
7062 bool ret
= false, shared
;
7064 if (error_operand_p (decl
))
7067 if (ctx
->region_type
== ORT_NONE
)
7068 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7070 if (is_global_var (decl
))
7072 /* Threadprivate variables are predetermined. */
7073 if (DECL_THREAD_LOCAL_P (decl
))
7074 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
7076 if (DECL_HAS_VALUE_EXPR_P (decl
))
7078 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
7080 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
7081 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
7084 if (gimplify_omp_ctxp
->outer_context
== NULL
7086 && oacc_get_fn_attrib (current_function_decl
))
7088 location_t loc
= DECL_SOURCE_LOCATION (decl
);
7090 if (lookup_attribute ("omp declare target link",
7091 DECL_ATTRIBUTES (decl
)))
7094 "%qE with %<link%> clause used in %<routine%> function",
7098 else if (!lookup_attribute ("omp declare target",
7099 DECL_ATTRIBUTES (decl
)))
7102 "%qE requires a %<declare%> directive for use "
7103 "in a %<routine%> function", DECL_NAME (decl
));
7109 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7110 if ((ctx
->region_type
& ORT_TARGET
) != 0)
7112 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, true);
7115 unsigned nflags
= flags
;
7116 if (ctx
->target_map_pointers_as_0len_arrays
7117 || ctx
->target_map_scalars_firstprivate
)
7119 bool is_declare_target
= false;
7120 bool is_scalar
= false;
7121 if (is_global_var (decl
)
7122 && varpool_node::get_create (decl
)->offloadable
)
7124 struct gimplify_omp_ctx
*octx
;
7125 for (octx
= ctx
->outer_context
;
7126 octx
; octx
= octx
->outer_context
)
7128 n
= splay_tree_lookup (octx
->variables
,
7129 (splay_tree_key
)decl
);
7131 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
7132 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7135 is_declare_target
= octx
== NULL
;
7137 if (!is_declare_target
&& ctx
->target_map_scalars_firstprivate
)
7138 is_scalar
= lang_hooks
.decls
.omp_scalar_p (decl
);
7139 if (is_declare_target
)
7141 else if (ctx
->target_map_pointers_as_0len_arrays
7142 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
7143 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
7144 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
7146 nflags
|= GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
7148 nflags
|= GOVD_FIRSTPRIVATE
;
7151 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
7152 if ((ctx
->region_type
& ORT_ACC
) && octx
)
7154 /* Look in outer OpenACC contexts, to see if there's a
7155 data attribute for this variable. */
7156 omp_notice_variable (octx
, decl
, in_code
);
7158 for (; octx
; octx
= octx
->outer_context
)
7160 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
7163 = splay_tree_lookup (octx
->variables
,
7164 (splay_tree_key
) decl
);
7167 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
7168 error ("variable %qE declared in enclosing "
7169 "%<host_data%> region", DECL_NAME (decl
));
7171 if (octx
->region_type
== ORT_ACC_DATA
7172 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
7173 nflags
|= GOVD_MAP_0LEN_ARRAY
;
7180 tree type
= TREE_TYPE (decl
);
7183 && gimplify_omp_ctxp
->target_firstprivatize_array_bases
7184 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7185 type
= TREE_TYPE (type
);
7187 && !lang_hooks
.types
.omp_mappable_type (type
))
7189 error ("%qD referenced in target region does not have "
7190 "a mappable type", decl
);
7191 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
7193 else if (nflags
== flags
)
7195 if ((ctx
->region_type
& ORT_ACC
) != 0)
7196 nflags
= oacc_default_clause (ctx
, decl
, flags
);
7202 omp_add_variable (ctx
, decl
, nflags
);
7206 /* If nothing changed, there's nothing left to do. */
7207 if ((n
->value
& flags
) == flags
)
7217 if (ctx
->region_type
== ORT_WORKSHARE
7218 || ctx
->region_type
== ORT_SIMD
7219 || ctx
->region_type
== ORT_ACC
7220 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
7223 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
7225 if ((flags
& GOVD_PRIVATE
)
7226 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
7227 flags
|= GOVD_PRIVATE_OUTER_REF
;
7229 omp_add_variable (ctx
, decl
, flags
);
7231 shared
= (flags
& GOVD_SHARED
) != 0;
7232 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7236 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
7237 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
7238 && DECL_SIZE (decl
))
7240 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7243 tree t
= DECL_VALUE_EXPR (decl
);
7244 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7245 t
= TREE_OPERAND (t
, 0);
7246 gcc_assert (DECL_P (t
));
7247 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7248 n2
->value
|= GOVD_SEEN
;
7250 else if (lang_hooks
.decls
.omp_privatize_by_reference (decl
)
7251 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
7252 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
7256 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7257 gcc_assert (DECL_P (t
));
7258 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7260 omp_notice_variable (ctx
, t
, true);
7264 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
7265 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7267 /* If nothing changed, there's nothing left to do. */
7268 if ((n
->value
& flags
) == flags
)
7274 /* If the variable is private in the current context, then we don't
7275 need to propagate anything to an outer context. */
7276 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
7278 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7279 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7281 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
7282 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7283 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7285 if (ctx
->outer_context
7286 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
7291 /* Verify that DECL is private within CTX. If there's specific information
7292 to the contrary in the innermost scope, generate an error. */
7295 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
7299 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7302 if (n
->value
& GOVD_SHARED
)
7304 if (ctx
== gimplify_omp_ctxp
)
7307 error ("iteration variable %qE is predetermined linear",
7310 error ("iteration variable %qE should be private",
7312 n
->value
= GOVD_PRIVATE
;
7318 else if ((n
->value
& GOVD_EXPLICIT
) != 0
7319 && (ctx
== gimplify_omp_ctxp
7320 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7321 && gimplify_omp_ctxp
->outer_context
== ctx
)))
7323 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
7324 error ("iteration variable %qE should not be firstprivate",
7326 else if ((n
->value
& GOVD_REDUCTION
) != 0)
7327 error ("iteration variable %qE should not be reduction",
7329 else if (simd
== 0 && (n
->value
& GOVD_LINEAR
) != 0)
7330 error ("iteration variable %qE should not be linear",
7332 else if (simd
== 1 && (n
->value
& GOVD_LASTPRIVATE
) != 0)
7333 error ("iteration variable %qE should not be lastprivate",
7335 else if (simd
&& (n
->value
& GOVD_PRIVATE
) != 0)
7336 error ("iteration variable %qE should not be private",
7338 else if (simd
== 2 && (n
->value
& GOVD_LINEAR
) != 0)
7339 error ("iteration variable %qE is predetermined linear",
7342 return (ctx
== gimplify_omp_ctxp
7343 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7344 && gimplify_omp_ctxp
->outer_context
== ctx
));
7347 if (ctx
->region_type
!= ORT_WORKSHARE
7348 && ctx
->region_type
!= ORT_SIMD
7349 && ctx
->region_type
!= ORT_ACC
)
7351 else if (ctx
->outer_context
)
7352 return omp_is_private (ctx
->outer_context
, decl
, simd
);
7356 /* Return true if DECL is private within a parallel region
7357 that binds to the current construct's context or in parallel
7358 region's REDUCTION clause. */
7361 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
7367 ctx
= ctx
->outer_context
;
7370 if (is_global_var (decl
))
7373 /* References might be private, but might be shared too,
7374 when checking for copyprivate, assume they might be
7375 private, otherwise assume they might be shared. */
7379 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7382 /* Treat C++ privatized non-static data members outside
7383 of the privatization the same. */
7384 if (omp_member_access_dummy_var (decl
))
7390 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
7392 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7393 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7398 if ((n
->value
& GOVD_LOCAL
) != 0
7399 && omp_member_access_dummy_var (decl
))
7401 return (n
->value
& GOVD_SHARED
) == 0;
7404 while (ctx
->region_type
== ORT_WORKSHARE
7405 || ctx
->region_type
== ORT_SIMD
7406 || ctx
->region_type
== ORT_ACC
);
7410 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7413 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
7417 /* If this node has been visited, unmark it and keep looking. */
7418 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
7421 if (IS_TYPE_OR_DECL_P (t
))
7426 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7427 and previous omp contexts. */
7430 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
7431 enum omp_region_type region_type
,
7432 enum tree_code code
)
7434 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
7436 hash_map
<tree
, tree
> *struct_map_to_clause
= NULL
;
7437 tree
*prev_list_p
= NULL
;
7439 ctx
= new_omp_context (region_type
);
7440 outer_ctx
= ctx
->outer_context
;
7441 if (code
== OMP_TARGET
)
7443 if (!lang_GNU_Fortran ())
7444 ctx
->target_map_pointers_as_0len_arrays
= true;
7445 ctx
->target_map_scalars_firstprivate
= true;
7447 if (!lang_GNU_Fortran ())
7451 case OMP_TARGET_DATA
:
7452 case OMP_TARGET_ENTER_DATA
:
7453 case OMP_TARGET_EXIT_DATA
:
7455 case OACC_HOST_DATA
:
7456 ctx
->target_firstprivatize_array_bases
= true;
7461 while ((c
= *list_p
) != NULL
)
7463 bool remove
= false;
7464 bool notice_outer
= true;
7465 const char *check_non_private
= NULL
;
7469 switch (OMP_CLAUSE_CODE (c
))
7471 case OMP_CLAUSE_PRIVATE
:
7472 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
7473 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
7475 flags
|= GOVD_PRIVATE_OUTER_REF
;
7476 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
7479 notice_outer
= false;
7481 case OMP_CLAUSE_SHARED
:
7482 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
7484 case OMP_CLAUSE_FIRSTPRIVATE
:
7485 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
7486 check_non_private
= "firstprivate";
7488 case OMP_CLAUSE_LASTPRIVATE
:
7489 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
7490 check_non_private
= "lastprivate";
7491 decl
= OMP_CLAUSE_DECL (c
);
7492 if (error_operand_p (decl
))
7495 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
7496 || outer_ctx
->region_type
== ORT_COMBINED_TEAMS
)
7497 && splay_tree_lookup (outer_ctx
->variables
,
7498 (splay_tree_key
) decl
) == NULL
)
7500 omp_add_variable (outer_ctx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
7501 if (outer_ctx
->outer_context
)
7502 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
7505 && (outer_ctx
->region_type
& ORT_TASK
) != 0
7506 && outer_ctx
->combined_loop
7507 && splay_tree_lookup (outer_ctx
->variables
,
7508 (splay_tree_key
) decl
) == NULL
)
7510 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
7511 if (outer_ctx
->outer_context
)
7512 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
7515 && (outer_ctx
->region_type
== ORT_WORKSHARE
7516 || outer_ctx
->region_type
== ORT_ACC
)
7517 && outer_ctx
->combined_loop
7518 && splay_tree_lookup (outer_ctx
->variables
,
7519 (splay_tree_key
) decl
) == NULL
7520 && !omp_check_private (outer_ctx
, decl
, false))
7522 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
7523 if (outer_ctx
->outer_context
7524 && (outer_ctx
->outer_context
->region_type
7525 == ORT_COMBINED_PARALLEL
)
7526 && splay_tree_lookup (outer_ctx
->outer_context
->variables
,
7527 (splay_tree_key
) decl
) == NULL
)
7529 struct gimplify_omp_ctx
*octx
= outer_ctx
->outer_context
;
7530 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
7531 if (octx
->outer_context
)
7533 octx
= octx
->outer_context
;
7534 if (octx
->region_type
== ORT_WORKSHARE
7535 && octx
->combined_loop
7536 && splay_tree_lookup (octx
->variables
,
7537 (splay_tree_key
) decl
) == NULL
7538 && !omp_check_private (octx
, decl
, false))
7540 omp_add_variable (octx
, decl
,
7541 GOVD_LASTPRIVATE
| GOVD_SEEN
);
7542 octx
= octx
->outer_context
;
7544 && octx
->region_type
== ORT_COMBINED_TEAMS
7545 && (splay_tree_lookup (octx
->variables
,
7546 (splay_tree_key
) decl
)
7549 omp_add_variable (octx
, decl
,
7550 GOVD_SHARED
| GOVD_SEEN
);
7551 octx
= octx
->outer_context
;
7555 omp_notice_variable (octx
, decl
, true);
7558 else if (outer_ctx
->outer_context
)
7559 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
7562 case OMP_CLAUSE_REDUCTION
:
7563 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
7564 /* OpenACC permits reductions on private variables. */
7565 if (!(region_type
& ORT_ACC
))
7566 check_non_private
= "reduction";
7567 decl
= OMP_CLAUSE_DECL (c
);
7568 if (TREE_CODE (decl
) == MEM_REF
)
7570 tree type
= TREE_TYPE (decl
);
7571 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
7572 NULL
, is_gimple_val
, fb_rvalue
, false)
7578 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7581 omp_firstprivatize_variable (ctx
, v
);
7582 omp_notice_variable (ctx
, v
, true);
7584 decl
= TREE_OPERAND (decl
, 0);
7585 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
7587 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
7588 NULL
, is_gimple_val
, fb_rvalue
, false)
7594 v
= TREE_OPERAND (decl
, 1);
7597 omp_firstprivatize_variable (ctx
, v
);
7598 omp_notice_variable (ctx
, v
, true);
7600 decl
= TREE_OPERAND (decl
, 0);
7602 if (TREE_CODE (decl
) == ADDR_EXPR
7603 || TREE_CODE (decl
) == INDIRECT_REF
)
7604 decl
= TREE_OPERAND (decl
, 0);
7607 case OMP_CLAUSE_LINEAR
:
7608 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
7609 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
7616 if (code
== OMP_SIMD
7617 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
7619 struct gimplify_omp_ctx
*octx
= outer_ctx
;
7621 && octx
->region_type
== ORT_WORKSHARE
7622 && octx
->combined_loop
7623 && !octx
->distribute
)
7625 if (octx
->outer_context
7626 && (octx
->outer_context
->region_type
7627 == ORT_COMBINED_PARALLEL
))
7628 octx
= octx
->outer_context
->outer_context
;
7630 octx
= octx
->outer_context
;
7633 && octx
->region_type
== ORT_WORKSHARE
7634 && octx
->combined_loop
7635 && octx
->distribute
)
7637 error_at (OMP_CLAUSE_LOCATION (c
),
7638 "%<linear%> clause for variable other than "
7639 "loop iterator specified on construct "
7640 "combined with %<distribute%>");
7645 /* For combined #pragma omp parallel for simd, need to put
7646 lastprivate and perhaps firstprivate too on the
7647 parallel. Similarly for #pragma omp for simd. */
7648 struct gimplify_omp_ctx
*octx
= outer_ctx
;
7652 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
7653 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
7655 decl
= OMP_CLAUSE_DECL (c
);
7656 if (error_operand_p (decl
))
7662 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
7663 flags
|= GOVD_FIRSTPRIVATE
;
7664 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
7665 flags
|= GOVD_LASTPRIVATE
;
7667 && octx
->region_type
== ORT_WORKSHARE
7668 && octx
->combined_loop
)
7670 if (octx
->outer_context
7671 && (octx
->outer_context
->region_type
7672 == ORT_COMBINED_PARALLEL
))
7673 octx
= octx
->outer_context
;
7674 else if (omp_check_private (octx
, decl
, false))
7678 && (octx
->region_type
& ORT_TASK
) != 0
7679 && octx
->combined_loop
)
7682 && octx
->region_type
== ORT_COMBINED_PARALLEL
7683 && ctx
->region_type
== ORT_WORKSHARE
7684 && octx
== outer_ctx
)
7685 flags
= GOVD_SEEN
| GOVD_SHARED
;
7687 && octx
->region_type
== ORT_COMBINED_TEAMS
)
7688 flags
= GOVD_SEEN
| GOVD_SHARED
;
7690 && octx
->region_type
== ORT_COMBINED_TARGET
)
7692 flags
&= ~GOVD_LASTPRIVATE
;
7693 if (flags
== GOVD_SEEN
)
7699 = splay_tree_lookup (octx
->variables
,
7700 (splay_tree_key
) decl
);
7701 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7706 omp_add_variable (octx
, decl
, flags
);
7707 if (octx
->outer_context
== NULL
)
7709 octx
= octx
->outer_context
;
7714 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
7715 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
7716 omp_notice_variable (octx
, decl
, true);
7718 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
7719 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
7720 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
7722 notice_outer
= false;
7723 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
7727 case OMP_CLAUSE_MAP
:
7728 decl
= OMP_CLAUSE_DECL (c
);
7729 if (error_operand_p (decl
))
7736 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
7739 case OMP_TARGET_DATA
:
7740 case OMP_TARGET_ENTER_DATA
:
7741 case OMP_TARGET_EXIT_DATA
:
7742 case OACC_ENTER_DATA
:
7743 case OACC_EXIT_DATA
:
7744 case OACC_HOST_DATA
:
7745 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7746 || (OMP_CLAUSE_MAP_KIND (c
)
7747 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7748 /* For target {,enter ,exit }data only the array slice is
7749 mapped, but not the pointer to it. */
7757 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
7759 struct gimplify_omp_ctx
*octx
;
7760 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
7762 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
7765 = splay_tree_lookup (octx
->variables
,
7766 (splay_tree_key
) decl
);
7768 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
7769 "declared in enclosing %<host_data%> region",
7773 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
7774 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
7775 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
7776 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
7777 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
7782 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7783 || (OMP_CLAUSE_MAP_KIND (c
)
7784 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7785 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
7788 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
7790 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
7791 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
7796 if (TREE_CODE (d
) == ARRAY_REF
)
7798 while (TREE_CODE (d
) == ARRAY_REF
)
7799 d
= TREE_OPERAND (d
, 0);
7800 if (TREE_CODE (d
) == COMPONENT_REF
7801 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
7804 pd
= &OMP_CLAUSE_DECL (c
);
7806 && TREE_CODE (decl
) == INDIRECT_REF
7807 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
7808 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
7811 pd
= &TREE_OPERAND (decl
, 0);
7812 decl
= TREE_OPERAND (decl
, 0);
7814 if (TREE_CODE (decl
) == COMPONENT_REF
)
7816 while (TREE_CODE (decl
) == COMPONENT_REF
)
7817 decl
= TREE_OPERAND (decl
, 0);
7818 if (TREE_CODE (decl
) == INDIRECT_REF
7819 && DECL_P (TREE_OPERAND (decl
, 0))
7820 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
7822 decl
= TREE_OPERAND (decl
, 0);
7824 if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
)
7832 if (error_operand_p (decl
))
7838 tree stype
= TREE_TYPE (decl
);
7839 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
7840 stype
= TREE_TYPE (stype
);
7841 if (TYPE_SIZE_UNIT (stype
) == NULL
7842 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
7844 error_at (OMP_CLAUSE_LOCATION (c
),
7845 "mapping field %qE of variable length "
7846 "structure", OMP_CLAUSE_DECL (c
));
7851 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
7853 /* Error recovery. */
7854 if (prev_list_p
== NULL
)
7859 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
7861 tree ch
= OMP_CLAUSE_CHAIN (*prev_list_p
);
7862 if (ch
== NULL_TREE
|| OMP_CLAUSE_CHAIN (ch
) != c
)
7871 HOST_WIDE_INT bitsize
, bitpos
;
7873 int unsignedp
, reversep
, volatilep
= 0;
7874 tree base
= OMP_CLAUSE_DECL (c
);
7875 while (TREE_CODE (base
) == ARRAY_REF
)
7876 base
= TREE_OPERAND (base
, 0);
7877 if (TREE_CODE (base
) == INDIRECT_REF
)
7878 base
= TREE_OPERAND (base
, 0);
7879 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7880 &mode
, &unsignedp
, &reversep
,
7882 tree orig_base
= base
;
7883 if ((TREE_CODE (base
) == INDIRECT_REF
7884 || (TREE_CODE (base
) == MEM_REF
7885 && integer_zerop (TREE_OPERAND (base
, 1))))
7886 && DECL_P (TREE_OPERAND (base
, 0))
7887 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
7889 base
= TREE_OPERAND (base
, 0);
7890 gcc_assert (base
== decl
7891 && (offset
== NULL_TREE
7892 || TREE_CODE (offset
) == INTEGER_CST
));
7895 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7896 bool ptr
= (OMP_CLAUSE_MAP_KIND (c
)
7897 == GOMP_MAP_ALWAYS_POINTER
);
7898 if (n
== NULL
|| (n
->value
& GOVD_MAP
) == 0)
7900 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7902 OMP_CLAUSE_SET_MAP_KIND (l
, GOMP_MAP_STRUCT
);
7903 if (orig_base
!= base
)
7904 OMP_CLAUSE_DECL (l
) = unshare_expr (orig_base
);
7906 OMP_CLAUSE_DECL (l
) = decl
;
7907 OMP_CLAUSE_SIZE (l
) = size_int (1);
7908 if (struct_map_to_clause
== NULL
)
7909 struct_map_to_clause
= new hash_map
<tree
, tree
>;
7910 struct_map_to_clause
->put (decl
, l
);
7913 enum gomp_map_kind mkind
7914 = code
== OMP_TARGET_EXIT_DATA
7915 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
7916 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7918 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
7919 OMP_CLAUSE_DECL (c2
)
7920 = unshare_expr (OMP_CLAUSE_DECL (c
));
7921 OMP_CLAUSE_CHAIN (c2
) = *prev_list_p
;
7922 OMP_CLAUSE_SIZE (c2
)
7923 = TYPE_SIZE_UNIT (ptr_type_node
);
7924 OMP_CLAUSE_CHAIN (l
) = c2
;
7925 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
7927 tree c4
= OMP_CLAUSE_CHAIN (*prev_list_p
);
7929 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7931 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
7932 OMP_CLAUSE_DECL (c3
)
7933 = unshare_expr (OMP_CLAUSE_DECL (c4
));
7934 OMP_CLAUSE_SIZE (c3
)
7935 = TYPE_SIZE_UNIT (ptr_type_node
);
7936 OMP_CLAUSE_CHAIN (c3
) = *prev_list_p
;
7937 OMP_CLAUSE_CHAIN (c2
) = c3
;
7944 OMP_CLAUSE_CHAIN (l
) = c
;
7946 list_p
= &OMP_CLAUSE_CHAIN (l
);
7948 if (orig_base
!= base
&& code
== OMP_TARGET
)
7950 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7952 enum gomp_map_kind mkind
7953 = GOMP_MAP_FIRSTPRIVATE_REFERENCE
;
7954 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
7955 OMP_CLAUSE_DECL (c2
) = decl
;
7956 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
7957 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
7958 OMP_CLAUSE_CHAIN (l
) = c2
;
7960 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
7961 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) || ptr
)
7967 tree
*osc
= struct_map_to_clause
->get (decl
);
7968 tree
*sc
= NULL
, *scp
= NULL
;
7969 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) || ptr
)
7970 n
->value
|= GOVD_SEEN
;
7973 o1
= wi::to_offset (offset
);
7977 o1
= o1
+ bitpos
/ BITS_PER_UNIT
;
7978 sc
= &OMP_CLAUSE_CHAIN (*osc
);
7980 && (OMP_CLAUSE_MAP_KIND (*sc
)
7981 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7982 sc
= &OMP_CLAUSE_CHAIN (*sc
);
7983 for (; *sc
!= c
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
7984 if (ptr
&& sc
== prev_list_p
)
7986 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
7988 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
7990 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
7996 HOST_WIDE_INT bitsize2
, bitpos2
;
7997 base
= OMP_CLAUSE_DECL (*sc
);
7998 if (TREE_CODE (base
) == ARRAY_REF
)
8000 while (TREE_CODE (base
) == ARRAY_REF
)
8001 base
= TREE_OPERAND (base
, 0);
8002 if (TREE_CODE (base
) != COMPONENT_REF
8003 || (TREE_CODE (TREE_TYPE (base
))
8007 else if (TREE_CODE (base
) == INDIRECT_REF
8008 && (TREE_CODE (TREE_OPERAND (base
, 0))
8010 && (TREE_CODE (TREE_TYPE
8011 (TREE_OPERAND (base
, 0)))
8013 base
= TREE_OPERAND (base
, 0);
8014 base
= get_inner_reference (base
, &bitsize2
,
8017 &reversep
, &volatilep
);
8018 if ((TREE_CODE (base
) == INDIRECT_REF
8019 || (TREE_CODE (base
) == MEM_REF
8020 && integer_zerop (TREE_OPERAND (base
,
8022 && DECL_P (TREE_OPERAND (base
, 0))
8023 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
,
8026 base
= TREE_OPERAND (base
, 0);
8031 gcc_assert (offset
== NULL_TREE
8032 || TREE_CODE (offset
) == INTEGER_CST
);
8033 tree d1
= OMP_CLAUSE_DECL (*sc
);
8034 tree d2
= OMP_CLAUSE_DECL (c
);
8035 while (TREE_CODE (d1
) == ARRAY_REF
)
8036 d1
= TREE_OPERAND (d1
, 0);
8037 while (TREE_CODE (d2
) == ARRAY_REF
)
8038 d2
= TREE_OPERAND (d2
, 0);
8039 if (TREE_CODE (d1
) == INDIRECT_REF
)
8040 d1
= TREE_OPERAND (d1
, 0);
8041 if (TREE_CODE (d2
) == INDIRECT_REF
)
8042 d2
= TREE_OPERAND (d2
, 0);
8043 while (TREE_CODE (d1
) == COMPONENT_REF
)
8044 if (TREE_CODE (d2
) == COMPONENT_REF
8045 && TREE_OPERAND (d1
, 1)
8046 == TREE_OPERAND (d2
, 1))
8048 d1
= TREE_OPERAND (d1
, 0);
8049 d2
= TREE_OPERAND (d2
, 0);
8055 error_at (OMP_CLAUSE_LOCATION (c
),
8056 "%qE appears more than once in map "
8057 "clauses", OMP_CLAUSE_DECL (c
));
8062 o2
= wi::to_offset (offset2
);
8066 o2
= o2
+ bitpos2
/ BITS_PER_UNIT
;
8067 if (wi::ltu_p (o1
, o2
)
8068 || (wi::eq_p (o1
, o2
) && bitpos
< bitpos2
))
8078 OMP_CLAUSE_SIZE (*osc
)
8079 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
),
8083 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8085 tree cl
= NULL_TREE
;
8086 enum gomp_map_kind mkind
8087 = code
== OMP_TARGET_EXIT_DATA
8088 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
8089 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8090 OMP_CLAUSE_DECL (c2
)
8091 = unshare_expr (OMP_CLAUSE_DECL (c
));
8092 OMP_CLAUSE_CHAIN (c2
) = scp
? *scp
: *prev_list_p
;
8093 OMP_CLAUSE_SIZE (c2
)
8094 = TYPE_SIZE_UNIT (ptr_type_node
);
8095 cl
= scp
? *prev_list_p
: c2
;
8096 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
8098 tree c4
= OMP_CLAUSE_CHAIN (*prev_list_p
);
8100 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8102 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8103 OMP_CLAUSE_DECL (c3
)
8104 = unshare_expr (OMP_CLAUSE_DECL (c4
));
8105 OMP_CLAUSE_SIZE (c3
)
8106 = TYPE_SIZE_UNIT (ptr_type_node
);
8107 OMP_CLAUSE_CHAIN (c3
) = *prev_list_p
;
8109 OMP_CLAUSE_CHAIN (c2
) = c3
;
8115 if (sc
== prev_list_p
)
8122 *prev_list_p
= OMP_CLAUSE_CHAIN (c
);
8123 list_p
= prev_list_p
;
8125 OMP_CLAUSE_CHAIN (c
) = *sc
;
8132 *list_p
= OMP_CLAUSE_CHAIN (c
);
8133 OMP_CLAUSE_CHAIN (c
) = *sc
;
8140 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
8141 && OMP_CLAUSE_CHAIN (c
)
8142 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
8143 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
8144 == GOMP_MAP_ALWAYS_POINTER
))
8145 prev_list_p
= list_p
;
8148 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
8149 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
8150 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
8151 flags
|= GOVD_MAP_ALWAYS_TO
;
8154 case OMP_CLAUSE_DEPEND
:
8155 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8157 tree deps
= OMP_CLAUSE_DECL (c
);
8158 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
8160 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
8161 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
8162 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
8163 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
8164 deps
= TREE_CHAIN (deps
);
8168 else if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
8170 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8172 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8173 NULL
, is_gimple_val
, fb_rvalue
);
8174 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8176 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8181 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8182 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8183 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8191 case OMP_CLAUSE_FROM
:
8192 case OMP_CLAUSE__CACHE_
:
8193 decl
= OMP_CLAUSE_DECL (c
);
8194 if (error_operand_p (decl
))
8199 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
8200 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
8201 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8202 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
8203 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8210 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
8211 NULL
, is_gimple_lvalue
, fb_lvalue
)
8221 case OMP_CLAUSE_USE_DEVICE_PTR
:
8222 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8224 case OMP_CLAUSE_IS_DEVICE_PTR
:
8225 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8229 decl
= OMP_CLAUSE_DECL (c
);
8231 if (error_operand_p (decl
))
8236 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
8238 tree t
= omp_member_access_dummy_var (decl
);
8241 tree v
= DECL_VALUE_EXPR (decl
);
8242 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
8244 omp_notice_variable (outer_ctx
, t
, true);
8247 if (code
== OACC_DATA
8248 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
8249 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
8250 flags
|= GOVD_MAP_0LEN_ARRAY
;
8251 omp_add_variable (ctx
, decl
, flags
);
8252 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
8253 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8255 omp_add_variable (ctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
8256 GOVD_LOCAL
| GOVD_SEEN
);
8257 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
8258 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
8260 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
8262 omp_add_variable (ctx
,
8263 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
8264 GOVD_LOCAL
| GOVD_SEEN
);
8265 gimplify_omp_ctxp
= ctx
;
8266 push_gimplify_context ();
8268 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
8269 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8271 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
8272 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
8273 pop_gimplify_context
8274 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
8275 push_gimplify_context ();
8276 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
8277 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8278 pop_gimplify_context
8279 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
8280 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
8281 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
8283 gimplify_omp_ctxp
= outer_ctx
;
8285 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
8286 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
8288 gimplify_omp_ctxp
= ctx
;
8289 push_gimplify_context ();
8290 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
8292 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
8294 TREE_SIDE_EFFECTS (bind
) = 1;
8295 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
8296 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
8298 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
8299 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
8300 pop_gimplify_context
8301 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
8302 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
8304 gimplify_omp_ctxp
= outer_ctx
;
8306 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
8307 && OMP_CLAUSE_LINEAR_STMT (c
))
8309 gimplify_omp_ctxp
= ctx
;
8310 push_gimplify_context ();
8311 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
8313 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
8315 TREE_SIDE_EFFECTS (bind
) = 1;
8316 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
8317 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
8319 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
8320 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
8321 pop_gimplify_context
8322 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
8323 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
8325 gimplify_omp_ctxp
= outer_ctx
;
8331 case OMP_CLAUSE_COPYIN
:
8332 case OMP_CLAUSE_COPYPRIVATE
:
8333 decl
= OMP_CLAUSE_DECL (c
);
8334 if (error_operand_p (decl
))
8339 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
8341 && !omp_check_private (ctx
, decl
, true))
8344 if (is_global_var (decl
))
8346 if (DECL_THREAD_LOCAL_P (decl
))
8348 else if (DECL_HAS_VALUE_EXPR_P (decl
))
8350 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
8354 && DECL_THREAD_LOCAL_P (value
))
8359 error_at (OMP_CLAUSE_LOCATION (c
),
8360 "copyprivate variable %qE is not threadprivate"
8361 " or private in outer context", DECL_NAME (decl
));
8365 omp_notice_variable (outer_ctx
, decl
, true);
8366 if (check_non_private
8367 && region_type
== ORT_WORKSHARE
8368 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
8369 || decl
== OMP_CLAUSE_DECL (c
)
8370 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
8371 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
8373 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
8374 == POINTER_PLUS_EXPR
8375 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8376 (OMP_CLAUSE_DECL (c
), 0), 0))
8378 && omp_check_private (ctx
, decl
, false))
8380 error ("%s variable %qE is private in outer context",
8381 check_non_private
, DECL_NAME (decl
));
8387 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
8388 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
8391 for (int i
= 0; i
< 2; i
++)
8392 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
8394 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
8395 case OMP_TASK
: p
[i
] = "task"; break;
8396 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
8397 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
8398 case OMP_TARGET
: p
[i
] = "target"; break;
8399 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
8400 case OMP_TARGET_ENTER_DATA
:
8401 p
[i
] = "target enter data"; break;
8402 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
8403 default: gcc_unreachable ();
8405 error_at (OMP_CLAUSE_LOCATION (c
),
8406 "expected %qs %<if%> clause modifier rather than %qs",
8412 case OMP_CLAUSE_FINAL
:
8413 OMP_CLAUSE_OPERAND (c
, 0)
8414 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
8417 case OMP_CLAUSE_SCHEDULE
:
8418 case OMP_CLAUSE_NUM_THREADS
:
8419 case OMP_CLAUSE_NUM_TEAMS
:
8420 case OMP_CLAUSE_THREAD_LIMIT
:
8421 case OMP_CLAUSE_DIST_SCHEDULE
:
8422 case OMP_CLAUSE_DEVICE
:
8423 case OMP_CLAUSE_PRIORITY
:
8424 case OMP_CLAUSE_GRAINSIZE
:
8425 case OMP_CLAUSE_NUM_TASKS
:
8426 case OMP_CLAUSE_HINT
:
8427 case OMP_CLAUSE__CILK_FOR_COUNT_
:
8428 case OMP_CLAUSE_ASYNC
:
8429 case OMP_CLAUSE_WAIT
:
8430 case OMP_CLAUSE_NUM_GANGS
:
8431 case OMP_CLAUSE_NUM_WORKERS
:
8432 case OMP_CLAUSE_VECTOR_LENGTH
:
8433 case OMP_CLAUSE_WORKER
:
8434 case OMP_CLAUSE_VECTOR
:
8435 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
8436 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8440 case OMP_CLAUSE_GANG
:
8441 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
8442 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8444 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
8445 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8449 case OMP_CLAUSE_NOWAIT
:
8450 case OMP_CLAUSE_ORDERED
:
8451 case OMP_CLAUSE_UNTIED
:
8452 case OMP_CLAUSE_COLLAPSE
:
8453 case OMP_CLAUSE_TILE
:
8454 case OMP_CLAUSE_AUTO
:
8455 case OMP_CLAUSE_SEQ
:
8456 case OMP_CLAUSE_INDEPENDENT
:
8457 case OMP_CLAUSE_MERGEABLE
:
8458 case OMP_CLAUSE_PROC_BIND
:
8459 case OMP_CLAUSE_SAFELEN
:
8460 case OMP_CLAUSE_SIMDLEN
:
8461 case OMP_CLAUSE_NOGROUP
:
8462 case OMP_CLAUSE_THREADS
:
8463 case OMP_CLAUSE_SIMD
:
8466 case OMP_CLAUSE_DEFAULTMAP
:
8467 ctx
->target_map_scalars_firstprivate
= false;
8470 case OMP_CLAUSE_ALIGNED
:
8471 decl
= OMP_CLAUSE_DECL (c
);
8472 if (error_operand_p (decl
))
8477 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
8478 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8483 if (!is_global_var (decl
)
8484 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
8485 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
8488 case OMP_CLAUSE_DEFAULT
:
8489 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
8496 if (code
== OACC_DATA
8497 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
8498 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
8501 *list_p
= OMP_CLAUSE_CHAIN (c
);
8503 list_p
= &OMP_CLAUSE_CHAIN (c
);
8506 gimplify_omp_ctxp
= ctx
;
8507 if (struct_map_to_clause
)
8508 delete struct_map_to_clause
;
8511 /* Return true if DECL is a candidate for shared to firstprivate
8512 optimization. We only consider non-addressable scalars, not
8513 too big, and not references. */
8516 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
8518 if (TREE_ADDRESSABLE (decl
))
8520 tree type
= TREE_TYPE (decl
);
8521 if (!is_gimple_reg_type (type
)
8522 || TREE_CODE (type
) == REFERENCE_TYPE
8523 || TREE_ADDRESSABLE (type
))
8525 /* Don't optimize too large decls, as each thread/task will have
8527 HOST_WIDE_INT len
= int_size_in_bytes (type
);
8528 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
8530 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
8535 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8536 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8537 GOVD_WRITTEN in outer contexts. */
8540 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
8542 for (; ctx
; ctx
= ctx
->outer_context
)
8544 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
8545 (splay_tree_key
) decl
);
8548 else if (n
->value
& GOVD_SHARED
)
8550 n
->value
|= GOVD_WRITTEN
;
8553 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
8558 /* Helper callback for walk_gimple_seq to discover possible stores
8559 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8560 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8564 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
8566 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
8575 if (handled_component_p (op
))
8576 op
= TREE_OPERAND (op
, 0);
8577 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
8578 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
8579 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
8584 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
8587 omp_mark_stores (gimplify_omp_ctxp
, op
);
8591 /* Helper callback for walk_gimple_seq to discover possible stores
8592 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8593 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8597 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
8598 bool *handled_ops_p
,
8599 struct walk_stmt_info
*wi
)
8601 gimple
*stmt
= gsi_stmt (*gsi_p
);
8602 switch (gimple_code (stmt
))
8604 /* Don't recurse on OpenMP constructs for which
8605 gimplify_adjust_omp_clauses already handled the bodies,
8606 except handle gimple_omp_for_pre_body. */
8607 case GIMPLE_OMP_FOR
:
8608 *handled_ops_p
= true;
8609 if (gimple_omp_for_pre_body (stmt
))
8610 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
8611 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
8613 case GIMPLE_OMP_PARALLEL
:
8614 case GIMPLE_OMP_TASK
:
8615 case GIMPLE_OMP_SECTIONS
:
8616 case GIMPLE_OMP_SINGLE
:
8617 case GIMPLE_OMP_TARGET
:
8618 case GIMPLE_OMP_TEAMS
:
8619 case GIMPLE_OMP_CRITICAL
:
8620 *handled_ops_p
= true;
8628 struct gimplify_adjust_omp_clauses_data
8634 /* For all variables that were not actually used within the context,
8635 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8638 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
8640 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
8642 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
8643 tree decl
= (tree
) n
->key
;
8644 unsigned flags
= n
->value
;
8645 enum omp_clause_code code
;
8649 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
8651 if ((flags
& GOVD_SEEN
) == 0)
8653 if (flags
& GOVD_DEBUG_PRIVATE
)
8655 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
8656 private_debug
= true;
8658 else if (flags
& GOVD_MAP
)
8659 private_debug
= false;
8662 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
8663 !!(flags
& GOVD_SHARED
));
8665 code
= OMP_CLAUSE_PRIVATE
;
8666 else if (flags
& GOVD_MAP
)
8668 code
= OMP_CLAUSE_MAP
;
8669 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
8670 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
8672 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
8676 else if (flags
& GOVD_SHARED
)
8678 if (is_global_var (decl
))
8680 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
8684 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8685 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
8686 | GOVD_PRIVATE
| GOVD_REDUCTION
8687 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
8689 ctx
= ctx
->outer_context
;
8694 code
= OMP_CLAUSE_SHARED
;
8696 else if (flags
& GOVD_PRIVATE
)
8697 code
= OMP_CLAUSE_PRIVATE
;
8698 else if (flags
& GOVD_FIRSTPRIVATE
)
8700 code
= OMP_CLAUSE_FIRSTPRIVATE
;
8701 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
8702 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
8703 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
8705 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8706 "%<target%> construct", decl
);
8710 else if (flags
& GOVD_LASTPRIVATE
)
8711 code
= OMP_CLAUSE_LASTPRIVATE
;
8712 else if (flags
& GOVD_ALIGNED
)
8717 if (((flags
& GOVD_LASTPRIVATE
)
8718 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
8719 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8720 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
8722 tree chain
= *list_p
;
8723 clause
= build_omp_clause (input_location
, code
);
8724 OMP_CLAUSE_DECL (clause
) = decl
;
8725 OMP_CLAUSE_CHAIN (clause
) = chain
;
8727 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
8728 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
8729 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
8730 else if (code
== OMP_CLAUSE_SHARED
8731 && (flags
& GOVD_WRITTEN
) == 0
8732 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8733 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
8734 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
8735 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
8736 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
8738 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
8739 OMP_CLAUSE_DECL (nc
) = decl
;
8740 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
8741 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
8742 OMP_CLAUSE_DECL (clause
)
8743 = build_simple_mem_ref_loc (input_location
, decl
);
8744 OMP_CLAUSE_DECL (clause
)
8745 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
8746 build_int_cst (build_pointer_type (char_type_node
), 0));
8747 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
8748 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
8749 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
8750 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
8751 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
8752 OMP_CLAUSE_CHAIN (nc
) = chain
;
8753 OMP_CLAUSE_CHAIN (clause
) = nc
;
8754 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8755 gimplify_omp_ctxp
= ctx
->outer_context
;
8756 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
8757 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
8758 gimplify_omp_ctxp
= ctx
;
8760 else if (code
== OMP_CLAUSE_MAP
)
8763 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8764 switch (flags
& (GOVD_MAP_TO_ONLY
8766 | GOVD_MAP_FORCE_PRESENT
))
8769 kind
= GOMP_MAP_TOFROM
;
8771 case GOVD_MAP_FORCE
:
8772 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
8774 case GOVD_MAP_TO_ONLY
:
8777 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
8778 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
8780 case GOVD_MAP_FORCE_PRESENT
:
8781 kind
= GOMP_MAP_FORCE_PRESENT
;
8786 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
8787 if (DECL_SIZE (decl
)
8788 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
8790 tree decl2
= DECL_VALUE_EXPR (decl
);
8791 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
8792 decl2
= TREE_OPERAND (decl2
, 0);
8793 gcc_assert (DECL_P (decl2
));
8794 tree mem
= build_simple_mem_ref (decl2
);
8795 OMP_CLAUSE_DECL (clause
) = mem
;
8796 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8797 if (gimplify_omp_ctxp
->outer_context
)
8799 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
8800 omp_notice_variable (ctx
, decl2
, true);
8801 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
8803 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
8805 OMP_CLAUSE_DECL (nc
) = decl
;
8806 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
8807 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
8808 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
8810 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
8811 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
8812 OMP_CLAUSE_CHAIN (clause
) = nc
;
8814 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
8815 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
8817 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
8818 OMP_CLAUSE_SIZE (clause
)
8819 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
8820 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8821 gimplify_omp_ctxp
= ctx
->outer_context
;
8822 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
8823 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
8824 gimplify_omp_ctxp
= ctx
;
8825 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
8827 OMP_CLAUSE_DECL (nc
) = decl
;
8828 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
8829 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
8830 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
8831 OMP_CLAUSE_CHAIN (clause
) = nc
;
8834 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
8836 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
8838 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
8839 OMP_CLAUSE_DECL (nc
) = decl
;
8840 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
8841 OMP_CLAUSE_CHAIN (nc
) = chain
;
8842 OMP_CLAUSE_CHAIN (clause
) = nc
;
8843 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8844 gimplify_omp_ctxp
= ctx
->outer_context
;
8845 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
8846 gimplify_omp_ctxp
= ctx
;
8849 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8850 gimplify_omp_ctxp
= ctx
->outer_context
;
8851 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
);
8852 if (gimplify_omp_ctxp
)
8853 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
8854 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
8855 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
8856 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
8858 gimplify_omp_ctxp
= ctx
;
8863 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
8864 enum tree_code code
)
8866 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8871 struct gimplify_omp_ctx
*octx
;
8872 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
8873 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
8877 struct walk_stmt_info wi
;
8878 memset (&wi
, 0, sizeof (wi
));
8879 walk_gimple_seq (body
, omp_find_stores_stmt
,
8880 omp_find_stores_op
, &wi
);
8883 while ((c
= *list_p
) != NULL
)
8886 bool remove
= false;
8888 switch (OMP_CLAUSE_CODE (c
))
8890 case OMP_CLAUSE_FIRSTPRIVATE
:
8891 if ((ctx
->region_type
& ORT_TARGET
)
8892 && (ctx
->region_type
& ORT_ACC
) == 0
8893 && TYPE_ATOMIC (strip_array_types
8894 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
8896 error_at (OMP_CLAUSE_LOCATION (c
),
8897 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8898 "%<target%> construct", OMP_CLAUSE_DECL (c
));
8903 case OMP_CLAUSE_PRIVATE
:
8904 case OMP_CLAUSE_SHARED
:
8905 case OMP_CLAUSE_LINEAR
:
8906 decl
= OMP_CLAUSE_DECL (c
);
8907 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8908 remove
= !(n
->value
& GOVD_SEEN
);
8911 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
8912 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
8913 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
8915 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
8916 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
8918 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
8919 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
8921 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8922 && (n
->value
& GOVD_WRITTEN
) == 0
8924 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8925 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
8926 else if (DECL_P (decl
)
8927 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8928 && (n
->value
& GOVD_WRITTEN
) != 0)
8929 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
8930 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
8931 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8932 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
8936 case OMP_CLAUSE_LASTPRIVATE
:
8937 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8938 accurately reflect the presence of a FIRSTPRIVATE clause. */
8939 decl
= OMP_CLAUSE_DECL (c
);
8940 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8941 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
8942 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
8943 if (code
== OMP_DISTRIBUTE
8944 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
8947 error_at (OMP_CLAUSE_LOCATION (c
),
8948 "same variable used in %<firstprivate%> and "
8949 "%<lastprivate%> clauses on %<distribute%> "
8953 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
8955 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8956 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
8959 case OMP_CLAUSE_ALIGNED
:
8960 decl
= OMP_CLAUSE_DECL (c
);
8961 if (!is_global_var (decl
))
8963 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8964 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
8965 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
8967 struct gimplify_omp_ctx
*octx
;
8969 && (n
->value
& (GOVD_DATA_SHARE_CLASS
8970 & ~GOVD_FIRSTPRIVATE
)))
8973 for (octx
= ctx
->outer_context
; octx
;
8974 octx
= octx
->outer_context
)
8976 n
= splay_tree_lookup (octx
->variables
,
8977 (splay_tree_key
) decl
);
8980 if (n
->value
& GOVD_LOCAL
)
8982 /* We have to avoid assigning a shared variable
8983 to itself when trying to add
8984 __builtin_assume_aligned. */
8985 if (n
->value
& GOVD_SHARED
)
8993 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
8995 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8996 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
9001 case OMP_CLAUSE_MAP
:
9002 if (code
== OMP_TARGET_EXIT_DATA
9003 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
9008 decl
= OMP_CLAUSE_DECL (c
);
9009 /* Data clauses associated with acc parallel reductions must be
9010 compatible with present_or_copy. Warn and adjust the clause
9011 if that is not the case. */
9012 if (ctx
->region_type
== ORT_ACC_PARALLEL
)
9014 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
9018 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
9020 if (n
&& (n
->value
& GOVD_REDUCTION
))
9022 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
9024 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
9025 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
9026 && kind
!= GOMP_MAP_FORCE_PRESENT
9027 && kind
!= GOMP_MAP_POINTER
)
9029 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9030 "incompatible data clause with reduction "
9031 "on %qE; promoting to present_or_copy",
9033 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
9039 if ((ctx
->region_type
& ORT_TARGET
) != 0
9040 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
9042 if (TREE_CODE (decl
) == INDIRECT_REF
9043 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
9044 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9046 decl
= TREE_OPERAND (decl
, 0);
9047 if (TREE_CODE (decl
) == COMPONENT_REF
)
9049 while (TREE_CODE (decl
) == COMPONENT_REF
)
9050 decl
= TREE_OPERAND (decl
, 0);
9053 n
= splay_tree_lookup (ctx
->variables
,
9054 (splay_tree_key
) decl
);
9055 if (!(n
->value
& GOVD_SEEN
))
9062 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9063 if ((ctx
->region_type
& ORT_TARGET
) != 0
9064 && !(n
->value
& GOVD_SEEN
)
9065 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
9066 && (!is_global_var (decl
)
9067 || !lookup_attribute ("omp declare target link",
9068 DECL_ATTRIBUTES (decl
))))
9071 /* For struct element mapping, if struct is never referenced
9072 in target block and none of the mapping has always modifier,
9073 remove all the struct element mappings, which immediately
9074 follow the GOMP_MAP_STRUCT map clause. */
9075 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
9077 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
9079 OMP_CLAUSE_CHAIN (c
)
9080 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
9083 else if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
9084 && code
== OMP_TARGET_EXIT_DATA
)
9086 else if (DECL_SIZE (decl
)
9087 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
9088 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
9089 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
9090 && (OMP_CLAUSE_MAP_KIND (c
)
9091 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9093 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9094 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9096 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
9098 tree decl2
= DECL_VALUE_EXPR (decl
);
9099 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
9100 decl2
= TREE_OPERAND (decl2
, 0);
9101 gcc_assert (DECL_P (decl2
));
9102 tree mem
= build_simple_mem_ref (decl2
);
9103 OMP_CLAUSE_DECL (c
) = mem
;
9104 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9105 if (ctx
->outer_context
)
9107 omp_notice_variable (ctx
->outer_context
, decl2
, true);
9108 omp_notice_variable (ctx
->outer_context
,
9109 OMP_CLAUSE_SIZE (c
), true);
9111 if (((ctx
->region_type
& ORT_TARGET
) != 0
9112 || !ctx
->target_firstprivatize_array_bases
)
9113 && ((n
->value
& GOVD_SEEN
) == 0
9114 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
9116 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9118 OMP_CLAUSE_DECL (nc
) = decl
;
9119 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
9120 if (ctx
->target_firstprivatize_array_bases
)
9121 OMP_CLAUSE_SET_MAP_KIND (nc
,
9122 GOMP_MAP_FIRSTPRIVATE_POINTER
);
9124 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
9125 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
9126 OMP_CLAUSE_CHAIN (c
) = nc
;
9132 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9133 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
9134 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
9135 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
9141 case OMP_CLAUSE_FROM
:
9142 case OMP_CLAUSE__CACHE_
:
9143 decl
= OMP_CLAUSE_DECL (c
);
9146 if (DECL_SIZE (decl
)
9147 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
9149 tree decl2
= DECL_VALUE_EXPR (decl
);
9150 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
9151 decl2
= TREE_OPERAND (decl2
, 0);
9152 gcc_assert (DECL_P (decl2
));
9153 tree mem
= build_simple_mem_ref (decl2
);
9154 OMP_CLAUSE_DECL (c
) = mem
;
9155 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9156 if (ctx
->outer_context
)
9158 omp_notice_variable (ctx
->outer_context
, decl2
, true);
9159 omp_notice_variable (ctx
->outer_context
,
9160 OMP_CLAUSE_SIZE (c
), true);
9163 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9164 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
9167 case OMP_CLAUSE_REDUCTION
:
9168 decl
= OMP_CLAUSE_DECL (c
);
9169 /* OpenACC reductions need a present_or_copy data clause.
9170 Add one if necessary. Error is the reduction is private. */
9171 if (ctx
->region_type
== ORT_ACC_PARALLEL
)
9173 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9174 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
9175 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
9176 "reduction on %qE", DECL_NAME (decl
));
9177 else if ((n
->value
& GOVD_MAP
) == 0)
9179 tree next
= OMP_CLAUSE_CHAIN (c
);
9180 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
9181 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
9182 OMP_CLAUSE_DECL (nc
) = decl
;
9183 OMP_CLAUSE_CHAIN (c
) = nc
;
9184 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
9187 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
9188 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
9190 nc
= OMP_CLAUSE_CHAIN (nc
);
9192 OMP_CLAUSE_CHAIN (nc
) = next
;
9193 n
->value
|= GOVD_MAP
;
9197 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9198 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
9200 case OMP_CLAUSE_COPYIN
:
9201 case OMP_CLAUSE_COPYPRIVATE
:
9203 case OMP_CLAUSE_NUM_THREADS
:
9204 case OMP_CLAUSE_NUM_TEAMS
:
9205 case OMP_CLAUSE_THREAD_LIMIT
:
9206 case OMP_CLAUSE_DIST_SCHEDULE
:
9207 case OMP_CLAUSE_DEVICE
:
9208 case OMP_CLAUSE_SCHEDULE
:
9209 case OMP_CLAUSE_NOWAIT
:
9210 case OMP_CLAUSE_ORDERED
:
9211 case OMP_CLAUSE_DEFAULT
:
9212 case OMP_CLAUSE_UNTIED
:
9213 case OMP_CLAUSE_COLLAPSE
:
9214 case OMP_CLAUSE_FINAL
:
9215 case OMP_CLAUSE_MERGEABLE
:
9216 case OMP_CLAUSE_PROC_BIND
:
9217 case OMP_CLAUSE_SAFELEN
:
9218 case OMP_CLAUSE_SIMDLEN
:
9219 case OMP_CLAUSE_DEPEND
:
9220 case OMP_CLAUSE_PRIORITY
:
9221 case OMP_CLAUSE_GRAINSIZE
:
9222 case OMP_CLAUSE_NUM_TASKS
:
9223 case OMP_CLAUSE_NOGROUP
:
9224 case OMP_CLAUSE_THREADS
:
9225 case OMP_CLAUSE_SIMD
:
9226 case OMP_CLAUSE_HINT
:
9227 case OMP_CLAUSE_DEFAULTMAP
:
9228 case OMP_CLAUSE_USE_DEVICE_PTR
:
9229 case OMP_CLAUSE_IS_DEVICE_PTR
:
9230 case OMP_CLAUSE__CILK_FOR_COUNT_
:
9231 case OMP_CLAUSE_ASYNC
:
9232 case OMP_CLAUSE_WAIT
:
9233 case OMP_CLAUSE_INDEPENDENT
:
9234 case OMP_CLAUSE_NUM_GANGS
:
9235 case OMP_CLAUSE_NUM_WORKERS
:
9236 case OMP_CLAUSE_VECTOR_LENGTH
:
9237 case OMP_CLAUSE_GANG
:
9238 case OMP_CLAUSE_WORKER
:
9239 case OMP_CLAUSE_VECTOR
:
9240 case OMP_CLAUSE_AUTO
:
9241 case OMP_CLAUSE_SEQ
:
9242 case OMP_CLAUSE_TILE
:
9250 *list_p
= OMP_CLAUSE_CHAIN (c
);
9252 list_p
= &OMP_CLAUSE_CHAIN (c
);
9255 /* Add in any implicit data sharing. */
9256 struct gimplify_adjust_omp_clauses_data data
;
9257 data
.list_p
= list_p
;
9259 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
9261 gimplify_omp_ctxp
= ctx
->outer_context
;
9262 delete_omp_context (ctx
);
9265 /* Gimplify OACC_CACHE. */
9268 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
9270 tree expr
= *expr_p
;
9272 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
9274 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
9277 /* TODO: Do something sensible with this information. */
9279 *expr_p
= NULL_TREE
;
9282 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9283 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9284 kind. The entry kind will replace the one in CLAUSE, while the exit
9285 kind will be used in a new omp_clause and returned to the caller. */
9288 gimplify_oacc_declare_1 (tree clause
)
9290 HOST_WIDE_INT kind
, new_op
;
9294 kind
= OMP_CLAUSE_MAP_KIND (clause
);
9298 case GOMP_MAP_ALLOC
:
9299 case GOMP_MAP_FORCE_ALLOC
:
9300 case GOMP_MAP_FORCE_TO
:
9301 new_op
= GOMP_MAP_DELETE
;
9305 case GOMP_MAP_FORCE_FROM
:
9306 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
9307 new_op
= GOMP_MAP_FORCE_FROM
;
9311 case GOMP_MAP_FORCE_TOFROM
:
9312 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_TO
);
9313 new_op
= GOMP_MAP_FORCE_FROM
;
9318 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
9319 new_op
= GOMP_MAP_FROM
;
9323 case GOMP_MAP_TOFROM
:
9324 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
9325 new_op
= GOMP_MAP_FROM
;
9329 case GOMP_MAP_DEVICE_RESIDENT
:
9330 case GOMP_MAP_FORCE_DEVICEPTR
:
9331 case GOMP_MAP_FORCE_PRESENT
:
9333 case GOMP_MAP_POINTER
:
9344 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
9345 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
9346 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
9352 /* Gimplify OACC_DECLARE. */
9355 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
9357 tree expr
= *expr_p
;
9359 tree clauses
, t
, decl
;
9361 clauses
= OACC_DECLARE_CLAUSES (expr
);
9363 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
9364 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
9366 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
9368 decl
= OMP_CLAUSE_DECL (t
);
9370 if (TREE_CODE (decl
) == MEM_REF
)
9371 decl
= TREE_OPERAND (decl
, 0);
9373 if (VAR_P (decl
) && !is_oacc_declared (decl
))
9375 tree attr
= get_identifier ("oacc declare target");
9376 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
9377 DECL_ATTRIBUTES (decl
));
9381 && !is_global_var (decl
)
9382 && DECL_CONTEXT (decl
) == current_function_decl
)
9384 tree c
= gimplify_oacc_declare_1 (t
);
9387 if (oacc_declare_returns
== NULL
)
9388 oacc_declare_returns
= new hash_map
<tree
, tree
>;
9390 oacc_declare_returns
->put (decl
, c
);
9394 if (gimplify_omp_ctxp
)
9395 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
9398 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
9401 gimplify_seq_add_stmt (pre_p
, stmt
);
9403 *expr_p
= NULL_TREE
;
9406 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9407 gimplification of the body, as well as scanning the body for used
9408 variables. We need to do this scan now, because variable-sized
9409 decls will be decomposed during gimplification. */
9412 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
9414 tree expr
= *expr_p
;
9416 gimple_seq body
= NULL
;
9418 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
9419 OMP_PARALLEL_COMBINED (expr
)
9420 ? ORT_COMBINED_PARALLEL
9421 : ORT_PARALLEL
, OMP_PARALLEL
);
9423 push_gimplify_context ();
9425 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
9426 if (gimple_code (g
) == GIMPLE_BIND
)
9427 pop_gimplify_context (g
);
9429 pop_gimplify_context (NULL
);
9431 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
9434 g
= gimple_build_omp_parallel (body
,
9435 OMP_PARALLEL_CLAUSES (expr
),
9436 NULL_TREE
, NULL_TREE
);
9437 if (OMP_PARALLEL_COMBINED (expr
))
9438 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
9439 gimplify_seq_add_stmt (pre_p
, g
);
9440 *expr_p
= NULL_TREE
;
9443 /* Gimplify the contents of an OMP_TASK statement. This involves
9444 gimplification of the body, as well as scanning the body for used
9445 variables. We need to do this scan now, because variable-sized
9446 decls will be decomposed during gimplification. */
9449 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
9451 tree expr
= *expr_p
;
9453 gimple_seq body
= NULL
;
9455 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
9456 omp_find_clause (OMP_TASK_CLAUSES (expr
),
9458 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
9460 push_gimplify_context ();
9462 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
9463 if (gimple_code (g
) == GIMPLE_BIND
)
9464 pop_gimplify_context (g
);
9466 pop_gimplify_context (NULL
);
9468 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
9471 g
= gimple_build_omp_task (body
,
9472 OMP_TASK_CLAUSES (expr
),
9473 NULL_TREE
, NULL_TREE
,
9474 NULL_TREE
, NULL_TREE
, NULL_TREE
);
9475 gimplify_seq_add_stmt (pre_p
, g
);
9476 *expr_p
= NULL_TREE
;
9479 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9480 with non-NULL OMP_FOR_INIT. */
9483 find_combined_omp_for (tree
*tp
, int *walk_subtrees
, void *)
9486 switch (TREE_CODE (*tp
))
9492 if (OMP_FOR_INIT (*tp
) != NULL_TREE
)
9496 case STATEMENT_LIST
:
9506 /* Gimplify the gross structure of an OMP_FOR statement. */
9508 static enum gimplify_status
9509 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
9511 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
9512 enum gimplify_status ret
= GS_ALL_DONE
;
9513 enum gimplify_status tret
;
9515 gimple_seq for_body
, for_pre_body
;
9517 bitmap has_decl_expr
= NULL
;
9518 enum omp_region_type ort
= ORT_WORKSHARE
;
9520 orig_for_stmt
= for_stmt
= *expr_p
;
9522 switch (TREE_CODE (for_stmt
))
9526 case OMP_DISTRIBUTE
:
9532 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
9533 ort
= ORT_UNTIED_TASK
;
9545 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9546 clause for the IV. */
9547 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
9549 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
9550 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
9551 decl
= TREE_OPERAND (t
, 0);
9552 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9553 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9554 && OMP_CLAUSE_DECL (c
) == decl
)
9556 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
9561 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
9563 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
9564 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
9565 find_combined_omp_for
, NULL
, NULL
);
9566 if (inner_for_stmt
== NULL_TREE
)
9568 gcc_assert (seen_error ());
9569 *expr_p
= NULL_TREE
;
9574 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
9575 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
9576 TREE_CODE (for_stmt
));
9578 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
9579 gimplify_omp_ctxp
->distribute
= true;
9581 /* Handle OMP_FOR_INIT. */
9582 for_pre_body
= NULL
;
9583 if (ort
== ORT_SIMD
&& OMP_FOR_PRE_BODY (for_stmt
))
9585 has_decl_expr
= BITMAP_ALLOC (NULL
);
9586 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
9587 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
9590 t
= OMP_FOR_PRE_BODY (for_stmt
);
9591 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
9593 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
9595 tree_stmt_iterator si
;
9596 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
9600 if (TREE_CODE (t
) == DECL_EXPR
9601 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
9602 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
9606 if (OMP_FOR_PRE_BODY (for_stmt
))
9608 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
9609 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
9612 struct gimplify_omp_ctx ctx
;
9613 memset (&ctx
, 0, sizeof (ctx
));
9614 ctx
.region_type
= ORT_NONE
;
9615 gimplify_omp_ctxp
= &ctx
;
9616 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
9617 gimplify_omp_ctxp
= NULL
;
9620 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
9622 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
9623 for_stmt
= inner_for_stmt
;
9625 /* For taskloop, need to gimplify the start, end and step before the
9626 taskloop, outside of the taskloop omp context. */
9627 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
9629 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
9631 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
9632 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
9635 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
9636 pre_p
, NULL
, false);
9637 tree c
= build_omp_clause (input_location
,
9638 OMP_CLAUSE_FIRSTPRIVATE
);
9639 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
9640 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
9641 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
9644 /* Handle OMP_FOR_COND. */
9645 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
9646 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
9649 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
9650 gimple_seq_empty_p (for_pre_body
)
9651 ? pre_p
: &for_pre_body
, NULL
,
9653 tree c
= build_omp_clause (input_location
,
9654 OMP_CLAUSE_FIRSTPRIVATE
);
9655 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
9656 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
9657 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
9660 /* Handle OMP_FOR_INCR. */
9661 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
9662 if (TREE_CODE (t
) == MODIFY_EXPR
)
9664 decl
= TREE_OPERAND (t
, 0);
9665 t
= TREE_OPERAND (t
, 1);
9666 tree
*tp
= &TREE_OPERAND (t
, 1);
9667 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
9668 tp
= &TREE_OPERAND (t
, 0);
9670 if (!is_gimple_constant (*tp
))
9672 gimple_seq
*seq
= gimple_seq_empty_p (for_pre_body
)
9673 ? pre_p
: &for_pre_body
;
9674 *tp
= get_initialized_tmp_var (*tp
, seq
, NULL
, false);
9675 tree c
= build_omp_clause (input_location
,
9676 OMP_CLAUSE_FIRSTPRIVATE
);
9677 OMP_CLAUSE_DECL (c
) = *tp
;
9678 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
9679 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
9684 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
9688 if (orig_for_stmt
!= for_stmt
)
9689 gimplify_omp_ctxp
->combined_loop
= true;
9692 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
9693 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
9694 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
9695 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
9697 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
9698 bool is_doacross
= false;
9699 if (c
&& OMP_CLAUSE_ORDERED_EXPR (c
))
9702 gimplify_omp_ctxp
->loop_iter_var
.create (TREE_VEC_LENGTH
9703 (OMP_FOR_INIT (for_stmt
))
9706 int collapse
= 1, tile
= 0;
9707 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
9709 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
9710 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
9712 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
9713 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
9715 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
9716 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
9717 decl
= TREE_OPERAND (t
, 0);
9718 gcc_assert (DECL_P (decl
));
9719 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
9720 || POINTER_TYPE_P (TREE_TYPE (decl
)));
9723 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
9724 gimplify_omp_ctxp
->loop_iter_var
.quick_push
9725 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
));
9727 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
9728 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
9731 /* Make sure the iteration variable is private. */
9733 tree c2
= NULL_TREE
;
9734 if (orig_for_stmt
!= for_stmt
)
9735 /* Do this only on innermost construct for combined ones. */;
9736 else if (ort
== ORT_SIMD
)
9738 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
9739 (splay_tree_key
) decl
);
9740 omp_is_private (gimplify_omp_ctxp
, decl
,
9741 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
9743 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
9744 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
9745 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
9747 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
9748 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
9749 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
9751 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
9753 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
9754 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
9756 struct gimplify_omp_ctx
*outer
9757 = gimplify_omp_ctxp
->outer_context
;
9758 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9760 if (outer
->region_type
== ORT_WORKSHARE
9761 && outer
->combined_loop
)
9763 n
= splay_tree_lookup (outer
->variables
,
9764 (splay_tree_key
)decl
);
9765 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
9767 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
9768 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
9772 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
9774 && octx
->region_type
== ORT_COMBINED_PARALLEL
9775 && octx
->outer_context
9776 && (octx
->outer_context
->region_type
9778 && octx
->outer_context
->combined_loop
)
9780 octx
= octx
->outer_context
;
9781 n
= splay_tree_lookup (octx
->variables
,
9782 (splay_tree_key
)decl
);
9783 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
9785 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
9786 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
9793 OMP_CLAUSE_DECL (c
) = decl
;
9794 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
9795 OMP_FOR_CLAUSES (for_stmt
) = c
;
9796 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
9797 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9799 if (outer
->region_type
== ORT_WORKSHARE
9800 && outer
->combined_loop
)
9802 if (outer
->outer_context
9803 && (outer
->outer_context
->region_type
9804 == ORT_COMBINED_PARALLEL
))
9805 outer
= outer
->outer_context
;
9806 else if (omp_check_private (outer
, decl
, false))
9809 else if (((outer
->region_type
& ORT_TASK
) != 0)
9810 && outer
->combined_loop
9811 && !omp_check_private (gimplify_omp_ctxp
,
9814 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
9816 omp_notice_variable (outer
, decl
, true);
9821 n
= splay_tree_lookup (outer
->variables
,
9822 (splay_tree_key
)decl
);
9823 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9825 omp_add_variable (outer
, decl
,
9826 GOVD_LASTPRIVATE
| GOVD_SEEN
);
9827 if (outer
->region_type
== ORT_COMBINED_PARALLEL
9828 && outer
->outer_context
9829 && (outer
->outer_context
->region_type
9831 && outer
->outer_context
->combined_loop
)
9833 outer
= outer
->outer_context
;
9834 n
= splay_tree_lookup (outer
->variables
,
9835 (splay_tree_key
)decl
);
9836 if (omp_check_private (outer
, decl
, false))
9839 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
9841 omp_add_variable (outer
, decl
,
9847 if (outer
&& outer
->outer_context
9848 && (outer
->outer_context
->region_type
9849 == ORT_COMBINED_TEAMS
))
9851 outer
= outer
->outer_context
;
9852 n
= splay_tree_lookup (outer
->variables
,
9853 (splay_tree_key
)decl
);
9855 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9856 omp_add_variable (outer
, decl
,
9857 GOVD_SHARED
| GOVD_SEEN
);
9861 if (outer
&& outer
->outer_context
)
9862 omp_notice_variable (outer
->outer_context
, decl
,
9872 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
9873 struct gimplify_omp_ctx
*outer
9874 = gimplify_omp_ctxp
->outer_context
;
9875 if (outer
&& lastprivate
)
9877 if (outer
->region_type
== ORT_WORKSHARE
9878 && outer
->combined_loop
)
9880 n
= splay_tree_lookup (outer
->variables
,
9881 (splay_tree_key
)decl
);
9882 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
9884 lastprivate
= false;
9887 else if (outer
->outer_context
9888 && (outer
->outer_context
->region_type
9889 == ORT_COMBINED_PARALLEL
))
9890 outer
= outer
->outer_context
;
9891 else if (omp_check_private (outer
, decl
, false))
9894 else if (((outer
->region_type
& ORT_TASK
) != 0)
9895 && outer
->combined_loop
9896 && !omp_check_private (gimplify_omp_ctxp
,
9899 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
9901 omp_notice_variable (outer
, decl
, true);
9906 n
= splay_tree_lookup (outer
->variables
,
9907 (splay_tree_key
)decl
);
9908 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9910 omp_add_variable (outer
, decl
,
9911 GOVD_LASTPRIVATE
| GOVD_SEEN
);
9912 if (outer
->region_type
== ORT_COMBINED_PARALLEL
9913 && outer
->outer_context
9914 && (outer
->outer_context
->region_type
9916 && outer
->outer_context
->combined_loop
)
9918 outer
= outer
->outer_context
;
9919 n
= splay_tree_lookup (outer
->variables
,
9920 (splay_tree_key
)decl
);
9921 if (omp_check_private (outer
, decl
, false))
9924 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
9926 omp_add_variable (outer
, decl
,
9932 if (outer
&& outer
->outer_context
9933 && (outer
->outer_context
->region_type
9934 == ORT_COMBINED_TEAMS
))
9936 outer
= outer
->outer_context
;
9937 n
= splay_tree_lookup (outer
->variables
,
9938 (splay_tree_key
)decl
);
9940 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9941 omp_add_variable (outer
, decl
,
9942 GOVD_SHARED
| GOVD_SEEN
);
9946 if (outer
&& outer
->outer_context
)
9947 omp_notice_variable (outer
->outer_context
, decl
,
9953 c
= build_omp_clause (input_location
,
9954 lastprivate
? OMP_CLAUSE_LASTPRIVATE
9955 : OMP_CLAUSE_PRIVATE
);
9956 OMP_CLAUSE_DECL (c
) = decl
;
9957 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
9958 OMP_FOR_CLAUSES (for_stmt
) = c
;
9959 omp_add_variable (gimplify_omp_ctxp
, decl
,
9960 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
9961 | GOVD_EXPLICIT
| GOVD_SEEN
);
9965 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
9966 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
9968 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
9970 /* If DECL is not a gimple register, create a temporary variable to act
9971 as an iteration counter. This is valid, since DECL cannot be
9972 modified in the body of the loop. Similarly for any iteration vars
9973 in simd with collapse > 1 where the iterator vars must be
9975 if (orig_for_stmt
!= for_stmt
)
9977 else if (!is_gimple_reg (decl
)
9979 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1))
9981 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9982 /* Make sure omp_add_variable is not called on it prematurely.
9983 We call it ourselves a few lines later. */
9984 gimplify_omp_ctxp
= NULL
;
9985 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
9986 gimplify_omp_ctxp
= ctx
;
9987 TREE_OPERAND (t
, 0) = var
;
9989 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
9992 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
9994 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
9995 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
9996 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
9997 OMP_CLAUSE_DECL (c2
) = var
;
9998 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
9999 OMP_FOR_CLAUSES (for_stmt
) = c2
;
10000 omp_add_variable (gimplify_omp_ctxp
, var
,
10001 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
10002 if (c
== NULL_TREE
)
10009 omp_add_variable (gimplify_omp_ctxp
, var
,
10010 GOVD_PRIVATE
| GOVD_SEEN
);
10015 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
10016 is_gimple_val
, fb_rvalue
, false);
10017 ret
= MIN (ret
, tret
);
10018 if (ret
== GS_ERROR
)
10021 /* Handle OMP_FOR_COND. */
10022 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
10023 gcc_assert (COMPARISON_CLASS_P (t
));
10024 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
10026 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
10027 is_gimple_val
, fb_rvalue
, false);
10028 ret
= MIN (ret
, tret
);
10030 /* Handle OMP_FOR_INCR. */
10031 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10032 switch (TREE_CODE (t
))
10034 case PREINCREMENT_EXPR
:
10035 case POSTINCREMENT_EXPR
:
10037 tree decl
= TREE_OPERAND (t
, 0);
10038 /* c_omp_for_incr_canonicalize_ptr() should have been
10039 called to massage things appropriately. */
10040 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
10042 if (orig_for_stmt
!= for_stmt
)
10044 t
= build_int_cst (TREE_TYPE (decl
), 1);
10046 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
10047 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
10048 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
10049 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
10053 case PREDECREMENT_EXPR
:
10054 case POSTDECREMENT_EXPR
:
10055 /* c_omp_for_incr_canonicalize_ptr() should have been
10056 called to massage things appropriately. */
10057 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
10058 if (orig_for_stmt
!= for_stmt
)
10060 t
= build_int_cst (TREE_TYPE (decl
), -1);
10062 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
10063 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
10064 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
10065 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
10069 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
10070 TREE_OPERAND (t
, 0) = var
;
10072 t
= TREE_OPERAND (t
, 1);
10073 switch (TREE_CODE (t
))
10076 if (TREE_OPERAND (t
, 1) == decl
)
10078 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
10079 TREE_OPERAND (t
, 0) = var
;
10085 case POINTER_PLUS_EXPR
:
10086 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
10087 TREE_OPERAND (t
, 0) = var
;
10090 gcc_unreachable ();
10093 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
10094 is_gimple_val
, fb_rvalue
, false);
10095 ret
= MIN (ret
, tret
);
10098 tree step
= TREE_OPERAND (t
, 1);
10099 tree stept
= TREE_TYPE (decl
);
10100 if (POINTER_TYPE_P (stept
))
10102 step
= fold_convert (stept
, step
);
10103 if (TREE_CODE (t
) == MINUS_EXPR
)
10104 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
10105 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
10106 if (step
!= TREE_OPERAND (t
, 1))
10108 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
10109 &for_pre_body
, NULL
,
10110 is_gimple_val
, fb_rvalue
, false);
10111 ret
= MIN (ret
, tret
);
10117 gcc_unreachable ();
10123 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
10126 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
10128 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10129 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
10130 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
10131 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10132 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
10133 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
10134 && OMP_CLAUSE_DECL (c
) == decl
)
10136 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
10140 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10141 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
10142 gcc_assert (TREE_OPERAND (t
, 0) == var
);
10143 t
= TREE_OPERAND (t
, 1);
10144 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
10145 || TREE_CODE (t
) == MINUS_EXPR
10146 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
10147 gcc_assert (TREE_OPERAND (t
, 0) == var
);
10148 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
10149 is_doacross
? var
: decl
,
10150 TREE_OPERAND (t
, 1));
10153 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
10154 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
10156 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
10157 gimplify_assign (decl
, t
, seq
);
10162 BITMAP_FREE (has_decl_expr
);
10164 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10166 push_gimplify_context ();
10167 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
10169 OMP_FOR_BODY (orig_for_stmt
)
10170 = build3 (BIND_EXPR
, void_type_node
, NULL
,
10171 OMP_FOR_BODY (orig_for_stmt
), NULL
);
10172 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
10176 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
10179 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10181 if (gimple_code (g
) == GIMPLE_BIND
)
10182 pop_gimplify_context (g
);
10184 pop_gimplify_context (NULL
);
10187 if (orig_for_stmt
!= for_stmt
)
10188 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
10190 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
10191 decl
= TREE_OPERAND (t
, 0);
10192 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10193 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10194 gimplify_omp_ctxp
= ctx
->outer_context
;
10195 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
10196 gimplify_omp_ctxp
= ctx
;
10197 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
10198 TREE_OPERAND (t
, 0) = var
;
10199 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10200 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
10201 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
10204 gimplify_adjust_omp_clauses (pre_p
, for_body
,
10205 &OMP_FOR_CLAUSES (orig_for_stmt
),
10206 TREE_CODE (orig_for_stmt
));
10209 switch (TREE_CODE (orig_for_stmt
))
10211 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
10212 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
10213 case CILK_SIMD
: kind
= GF_OMP_FOR_KIND_CILKSIMD
; break;
10214 case CILK_FOR
: kind
= GF_OMP_FOR_KIND_CILKFOR
; break;
10215 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
10216 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
10217 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
10219 gcc_unreachable ();
10221 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
10222 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
10224 if (orig_for_stmt
!= for_stmt
)
10225 gimple_omp_for_set_combined_p (gfor
, true);
10226 if (gimplify_omp_ctxp
10227 && (gimplify_omp_ctxp
->combined_loop
10228 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
10229 && gimplify_omp_ctxp
->outer_context
10230 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
10232 gimple_omp_for_set_combined_into_p (gfor
, true);
10233 if (gimplify_omp_ctxp
->combined_loop
)
10234 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
10236 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
10239 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
10241 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
10242 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
10243 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
10244 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
10245 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
10246 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
10247 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10248 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
10251 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10252 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10253 The outer taskloop stands for computing the number of iterations,
10254 counts for collapsed loops and holding taskloop specific clauses.
10255 The task construct stands for the effect of data sharing on the
10256 explicit task it creates and the inner taskloop stands for expansion
10257 of the static loop inside of the explicit task construct. */
10258 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10260 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
10261 tree task_clauses
= NULL_TREE
;
10262 tree c
= *gfor_clauses_ptr
;
10263 tree
*gtask_clauses_ptr
= &task_clauses
;
10264 tree outer_for_clauses
= NULL_TREE
;
10265 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
10266 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
10267 switch (OMP_CLAUSE_CODE (c
))
10269 /* These clauses are allowed on task, move them there. */
10270 case OMP_CLAUSE_SHARED
:
10271 case OMP_CLAUSE_FIRSTPRIVATE
:
10272 case OMP_CLAUSE_DEFAULT
:
10273 case OMP_CLAUSE_IF
:
10274 case OMP_CLAUSE_UNTIED
:
10275 case OMP_CLAUSE_FINAL
:
10276 case OMP_CLAUSE_MERGEABLE
:
10277 case OMP_CLAUSE_PRIORITY
:
10278 *gtask_clauses_ptr
= c
;
10279 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10281 case OMP_CLAUSE_PRIVATE
:
10282 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
10284 /* We want private on outer for and firstprivate
10287 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10288 OMP_CLAUSE_FIRSTPRIVATE
);
10289 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10290 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
10291 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
10292 *gforo_clauses_ptr
= c
;
10293 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10297 *gtask_clauses_ptr
= c
;
10298 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10301 /* These clauses go into outer taskloop clauses. */
10302 case OMP_CLAUSE_GRAINSIZE
:
10303 case OMP_CLAUSE_NUM_TASKS
:
10304 case OMP_CLAUSE_NOGROUP
:
10305 *gforo_clauses_ptr
= c
;
10306 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10308 /* Taskloop clause we duplicate on both taskloops. */
10309 case OMP_CLAUSE_COLLAPSE
:
10310 *gfor_clauses_ptr
= c
;
10311 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10312 *gforo_clauses_ptr
= copy_node (c
);
10313 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
10315 /* For lastprivate, keep the clause on inner taskloop, and add
10316 a shared clause on task. If the same decl is also firstprivate,
10317 add also firstprivate clause on the inner taskloop. */
10318 case OMP_CLAUSE_LASTPRIVATE
:
10319 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
10321 /* For taskloop C++ lastprivate IVs, we want:
10322 1) private on outer taskloop
10323 2) firstprivate and shared on task
10324 3) lastprivate on inner taskloop */
10326 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10327 OMP_CLAUSE_FIRSTPRIVATE
);
10328 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10329 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
10330 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
10331 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
10332 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10333 OMP_CLAUSE_PRIVATE
);
10334 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10335 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
10336 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
10337 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
10339 *gfor_clauses_ptr
= c
;
10340 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10342 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
10343 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10344 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
10345 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
10347 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
10350 gcc_unreachable ();
10352 *gfor_clauses_ptr
= NULL_TREE
;
10353 *gtask_clauses_ptr
= NULL_TREE
;
10354 *gforo_clauses_ptr
= NULL_TREE
;
10355 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
10356 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
10357 NULL_TREE
, NULL_TREE
, NULL_TREE
);
10358 gimple_omp_task_set_taskloop_p (g
, true);
10359 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
10361 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
10362 gimple_omp_for_collapse (gfor
),
10363 gimple_omp_for_pre_body (gfor
));
10364 gimple_omp_for_set_pre_body (gfor
, NULL
);
10365 gimple_omp_for_set_combined_p (gforo
, true);
10366 gimple_omp_for_set_combined_into_p (gfor
, true);
10367 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
10369 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
10370 tree v
= create_tmp_var (type
);
10371 gimple_omp_for_set_index (gforo
, i
, v
);
10372 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
10373 gimple_omp_for_set_initial (gforo
, i
, t
);
10374 gimple_omp_for_set_cond (gforo
, i
,
10375 gimple_omp_for_cond (gfor
, i
));
10376 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
10377 gimple_omp_for_set_final (gforo
, i
, t
);
10378 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
10379 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
10380 TREE_OPERAND (t
, 0) = v
;
10381 gimple_omp_for_set_incr (gforo
, i
, t
);
10382 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
10383 OMP_CLAUSE_DECL (t
) = v
;
10384 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
10385 gimple_omp_for_set_clauses (gforo
, t
);
10387 gimplify_seq_add_stmt (pre_p
, gforo
);
10390 gimplify_seq_add_stmt (pre_p
, gfor
);
10391 if (ret
!= GS_ALL_DONE
)
10393 *expr_p
= NULL_TREE
;
10394 return GS_ALL_DONE
;
10397 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10398 of OMP_TARGET's body. */
/* NOTE(review): this region of the file is a garbled extraction of GCC's
   gimplify.c -- physical lines are broken mid-statement, the original
   file's line numbers (10397, ...) are fused into the text, and lines are
   missing wherever the embedded numbering jumps (here 10405-10408 and
   10411-10417, which presumably held the OMP_TEAMS/BIND_EXPR case labels
   and the returns -- TODO: restore from the canonical source).  Only
   comments are added below; the code text is left byte-identical.  */
/* walk_tree callback: by default do not descend (*walk_subtrees = 0);
   the visible STATEMENT_LIST case re-enables descent so the walk only
   looks through statement containers for the teams construct.  */
10401 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
10403 *walk_subtrees
= 0;
10404 switch (TREE_CODE (*tp
))
10409 case STATEMENT_LIST
:
10410 *walk_subtrees
= 1;
10418 /* Helper function of optimize_target_teams, determine if the expression
10419 can be computed safely before the target construct on the host. */
/* NOTE(review): garbled extraction -- statements are split across lines,
   original line numbers are embedded, and lines are missing at every gap
   in the numbering (case labels, returns, closing braces).  Comments only
   are added; code text is byte-identical to the extracted original.  */
/* walk_tree callback.  A nonzero return (lines not visible here) marks the
   expression as NOT host-computable.  Visible logic: reject erroneous,
   non-integral, value-expr, thread-local, side-effecting or volatile
   decls; reject "omp declare target" globals; for function-local decls,
   consult the gimplify context splay tree and accept firstprivate or
   map(always,to:) variables; for TARGET_EXPRs, recurse into the slot.  */
10422 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
10428 *walk_subtrees
= 0;
10431 switch (TREE_CODE (*tp
))
10436 *walk_subtrees
= 0;
10437 if (error_operand_p (*tp
)
10438 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
10439 || DECL_HAS_VALUE_EXPR_P (*tp
)
10440 || DECL_THREAD_LOCAL_P (*tp
)
10441 || TREE_SIDE_EFFECTS (*tp
)
10442 || TREE_THIS_VOLATILE (*tp
))
10444 if (is_global_var (*tp
)
10445 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
10446 || lookup_attribute ("omp declare target link",
10447 DECL_ATTRIBUTES (*tp
))))
10450 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
10451 && !is_global_var (*tp
)
10452 && decl_function_context (*tp
) == current_function_decl
)
10454 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
10455 (splay_tree_key
) *tp
);
10458 if (gimplify_omp_ctxp
->target_map_scalars_firstprivate
)
10462 else if (n
->value
& GOVD_LOCAL
)
10464 else if (n
->value
& GOVD_FIRSTPRIVATE
)
10466 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
10467 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
10471 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
10475 if (TARGET_EXPR_INITIAL (*tp
)
10476 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
10478 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
10479 walk_subtrees
, NULL
);
10480 /* Allow some reasonable subset of integral arithmetics. */
10484 case TRUNC_DIV_EXPR
:
10485 case CEIL_DIV_EXPR
:
10486 case FLOOR_DIV_EXPR
:
10487 case ROUND_DIV_EXPR
:
10488 case TRUNC_MOD_EXPR
:
10489 case CEIL_MOD_EXPR
:
10490 case FLOOR_MOD_EXPR
:
10491 case ROUND_MOD_EXPR
:
10493 case EXACT_DIV_EXPR
:
10504 case NON_LVALUE_EXPR
:
10506 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
10509 /* And disallow anything else, except for comparisons. */
10511 if (COMPARISON_CLASS_P (*tp
))
10517 /* Try to determine if the num_teams and/or thread_limit expressions
10518 can have their values determined already before entering the
10520 INTEGER_CSTs trivially are,
10521 integral decls that are firstprivate (explicitly or implicitly)
10522 or explicitly map(always, to:) or map(always, tofrom:) on the target
10523 region too, and expressions involving simple arithmetics on those
10524 too, function calls are not ok, dereferencing something neither etc.
10525 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10526 EXPR based on what we find:
10527 0 stands for clause not specified at all, use implementation default
10528 -1 stands for value that can't be determined easily before entering
10529 the target construct.
10530 If teams construct is not present at all, use 1 for num_teams
10531 and 0 for thread_limit (only one team is involved, and the thread
10532 limit is implementation defined. */
/* NOTE(review): garbled extraction -- lines broken mid-statement, original
   line numbers embedded, and lines dropped at each numbering gap (the
   function signature line 10534, declarations of c/expr/p, braces).  Only
   comments added; code text byte-identical.  Flow, as far as visible:
   locate the teams construct via find_omp_teams, then for each
   num_teams/thread_limit clause either keep an INTEGER_CST, pre-gimplify
   a host-computable expression in the OUTER gimplify context, or record
   integer_minus_one_node; finally push THREAD_LIMIT and NUM_TEAMS clauses
   onto the target's clause chain.  */
10535 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
10537 tree body
= OMP_BODY (target
);
10538 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
10539 tree num_teams
= integer_zero_node
;
10540 tree thread_limit
= integer_zero_node
;
10541 location_t num_teams_loc
= EXPR_LOCATION (target
);
10542 location_t thread_limit_loc
= EXPR_LOCATION (target
);
10544 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
10546 if (teams
== NULL_TREE
)
10547 num_teams
= integer_one_node
;
10549 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10551 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
10554 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
10556 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
10559 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
10563 expr
= OMP_CLAUSE_OPERAND (c
, 0);
10564 if (TREE_CODE (expr
) == INTEGER_CST
)
10569 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
10571 *p
= integer_minus_one_node
;
/* Temporarily switch to the outer context so the expression is
   gimplified on the host side, before the target region.  */
10575 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
10576 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
10579 gimplify_omp_ctxp
= target_ctx
;
10580 *p
= integer_minus_one_node
;
10583 gimplify_omp_ctxp
= target_ctx
;
10584 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
10585 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
10587 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
10588 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
10589 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
10590 OMP_TARGET_CLAUSES (target
) = c
;
10591 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
10592 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
10593 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
10594 OMP_TARGET_CLAUSES (target
) = c
;
10597 /* Gimplify the gross structure of several OMP constructs. */
/* NOTE(review): garbled extraction -- lines broken mid-statement, original
   line numbers embedded, lines dropped at numbering gaps (case labels such
   as OMP_SECTIONS/OMP_SINGLE/OMP_TARGET/OMP_TEAMS, break statements,
   braces, the `stmt' declaration).  Comments only added; code text
   byte-identical.  Flow, as far as visible: map the tree code to an
   omp_region_type, scan clauses, optimize teams clauses for OMP_TARGET,
   gimplify the body (inside its own gimplify context for target-ish
   regions, wrapping data regions in a GIMPLE_TRY_FINALLY that calls the
   matching *_END builtin), adjust clauses, build the corresponding
   GIMPLE_OMP_* statement, and replace *expr_p with NULL_TREE.  */
10600 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
10602 tree expr
= *expr_p
;
10604 gimple_seq body
= NULL
;
10605 enum omp_region_type ort
;
10607 switch (TREE_CODE (expr
))
10611 ort
= ORT_WORKSHARE
;
10614 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
10617 ort
= ORT_ACC_KERNELS
;
10619 case OACC_PARALLEL
:
10620 ort
= ORT_ACC_PARALLEL
;
10623 ort
= ORT_ACC_DATA
;
10625 case OMP_TARGET_DATA
:
10626 ort
= ORT_TARGET_DATA
;
10629 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
10631 case OACC_HOST_DATA
:
10632 ort
= ORT_ACC_HOST_DATA
;
10635 gcc_unreachable ();
10637 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
10639 if (TREE_CODE (expr
) == OMP_TARGET
)
10640 optimize_target_teams (expr
, pre_p
);
10641 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0)
10643 push_gimplify_context ();
10644 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
10645 if (gimple_code (g
) == GIMPLE_BIND
)
10646 pop_gimplify_context (g
);
10648 pop_gimplify_context (NULL
);
10649 if ((ort
& ORT_TARGET_DATA
) != 0)
10651 enum built_in_function end_ix
;
10652 switch (TREE_CODE (expr
))
10655 case OACC_HOST_DATA
:
10656 end_ix
= BUILT_IN_GOACC_DATA_END
;
10658 case OMP_TARGET_DATA
:
10659 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
10662 gcc_unreachable ();
/* Wrap the data-region body in try/finally so the end builtin runs
   on every exit path.  */
10664 tree fn
= builtin_decl_explicit (end_ix
);
10665 g
= gimple_build_call (fn
, 0);
10666 gimple_seq cleanup
= NULL
;
10667 gimple_seq_add_stmt (&cleanup
, g
);
10668 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
10670 gimple_seq_add_stmt (&body
, g
);
10674 gimplify_and_add (OMP_BODY (expr
), &body
);
10675 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
10678 switch (TREE_CODE (expr
))
10681 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
10682 OMP_CLAUSES (expr
));
10685 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
10686 OMP_CLAUSES (expr
));
10688 case OACC_HOST_DATA
:
10689 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
10690 OMP_CLAUSES (expr
));
10692 case OACC_PARALLEL
:
10693 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
10694 OMP_CLAUSES (expr
));
10697 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
10700 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
10703 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
10704 OMP_CLAUSES (expr
));
10706 case OMP_TARGET_DATA
:
10707 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
10708 OMP_CLAUSES (expr
));
10711 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
10714 gcc_unreachable ();
10717 gimplify_seq_add_stmt (pre_p
, stmt
);
10718 *expr_p
= NULL_TREE
;
10721 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10722 target update constructs. */
/* NOTE(review): garbled extraction -- lines broken mid-statement, original
   line numbers embedded, lines missing at numbering gaps (the OACC_UPDATE
   case label, `kind'/`stmt' declarations, break statements).  Comments
   only added; code text byte-identical.  These standalone directives have
   no body: scan/adjust the standalone clauses, build a bodyless
   GIMPLE_OMP_TARGET of the right kind, and clear *expr_p.  */
10725 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
10727 tree expr
= *expr_p
;
10730 enum omp_region_type ort
= ORT_WORKSHARE
;
10732 switch (TREE_CODE (expr
))
10734 case OACC_ENTER_DATA
:
10735 case OACC_EXIT_DATA
:
10736 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
;
10740 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
10743 case OMP_TARGET_UPDATE
:
10744 kind
= GF_OMP_TARGET_KIND_UPDATE
;
10746 case OMP_TARGET_ENTER_DATA
:
10747 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
10749 case OMP_TARGET_EXIT_DATA
:
10750 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
10753 gcc_unreachable ();
10755 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
10756 ort
, TREE_CODE (expr
));
10757 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
10759 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
10761 gimplify_seq_add_stmt (pre_p
, stmt
);
10762 *expr_p
= NULL_TREE
;
10765 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10766 stabilized the lhs of the atomic operation as *ADDR. Return true if
10767 EXPR is this stabilized form. */
/* NOTE(review): garbled extraction -- lines broken mid-statement, original
   line numbers embedded, lines missing at numbering gaps (braces, the
   final returns).  Comments only added; code text byte-identical.
   Visible logic: strip useless conversions, then for an INDIRECT_REF peel
   matching conversion layers off both EXPR and ADDR in lock-step before
   comparing the underlying ADDR_EXPR operands; otherwise EXPR matches if
   it is exactly ADDR's pointee.  */
10770 goa_lhs_expr_p (tree expr
, tree addr
)
10772 /* Also include casts to other type variants. The C front end is fond
10773 of adding these for e.g. volatile variables. This is like
10774 STRIP_TYPE_NOPS but includes the main variant lookup. */
10775 STRIP_USELESS_TYPE_CONVERSION (expr
);
10777 if (TREE_CODE (expr
) == INDIRECT_REF
)
10779 expr
= TREE_OPERAND (expr
, 0);
10780 while (expr
!= addr
10781 && (CONVERT_EXPR_P (expr
)
10782 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
10783 && TREE_CODE (expr
) == TREE_CODE (addr
)
10784 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
10786 expr
= TREE_OPERAND (expr
, 0);
10787 addr
= TREE_OPERAND (addr
, 0);
10791 return (TREE_CODE (addr
) == ADDR_EXPR
10792 && TREE_CODE (expr
) == ADDR_EXPR
10793 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
10795 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
10800 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10801 expression does not involve the lhs, evaluate it into a temporary.
10802 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10803 or -1 if an error was encountered. */
/* NOTE(review): garbled extraction -- lines broken mid-statement, original
   line numbers embedded, lines missing at numbering gaps (the lhs_var
   parameter line, `saw_lhs' declaration, the *expr_p = lhs_var
   replacement, early returns, break statements, the final return).
   Comments only added; code text byte-identical.  Visible logic: if the
   whole expression is the stabilized lhs, substitute; already-gimple
   values need no work; otherwise recurse into operands of binary/unary
   ops, truth ops, COMPOUND_EXPR pre-evaluations and BIT_FIELD_REFs, and
   if the lhs was not seen, force the expression into a temporary via
   gimplify_expr (is_gimple_val).  */
10806 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
10809 tree expr
= *expr_p
;
10812 if (goa_lhs_expr_p (expr
, lhs_addr
))
10817 if (is_gimple_val (expr
))
10821 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
10824 case tcc_comparison
:
10825 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
10829 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
10832 case tcc_expression
:
10833 switch (TREE_CODE (expr
))
10835 case TRUTH_ANDIF_EXPR
:
10836 case TRUTH_ORIF_EXPR
:
10837 case TRUTH_AND_EXPR
:
10838 case TRUTH_OR_EXPR
:
10839 case TRUTH_XOR_EXPR
:
10840 case BIT_INSERT_EXPR
:
10841 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
10842 lhs_addr
, lhs_var
);
10844 case TRUTH_NOT_EXPR
:
10845 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
10846 lhs_addr
, lhs_var
);
10848 case COMPOUND_EXPR
:
10849 /* Break out any preevaluations from cp_build_modify_expr. */
10850 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
10851 expr
= TREE_OPERAND (expr
, 1))
10852 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
10854 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
);
10859 case tcc_reference
:
10860 if (TREE_CODE (expr
) == BIT_FIELD_REF
)
10861 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
10862 lhs_addr
, lhs_var
);
10870 enum gimplify_status gs
;
10871 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
10872 if (gs
!= GS_ALL_DONE
)
10879 /* Gimplify an OMP_ATOMIC statement. */
/* NOTE(review): garbled extraction -- lines broken mid-statement, original
   line numbers embedded, lines missing at numbering gaps (`tmp_load'
   declaration, GS_ERROR returns, the OMP_ATOMIC case, break statements).
   Comments only added; code text byte-identical.  Visible logic: lower an
   OMP_ATOMIC* tree into a GIMPLE_OMP_ATOMIC_LOAD / _STORE pair -- create
   a temporary register for the loaded value, stabilize the rhs against
   the lhs address, gimplify both, propagate the seq_cst flag, and for
   capture forms mark which of load/store produces the captured value.  */
10881 static enum gimplify_status
10882 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
10884 tree addr
= TREE_OPERAND (*expr_p
, 0);
10885 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
10886 ? NULL
: TREE_OPERAND (*expr_p
, 1);
10887 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
10889 gomp_atomic_load
*loadstmt
;
10890 gomp_atomic_store
*storestmt
;
10892 tmp_load
= create_tmp_reg (type
);
10893 if (rhs
&& goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
) < 0)
10896 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
10900 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
);
10901 gimplify_seq_add_stmt (pre_p
, loadstmt
);
10902 if (rhs
&& gimplify_expr (&rhs
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
10906 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
10908 storestmt
= gimple_build_omp_atomic_store (rhs
);
10909 gimplify_seq_add_stmt (pre_p
, storestmt
);
10910 if (OMP_ATOMIC_SEQ_CST (*expr_p
))
10912 gimple_omp_atomic_set_seq_cst (loadstmt
);
10913 gimple_omp_atomic_set_seq_cst (storestmt
);
10915 switch (TREE_CODE (*expr_p
))
10917 case OMP_ATOMIC_READ
:
10918 case OMP_ATOMIC_CAPTURE_OLD
:
10919 *expr_p
= tmp_load
;
10920 gimple_omp_atomic_set_need_value (loadstmt
);
10922 case OMP_ATOMIC_CAPTURE_NEW
:
10924 gimple_omp_atomic_set_need_value (storestmt
);
10931 return GS_ALL_DONE
;
10934 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10935 body, and adding some EH bits. */
/* NOTE(review): garbled extraction -- lines broken mid-statement, original
   line numbers embedded, lines missing at numbering gaps (`body_stmt' and
   `subcode' declarations, the temp-assignment tail around line 10970).
   Comments only added; code text byte-identical.  Visible logic: ensure
   the transaction body is a BIND_EXPR (so OMP decls have a home), gimplify
   it in its own context, build a GIMPLE_TRANSACTION with the
   outer/relaxed subcode, and emit it into *pre_p.  */
10937 static enum gimplify_status
10938 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
10940 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
10942 gtransaction
*trans_stmt
;
10943 gimple_seq body
= NULL
;
10946 /* Wrap the transaction body in a BIND_EXPR so we have a context
10947 where to put decls for OMP. */
10948 if (TREE_CODE (tbody
) != BIND_EXPR
)
10950 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
10951 TREE_SIDE_EFFECTS (bind
) = 1;
10952 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
10953 TRANSACTION_EXPR_BODY (expr
) = bind
;
10956 push_gimplify_context ();
10957 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
10959 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
10960 pop_gimplify_context (body_stmt
);
10962 trans_stmt
= gimple_build_transaction (body
);
10963 if (TRANSACTION_EXPR_OUTER (expr
))
10964 subcode
= GTMA_IS_OUTER
;
10965 else if (TRANSACTION_EXPR_RELAXED (expr
))
10966 subcode
= GTMA_IS_RELAXED
;
10967 gimple_transaction_set_subcode (trans_stmt
, subcode
);
10969 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
10977 *expr_p
= NULL_TREE
;
10978 return GS_ALL_DONE
;
10981 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10982 is the OMP_BODY of the original EXPR (which has already been
10983 gimplified so it's not present in the EXPR).
10985 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
/* NOTE(review): garbled extraction -- lines broken mid-statement, original
   line numbers embedded, lines missing at numbering gaps (declarations of
   c/decls/i/fail, `failures++' bookkeeping, the sink_c/source_c
   assignments, the `if (... failures)' guard before the nop return).
   Comments only added; code text byte-identical.  Visible logic: validate
   depend(sink:)/depend(source) clauses against the enclosing loop's
   recorded iteration variables (count and identity), diagnose duplicate
   depend(source) and mixing source with sink, and on failure return a
   GIMPLE_NOP instead of the ordered construct.  */
10988 gimplify_omp_ordered (tree expr
, gimple_seq body
)
10993 tree source_c
= NULL_TREE
;
10994 tree sink_c
= NULL_TREE
;
10996 if (gimplify_omp_ctxp
)
10998 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10999 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
11000 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ()
11001 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
11002 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
))
11004 error_at (OMP_CLAUSE_LOCATION (c
),
11005 "%<ordered%> construct with %<depend%> clause must be "
11006 "closely nested inside a loop with %<ordered%> clause "
11007 "with a parameter");
11010 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
11011 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
/* loop_iter_var stores pairs, hence the "/ 2" and "[2 * i]"
   indexing below.  */
11014 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
11015 decls
&& TREE_CODE (decls
) == TREE_LIST
;
11016 decls
= TREE_CHAIN (decls
), ++i
)
11017 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
11019 else if (TREE_VALUE (decls
)
11020 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
11022 error_at (OMP_CLAUSE_LOCATION (c
),
11023 "variable %qE is not an iteration "
11024 "of outermost loop %d, expected %qE",
11025 TREE_VALUE (decls
), i
+ 1,
11026 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
11032 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
11033 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
11035 error_at (OMP_CLAUSE_LOCATION (c
),
11036 "number of variables in %<depend(sink)%> "
11037 "clause does not match number of "
11038 "iteration variables");
11043 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
11044 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
11048 error_at (OMP_CLAUSE_LOCATION (c
),
11049 "more than one %<depend(source)%> clause on an "
11050 "%<ordered%> construct");
11057 if (source_c
&& sink_c
)
11059 error_at (OMP_CLAUSE_LOCATION (source_c
),
11060 "%<depend(source)%> clause specified together with "
11061 "%<depend(sink:)%> clauses on the same construct");
11066 return gimple_build_nop ();
11067 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
11070 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11071 expression produces a value to be used as an operand inside a GIMPLE
11072 statement, the value will be stored back in *EXPR_P. This value will
11073 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11074 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11075 emitted in PRE_P and POST_P.
11077 Additionally, this process may overwrite parts of the input
11078 expression during gimplification. Ideally, it should be
11079 possible to do non-destructive gimplification.
11081 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11082 the expression needs to evaluate to a value to be used as
11083 an operand in a GIMPLE statement, this value will be stored in
11084 *EXPR_P on exit. This happens when the caller specifies one
11085 of fb_lvalue or fb_rvalue fallback flags.
11087 PRE_P will contain the sequence of GIMPLE statements corresponding
11088 to the evaluation of EXPR and all the side-effects that must
11089 be executed before the main expression. On exit, the last
11090 statement of PRE_P is the core statement being gimplified. For
11091 instance, when gimplifying 'if (++a)' the last statement in
11092 PRE_P will be 'if (t.1)' where t.1 is the result of
11093 pre-incrementing 'a'.
11095 POST_P will contain the sequence of GIMPLE statements corresponding
11096 to the evaluation of all the side-effects that must be executed
11097 after the main expression. If this is NULL, the post
11098 side-effects are stored at the end of PRE_P.
11100 The reason why the output is split in two is to handle post
11101 side-effects explicitly. In some cases, an expression may have
11102 inner and outer post side-effects which need to be emitted in
11103 an order different from the one given by the recursive
11104 traversal. For instance, for the expression (*p--)++ the post
11105 side-effects of '--' must actually occur *after* the post
11106 side-effects of '++'. However, gimplification will first visit
11107 the inner expression, so if a separate POST sequence was not
11108 used, the resulting sequence would be:
11115 However, the post-decrement operation in line #2 must not be
11116 evaluated until after the store to *p at line #4, so the
11117 correct sequence should be:
11124 So, by specifying a separate post queue, it is possible
11125 to emit the post side-effects in the correct order.
11126 If POST_P is NULL, an internal queue will be used. Before
11127 returning to the caller, the sequence POST_P is appended to
11128 the main output sequence PRE_P.
11130 GIMPLE_TEST_F points to a function that takes a tree T and
11131 returns nonzero if T is in the GIMPLE form requested by the
11132 caller. The GIMPLE predicates are in gimple.c.
11134 FALLBACK tells the function what sort of a temporary we want if
11135 gimplification cannot produce an expression that complies with
11138 fb_none means that no temporary should be generated
11139 fb_rvalue means that an rvalue is OK to generate
11140 fb_lvalue means that an lvalue is OK to generate
11141 fb_either means that either is OK, but an lvalue is preferable.
11142 fb_mayfail means that gimplification may fail (in which case
11143 GS_ERROR will be returned)
11145 The return value is either GS_ERROR or GS_ALL_DONE, since this
11146 function iterates until EXPR is completely gimplified or an error
11149 enum gimplify_status
11150 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
11151 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
11154 gimple_seq internal_pre
= NULL
;
11155 gimple_seq internal_post
= NULL
;
11158 location_t saved_location
;
11159 enum gimplify_status ret
;
11160 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
11163 save_expr
= *expr_p
;
11164 if (save_expr
== NULL_TREE
)
11165 return GS_ALL_DONE
;
11167 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11168 is_statement
= gimple_test_f
== is_gimple_stmt
;
11170 gcc_assert (pre_p
);
11172 /* Consistency checks. */
11173 if (gimple_test_f
== is_gimple_reg
)
11174 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
11175 else if (gimple_test_f
== is_gimple_val
11176 || gimple_test_f
== is_gimple_call_addr
11177 || gimple_test_f
== is_gimple_condexpr
11178 || gimple_test_f
== is_gimple_mem_rhs
11179 || gimple_test_f
== is_gimple_mem_rhs_or_call
11180 || gimple_test_f
== is_gimple_reg_rhs
11181 || gimple_test_f
== is_gimple_reg_rhs_or_call
11182 || gimple_test_f
== is_gimple_asm_val
11183 || gimple_test_f
== is_gimple_mem_ref_addr
)
11184 gcc_assert (fallback
& fb_rvalue
);
11185 else if (gimple_test_f
== is_gimple_min_lval
11186 || gimple_test_f
== is_gimple_lvalue
)
11187 gcc_assert (fallback
& fb_lvalue
);
11188 else if (gimple_test_f
== is_gimple_addressable
)
11189 gcc_assert (fallback
& fb_either
);
11190 else if (gimple_test_f
== is_gimple_stmt
)
11191 gcc_assert (fallback
== fb_none
);
11194 /* We should have recognized the GIMPLE_TEST_F predicate to
11195 know what kind of fallback to use in case a temporary is
11196 needed to hold the value or address of *EXPR_P. */
11197 gcc_unreachable ();
11200 /* We used to check the predicate here and return immediately if it
11201 succeeds. This is wrong; the design is for gimplification to be
11202 idempotent, and for the predicates to only test for valid forms, not
11203 whether they are fully simplified. */
11205 pre_p
= &internal_pre
;
11207 if (post_p
== NULL
)
11208 post_p
= &internal_post
;
11210 /* Remember the last statements added to PRE_P and POST_P. Every
11211 new statement added by the gimplification helpers needs to be
11212 annotated with location information. To centralize the
11213 responsibility, we remember the last statement that had been
11214 added to both queues before gimplifying *EXPR_P. If
11215 gimplification produces new statements in PRE_P and POST_P, those
11216 statements will be annotated with the same location information
11218 pre_last_gsi
= gsi_last (*pre_p
);
11219 post_last_gsi
= gsi_last (*post_p
);
11221 saved_location
= input_location
;
11222 if (save_expr
!= error_mark_node
11223 && EXPR_HAS_LOCATION (*expr_p
))
11224 input_location
= EXPR_LOCATION (*expr_p
);
11226 /* Loop over the specific gimplifiers until the toplevel node
11227 remains the same. */
11230 /* Strip away as many useless type conversions as possible
11231 at the toplevel. */
11232 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
11234 /* Remember the expr. */
11235 save_expr
= *expr_p
;
11237 /* Die, die, die, my darling. */
11238 if (save_expr
== error_mark_node
11239 || (TREE_TYPE (save_expr
)
11240 && TREE_TYPE (save_expr
) == error_mark_node
))
11246 /* Do any language-specific gimplification. */
11247 ret
= ((enum gimplify_status
)
11248 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
11251 if (*expr_p
== NULL_TREE
)
11253 if (*expr_p
!= save_expr
)
11256 else if (ret
!= GS_UNHANDLED
)
11259 /* Make sure that all the cases set 'ret' appropriately. */
11260 ret
= GS_UNHANDLED
;
11261 switch (TREE_CODE (*expr_p
))
11263 /* First deal with the special cases. */
11265 case POSTINCREMENT_EXPR
:
11266 case POSTDECREMENT_EXPR
:
11267 case PREINCREMENT_EXPR
:
11268 case PREDECREMENT_EXPR
:
11269 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
11270 fallback
!= fb_none
,
11271 TREE_TYPE (*expr_p
));
11274 case VIEW_CONVERT_EXPR
:
11275 if (is_gimple_reg_type (TREE_TYPE (*expr_p
))
11276 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
11278 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11279 post_p
, is_gimple_val
, fb_rvalue
);
11280 recalculate_side_effects (*expr_p
);
11286 case ARRAY_RANGE_REF
:
11287 case REALPART_EXPR
:
11288 case IMAGPART_EXPR
:
11289 case COMPONENT_REF
:
11290 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
11291 fallback
? fallback
: fb_rvalue
);
11295 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
11297 /* C99 code may assign to an array in a structure value of a
11298 conditional expression, and this has undefined behavior
11299 only on execution, so create a temporary if an lvalue is
11301 if (fallback
== fb_lvalue
)
11303 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
11304 mark_addressable (*expr_p
);
11310 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
11312 /* C99 code may assign to an array in a structure returned
11313 from a function, and this has undefined behavior only on
11314 execution, so create a temporary if an lvalue is
11316 if (fallback
== fb_lvalue
)
11318 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
11319 mark_addressable (*expr_p
);
11325 gcc_unreachable ();
11327 case COMPOUND_EXPR
:
11328 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
11331 case COMPOUND_LITERAL_EXPR
:
11332 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
11333 gimple_test_f
, fallback
);
11338 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
11339 fallback
!= fb_none
);
11342 case TRUTH_ANDIF_EXPR
:
11343 case TRUTH_ORIF_EXPR
:
11345 /* Preserve the original type of the expression and the
11346 source location of the outer expression. */
11347 tree org_type
= TREE_TYPE (*expr_p
);
11348 *expr_p
= gimple_boolify (*expr_p
);
11349 *expr_p
= build3_loc (input_location
, COND_EXPR
,
11353 org_type
, boolean_true_node
),
11356 org_type
, boolean_false_node
));
11361 case TRUTH_NOT_EXPR
:
11363 tree type
= TREE_TYPE (*expr_p
);
11364 /* The parsers are careful to generate TRUTH_NOT_EXPR
11365 only with operands that are always zero or one.
11366 We do not fold here but handle the only interesting case
11367 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11368 *expr_p
= gimple_boolify (*expr_p
);
11369 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
11370 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
11371 TREE_TYPE (*expr_p
),
11372 TREE_OPERAND (*expr_p
, 0));
11374 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
11375 TREE_TYPE (*expr_p
),
11376 TREE_OPERAND (*expr_p
, 0),
11377 build_int_cst (TREE_TYPE (*expr_p
), 1));
11378 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
11379 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
11385 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
11388 case ANNOTATE_EXPR
:
11390 tree cond
= TREE_OPERAND (*expr_p
, 0);
11391 tree kind
= TREE_OPERAND (*expr_p
, 1);
11392 tree type
= TREE_TYPE (cond
);
11393 if (!INTEGRAL_TYPE_P (type
))
11399 tree tmp
= create_tmp_var (type
);
11400 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
11402 = gimple_build_call_internal (IFN_ANNOTATE
, 2, cond
, kind
);
11403 gimple_call_set_lhs (call
, tmp
);
11404 gimplify_seq_add_stmt (pre_p
, call
);
11411 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
11415 if (IS_EMPTY_STMT (*expr_p
))
11421 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
11422 || fallback
== fb_none
)
11424 /* Just strip a conversion to void (or in void context) and
11426 *expr_p
= TREE_OPERAND (*expr_p
, 0);
11431 ret
= gimplify_conversion (expr_p
);
11432 if (ret
== GS_ERROR
)
11434 if (*expr_p
!= save_expr
)
11438 case FIX_TRUNC_EXPR
:
11439 /* unary_expr: ... | '(' cast ')' val | ... */
11440 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
11441 is_gimple_val
, fb_rvalue
);
11442 recalculate_side_effects (*expr_p
);
11447 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
11448 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
11449 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
11451 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
11452 if (*expr_p
!= save_expr
)
11458 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
11459 is_gimple_reg
, fb_rvalue
);
11460 if (ret
== GS_ERROR
)
11463 recalculate_side_effects (*expr_p
);
11464 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
11465 TREE_TYPE (*expr_p
),
11466 TREE_OPERAND (*expr_p
, 0),
11467 build_int_cst (saved_ptr_type
, 0));
11468 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
11469 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
11474 /* We arrive here through the various re-gimplifcation paths. */
11476 /* First try re-folding the whole thing. */
11477 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
11478 TREE_OPERAND (*expr_p
, 0),
11479 TREE_OPERAND (*expr_p
, 1));
11482 REF_REVERSE_STORAGE_ORDER (tmp
)
11483 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
11485 recalculate_side_effects (*expr_p
);
11489 /* Avoid re-gimplifying the address operand if it is already
11490 in suitable form. Re-gimplifying would mark the address
11491 operand addressable. Always gimplify when not in SSA form
11492 as we still may have to gimplify decls with value-exprs. */
11493 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
11494 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
11496 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
11497 is_gimple_mem_ref_addr
, fb_rvalue
);
11498 if (ret
== GS_ERROR
)
11501 recalculate_side_effects (*expr_p
);
11505 /* Constants need not be gimplified. */
11512 /* Drop the overflow flag on constants, we do not want
11513 that in the GIMPLE IL. */
11514 if (TREE_OVERFLOW_P (*expr_p
))
11515 *expr_p
= drop_tree_overflow (*expr_p
);
11520 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11521 CONST_DECL node. Otherwise the decl is replaceable by its
11523 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11524 if (fallback
& fb_lvalue
)
11528 *expr_p
= DECL_INITIAL (*expr_p
);
11534 ret
= gimplify_decl_expr (expr_p
, pre_p
);
11538 ret
= gimplify_bind_expr (expr_p
, pre_p
);
11542 ret
= gimplify_loop_expr (expr_p
, pre_p
);
11546 ret
= gimplify_switch_expr (expr_p
, pre_p
);
11550 ret
= gimplify_exit_expr (expr_p
);
11554 /* If the target is not LABEL, then it is a computed jump
11555 and the target needs to be gimplified. */
11556 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
11558 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
11559 NULL
, is_gimple_val
, fb_rvalue
);
11560 if (ret
== GS_ERROR
)
11563 gimplify_seq_add_stmt (pre_p
,
11564 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
11569 gimplify_seq_add_stmt (pre_p
,
11570 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
11571 PREDICT_EXPR_OUTCOME (*expr_p
)));
11576 ret
= gimplify_label_expr (expr_p
, pre_p
);
11577 label
= LABEL_EXPR_LABEL (*expr_p
);
11578 gcc_assert (decl_function_context (label
) == current_function_decl
);
11580 /* If the label is used in a goto statement, or address of the label
11581 is taken, we need to unpoison all variables that were seen so far.
11582 Doing so would prevent us from reporting a false positives. */
11583 if (asan_poisoned_variables
11584 && asan_used_labels
!= NULL
11585 && asan_used_labels
->contains (label
))
11586 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
11589 case CASE_LABEL_EXPR
:
11590 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
11592 if (gimplify_ctxp
->live_switch_vars
)
11593 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
11598 ret
= gimplify_return_expr (*expr_p
, pre_p
);
11602 /* Don't reduce this in place; let gimplify_init_constructor work its
11603 magic. Buf if we're just elaborating this for side effects, just
11604 gimplify any element that has side-effects. */
11605 if (fallback
== fb_none
)
11607 unsigned HOST_WIDE_INT ix
;
11609 tree temp
= NULL_TREE
;
11610 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
11611 if (TREE_SIDE_EFFECTS (val
))
11612 append_to_statement_list (val
, &temp
);
11615 ret
= temp
? GS_OK
: GS_ALL_DONE
;
11617 /* C99 code may assign to an array in a constructed
11618 structure or union, and this has undefined behavior only
11619 on execution, so create a temporary if an lvalue is
11621 else if (fallback
== fb_lvalue
)
11623 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
11624 mark_addressable (*expr_p
);
11631 /* The following are special cases that are not handled by the
11632 original GIMPLE grammar. */
11634 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11637 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
11640 case BIT_FIELD_REF
:
11641 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11642 post_p
, is_gimple_lvalue
, fb_either
);
11643 recalculate_side_effects (*expr_p
);
11646 case TARGET_MEM_REF
:
11648 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
11650 if (TMR_BASE (*expr_p
))
11651 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
11652 post_p
, is_gimple_mem_ref_addr
, fb_either
);
11653 if (TMR_INDEX (*expr_p
))
11654 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
11655 post_p
, is_gimple_val
, fb_rvalue
);
11656 if (TMR_INDEX2 (*expr_p
))
11657 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
11658 post_p
, is_gimple_val
, fb_rvalue
);
11659 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11660 ret
= MIN (r0
, r1
);
11664 case NON_LVALUE_EXPR
:
11665 /* This should have been stripped above. */
11666 gcc_unreachable ();
11669 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
11672 case TRY_FINALLY_EXPR
:
11673 case TRY_CATCH_EXPR
:
11675 gimple_seq eval
, cleanup
;
11678 /* Calls to destructors are generated automatically in FINALLY/CATCH
11679 block. They should have location as UNKNOWN_LOCATION. However,
11680 gimplify_call_expr will reset these call stmts to input_location
11681 if it finds stmt's location is unknown. To prevent resetting for
11682 destructors, we set the input_location to unknown.
11683 Note that this only affects the destructor calls in FINALLY/CATCH
11684 block, and will automatically reset to its original value by the
11685 end of gimplify_expr. */
11686 input_location
= UNKNOWN_LOCATION
;
11687 eval
= cleanup
= NULL
;
11688 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
11689 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
11690 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11691 if (gimple_seq_empty_p (cleanup
))
11693 gimple_seq_add_seq (pre_p
, eval
);
11697 try_
= gimple_build_try (eval
, cleanup
,
11698 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
11699 ? GIMPLE_TRY_FINALLY
11700 : GIMPLE_TRY_CATCH
);
11701 if (EXPR_HAS_LOCATION (save_expr
))
11702 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
11703 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
11704 gimple_set_location (try_
, saved_location
);
11705 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
11706 gimple_try_set_catch_is_cleanup (try_
,
11707 TRY_CATCH_IS_CLEANUP (*expr_p
));
11708 gimplify_seq_add_stmt (pre_p
, try_
);
11713 case CLEANUP_POINT_EXPR
:
11714 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
11718 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
11724 gimple_seq handler
= NULL
;
11725 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
11726 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
11727 gimplify_seq_add_stmt (pre_p
, c
);
11732 case EH_FILTER_EXPR
:
11735 gimple_seq failure
= NULL
;
11737 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
11738 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
11739 gimple_set_no_warning (ehf
, TREE_NO_WARNING (*expr_p
));
11740 gimplify_seq_add_stmt (pre_p
, ehf
);
11747 enum gimplify_status r0
, r1
;
11748 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
11749 post_p
, is_gimple_val
, fb_rvalue
);
11750 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
11751 post_p
, is_gimple_val
, fb_rvalue
);
11752 TREE_SIDE_EFFECTS (*expr_p
) = 0;
11753 ret
= MIN (r0
, r1
);
11758 /* We get here when taking the address of a label. We mark
11759 the label as "forced"; meaning it can never be removed and
11760 it is a potential target for any computed goto. */
11761 FORCED_LABEL (*expr_p
) = 1;
11765 case STATEMENT_LIST
:
11766 ret
= gimplify_statement_list (expr_p
, pre_p
);
11769 case WITH_SIZE_EXPR
:
11771 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11772 post_p
== &internal_post
? NULL
: post_p
,
11773 gimple_test_f
, fallback
);
11774 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
11775 is_gimple_val
, fb_rvalue
);
11782 ret
= gimplify_var_or_parm_decl (expr_p
);
11786 /* When within an OMP context, notice uses of variables. */
11787 if (gimplify_omp_ctxp
)
11788 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
11793 /* Allow callbacks into the gimplifier during optimization. */
11798 gimplify_omp_parallel (expr_p
, pre_p
);
11803 gimplify_omp_task (expr_p
, pre_p
);
11811 case OMP_DISTRIBUTE
:
11814 ret
= gimplify_omp_for (expr_p
, pre_p
);
11818 gimplify_oacc_cache (expr_p
, pre_p
);
11823 gimplify_oacc_declare (expr_p
, pre_p
);
11827 case OACC_HOST_DATA
:
11830 case OACC_PARALLEL
:
11834 case OMP_TARGET_DATA
:
11836 gimplify_omp_workshare (expr_p
, pre_p
);
11840 case OACC_ENTER_DATA
:
11841 case OACC_EXIT_DATA
:
11843 case OMP_TARGET_UPDATE
:
11844 case OMP_TARGET_ENTER_DATA
:
11845 case OMP_TARGET_EXIT_DATA
:
11846 gimplify_omp_target_update (expr_p
, pre_p
);
11852 case OMP_TASKGROUP
:
11856 gimple_seq body
= NULL
;
11859 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
11860 switch (TREE_CODE (*expr_p
))
11863 g
= gimple_build_omp_section (body
);
11866 g
= gimple_build_omp_master (body
);
11868 case OMP_TASKGROUP
:
11870 gimple_seq cleanup
= NULL
;
11872 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
11873 g
= gimple_build_call (fn
, 0);
11874 gimple_seq_add_stmt (&cleanup
, g
);
11875 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
11877 gimple_seq_add_stmt (&body
, g
);
11878 g
= gimple_build_omp_taskgroup (body
);
11882 g
= gimplify_omp_ordered (*expr_p
, body
);
11885 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
11886 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
11887 gimplify_adjust_omp_clauses (pre_p
, body
,
11888 &OMP_CRITICAL_CLAUSES (*expr_p
),
11890 g
= gimple_build_omp_critical (body
,
11891 OMP_CRITICAL_NAME (*expr_p
),
11892 OMP_CRITICAL_CLAUSES (*expr_p
));
11895 gcc_unreachable ();
11897 gimplify_seq_add_stmt (pre_p
, g
);
11903 case OMP_ATOMIC_READ
:
11904 case OMP_ATOMIC_CAPTURE_OLD
:
11905 case OMP_ATOMIC_CAPTURE_NEW
:
11906 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
11909 case TRANSACTION_EXPR
:
11910 ret
= gimplify_transaction (expr_p
, pre_p
);
11913 case TRUTH_AND_EXPR
:
11914 case TRUTH_OR_EXPR
:
11915 case TRUTH_XOR_EXPR
:
11917 tree orig_type
= TREE_TYPE (*expr_p
);
11918 tree new_type
, xop0
, xop1
;
11919 *expr_p
= gimple_boolify (*expr_p
);
11920 new_type
= TREE_TYPE (*expr_p
);
11921 if (!useless_type_conversion_p (orig_type
, new_type
))
11923 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
11928 /* Boolified binary truth expressions are semantically equivalent
11929 to bitwise binary expressions. Canonicalize them to the
11930 bitwise variant. */
11931 switch (TREE_CODE (*expr_p
))
11933 case TRUTH_AND_EXPR
:
11934 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
11936 case TRUTH_OR_EXPR
:
11937 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
11939 case TRUTH_XOR_EXPR
:
11940 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
11945 /* Now make sure that operands have compatible type to
11946 expression's new_type. */
11947 xop0
= TREE_OPERAND (*expr_p
, 0);
11948 xop1
= TREE_OPERAND (*expr_p
, 1);
11949 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
11950 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
11953 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
11954 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
11957 /* Continue classified as tcc_binary. */
11961 case VEC_COND_EXPR
:
11963 enum gimplify_status r0
, r1
, r2
;
11965 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11966 post_p
, is_gimple_condexpr
, fb_rvalue
);
11967 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
11968 post_p
, is_gimple_val
, fb_rvalue
);
11969 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
11970 post_p
, is_gimple_val
, fb_rvalue
);
11972 ret
= MIN (MIN (r0
, r1
), r2
);
11973 recalculate_side_effects (*expr_p
);
11978 case VEC_PERM_EXPR
:
11979 /* Classified as tcc_expression. */
11982 case BIT_INSERT_EXPR
:
11983 /* Argument 3 is a constant. */
11986 case POINTER_PLUS_EXPR
:
11988 enum gimplify_status r0
, r1
;
11989 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11990 post_p
, is_gimple_val
, fb_rvalue
);
11991 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
11992 post_p
, is_gimple_val
, fb_rvalue
);
11993 recalculate_side_effects (*expr_p
);
11994 ret
= MIN (r0
, r1
);
11998 case CILK_SYNC_STMT
:
12000 if (!fn_contains_cilk_spawn_p (cfun
))
12002 error_at (EXPR_LOCATION (*expr_p
),
12003 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
12008 gimplify_cilk_sync (expr_p
, pre_p
);
12015 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
12017 case tcc_comparison
:
12018 /* Handle comparison of objects of non scalar mode aggregates
12019 with a call to memcmp. It would be nice to only have to do
12020 this for variable-sized objects, but then we'd have to allow
12021 the same nest of reference nodes we allow for MODIFY_EXPR and
12022 that's too complex.
12024 Compare scalar mode aggregates as scalar mode values. Using
12025 memcmp for them would be very inefficient at best, and is
12026 plain wrong if bitfields are involved. */
12028 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
12030 /* Vector comparisons need no boolification. */
12031 if (TREE_CODE (type
) == VECTOR_TYPE
)
12033 else if (!AGGREGATE_TYPE_P (type
))
12035 tree org_type
= TREE_TYPE (*expr_p
);
12036 *expr_p
= gimple_boolify (*expr_p
);
12037 if (!useless_type_conversion_p (org_type
,
12038 TREE_TYPE (*expr_p
)))
12040 *expr_p
= fold_convert_loc (input_location
,
12041 org_type
, *expr_p
);
12047 else if (TYPE_MODE (type
) != BLKmode
)
12048 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
12050 ret
= gimplify_variable_sized_compare (expr_p
);
12055 /* If *EXPR_P does not need to be special-cased, handle it
12056 according to its class. */
12058 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
12059 post_p
, is_gimple_val
, fb_rvalue
);
12065 enum gimplify_status r0
, r1
;
12067 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
12068 post_p
, is_gimple_val
, fb_rvalue
);
12069 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
12070 post_p
, is_gimple_val
, fb_rvalue
);
12072 ret
= MIN (r0
, r1
);
12078 enum gimplify_status r0
, r1
, r2
;
12080 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
12081 post_p
, is_gimple_val
, fb_rvalue
);
12082 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
12083 post_p
, is_gimple_val
, fb_rvalue
);
12084 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
12085 post_p
, is_gimple_val
, fb_rvalue
);
12087 ret
= MIN (MIN (r0
, r1
), r2
);
12091 case tcc_declaration
:
12094 goto dont_recalculate
;
12097 gcc_unreachable ();
12100 recalculate_side_effects (*expr_p
);
12106 gcc_assert (*expr_p
|| ret
!= GS_OK
);
12108 while (ret
== GS_OK
);
12110 /* If we encountered an error_mark somewhere nested inside, either
12111 stub out the statement or propagate the error back out. */
12112 if (ret
== GS_ERROR
)
12119 /* This was only valid as a return value from the langhook, which
12120 we handled. Make sure it doesn't escape from any other context. */
12121 gcc_assert (ret
!= GS_UNHANDLED
);
12123 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
12125 /* We aren't looking for a value, and we don't have a valid
12126 statement. If it doesn't have side-effects, throw it away.
12127 We can also get here with code such as "*&&L;", where L is
12128 a LABEL_DECL that is marked as FORCED_LABEL. */
12129 if (TREE_CODE (*expr_p
) == LABEL_DECL
12130 || !TREE_SIDE_EFFECTS (*expr_p
))
12132 else if (!TREE_THIS_VOLATILE (*expr_p
))
12134 /* This is probably a _REF that contains something nested that
12135 has side effects. Recurse through the operands to find it. */
12136 enum tree_code code
= TREE_CODE (*expr_p
);
12140 case COMPONENT_REF
:
12141 case REALPART_EXPR
:
12142 case IMAGPART_EXPR
:
12143 case VIEW_CONVERT_EXPR
:
12144 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
12145 gimple_test_f
, fallback
);
12149 case ARRAY_RANGE_REF
:
12150 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
12151 gimple_test_f
, fallback
);
12152 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
12153 gimple_test_f
, fallback
);
12157 /* Anything else with side-effects must be converted to
12158 a valid statement before we get here. */
12159 gcc_unreachable ();
12164 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
12165 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
)
12167 /* Historically, the compiler has treated a bare reference
12168 to a non-BLKmode volatile lvalue as forcing a load. */
12169 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
12171 /* Normally, we do not want to create a temporary for a
12172 TREE_ADDRESSABLE type because such a type should not be
12173 copied by bitwise-assignment. However, we make an
12174 exception here, as all we are doing here is ensuring that
12175 we read the bytes that make up the type. We use
12176 create_tmp_var_raw because create_tmp_var will abort when
12177 given a TREE_ADDRESSABLE type. */
12178 tree tmp
= create_tmp_var_raw (type
, "vol");
12179 gimple_add_tmp_var (tmp
);
12180 gimplify_assign (tmp
, *expr_p
, pre_p
);
12184 /* We can't do anything useful with a volatile reference to
12185 an incomplete type, so just throw it away. Likewise for
12186 a BLKmode type, since any implicit inner load should
12187 already have been turned into an explicit one by the
12188 gimplification process. */
12192 /* If we are gimplifying at the statement level, we're done. Tack
12193 everything together and return. */
12194 if (fallback
== fb_none
|| is_statement
)
12196 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12197 it out for GC to reclaim it. */
12198 *expr_p
= NULL_TREE
;
12200 if (!gimple_seq_empty_p (internal_pre
)
12201 || !gimple_seq_empty_p (internal_post
))
12203 gimplify_seq_add_seq (&internal_pre
, internal_post
);
12204 gimplify_seq_add_seq (pre_p
, internal_pre
);
12207 /* The result of gimplifying *EXPR_P is going to be the last few
12208 statements in *PRE_P and *POST_P. Add location information
12209 to all the statements that were added by the gimplification
12211 if (!gimple_seq_empty_p (*pre_p
))
12212 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
12214 if (!gimple_seq_empty_p (*post_p
))
12215 annotate_all_with_location_after (*post_p
, post_last_gsi
,
12221 #ifdef ENABLE_GIMPLE_CHECKING
12224 enum tree_code code
= TREE_CODE (*expr_p
);
12225 /* These expressions should already be in gimple IR form. */
12226 gcc_assert (code
!= MODIFY_EXPR
12227 && code
!= ASM_EXPR
12228 && code
!= BIND_EXPR
12229 && code
!= CATCH_EXPR
12230 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
12231 && code
!= EH_FILTER_EXPR
12232 && code
!= GOTO_EXPR
12233 && code
!= LABEL_EXPR
12234 && code
!= LOOP_EXPR
12235 && code
!= SWITCH_EXPR
12236 && code
!= TRY_FINALLY_EXPR
12237 && code
!= OACC_PARALLEL
12238 && code
!= OACC_KERNELS
12239 && code
!= OACC_DATA
12240 && code
!= OACC_HOST_DATA
12241 && code
!= OACC_DECLARE
12242 && code
!= OACC_UPDATE
12243 && code
!= OACC_ENTER_DATA
12244 && code
!= OACC_EXIT_DATA
12245 && code
!= OACC_CACHE
12246 && code
!= OMP_CRITICAL
12248 && code
!= OACC_LOOP
12249 && code
!= OMP_MASTER
12250 && code
!= OMP_TASKGROUP
12251 && code
!= OMP_ORDERED
12252 && code
!= OMP_PARALLEL
12253 && code
!= OMP_SECTIONS
12254 && code
!= OMP_SECTION
12255 && code
!= OMP_SINGLE
);
12259 /* Otherwise we're gimplifying a subexpression, so the resulting
12260 value is interesting. If it's a valid operand that matches
12261 GIMPLE_TEST_F, we're done. Unless we are handling some
12262 post-effects internally; if that's the case, we need to copy into
12263 a temporary before adding the post-effects to POST_P. */
12264 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
12267 /* Otherwise, we need to create a new temporary for the gimplified
12270 /* We can't return an lvalue if we have an internal postqueue. The
12271 object the lvalue refers to would (probably) be modified by the
12272 postqueue; we need to copy the value out first, which means an
12274 if ((fallback
& fb_lvalue
)
12275 && gimple_seq_empty_p (internal_post
)
12276 && is_gimple_addressable (*expr_p
))
12278 /* An lvalue will do. Take the address of the expression, store it
12279 in a temporary, and replace the expression with an INDIRECT_REF of
12281 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
12282 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
12283 *expr_p
= build_simple_mem_ref (tmp
);
12285 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
12287 /* An rvalue will do. Assign the gimplified expression into a
12288 new temporary TMP and replace the original expression with
12289 TMP. First, make sure that the expression has a type so that
12290 it can be assigned into a temporary. */
12291 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
12292 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
12296 #ifdef ENABLE_GIMPLE_CHECKING
12297 if (!(fallback
& fb_mayfail
))
12299 fprintf (stderr
, "gimplification failed:\n");
12300 print_generic_expr (stderr
, *expr_p
);
12301 debug_tree (*expr_p
);
12302 internal_error ("gimplification failed");
12305 gcc_assert (fallback
& fb_mayfail
);
12307 /* If this is an asm statement, and the user asked for the
12308 impossible, don't die. Fail and let gimplify_asm_expr
12314 /* Make sure the temporary matches our predicate. */
12315 gcc_assert ((*gimple_test_f
) (*expr_p
));
12317 if (!gimple_seq_empty_p (internal_post
))
12319 annotate_all_with_location (internal_post
, input_location
);
12320 gimplify_seq_add_seq (pre_p
, internal_post
);
12324 input_location
= saved_location
;
12328 /* Like gimplify_expr but make sure the gimplified result is not itself
12329 a SSA name (but a decl if it were). Temporaries required by
12330 evaluating *EXPR_P may be still SSA names. */
12332 static enum gimplify_status
12333 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
12334 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
12337 bool was_ssa_name_p
= TREE_CODE (*expr_p
) == SSA_NAME
;
12338 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
12339 gimple_test_f
, fallback
);
12341 && TREE_CODE (*expr_p
) == SSA_NAME
)
12343 tree name
= *expr_p
;
12344 if (was_ssa_name_p
)
12345 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
12348 /* Avoid the extra copy if possible. */
12349 *expr_p
= create_tmp_reg (TREE_TYPE (name
));
12350 gimple_set_lhs (SSA_NAME_DEF_STMT (name
), *expr_p
);
12351 release_ssa_name (name
);
12357 /* Look through TYPE for variable-sized objects and gimplify each such
12358 size that we find. Add to LIST_P any statements generated. */
12361 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
12365 if (type
== NULL
|| type
== error_mark_node
)
12368 /* We first do the main variant, then copy into any other variants. */
12369 type
= TYPE_MAIN_VARIANT (type
);
12371 /* Avoid infinite recursion. */
12372 if (TYPE_SIZES_GIMPLIFIED (type
))
12375 TYPE_SIZES_GIMPLIFIED (type
) = 1;
12377 switch (TREE_CODE (type
))
12380 case ENUMERAL_TYPE
:
12383 case FIXED_POINT_TYPE
:
12384 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
12385 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
12387 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
12389 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
12390 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
12395 /* These types may not have declarations, so handle them here. */
12396 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
12397 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
12398 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12399 with assigned stack slots, for -O1+ -g they should be tracked
12401 if (!(TYPE_NAME (type
)
12402 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
12403 && DECL_IGNORED_P (TYPE_NAME (type
)))
12404 && TYPE_DOMAIN (type
)
12405 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
12407 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
12408 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
12409 DECL_IGNORED_P (t
) = 0;
12410 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
12411 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
12412 DECL_IGNORED_P (t
) = 0;
12418 case QUAL_UNION_TYPE
:
12419 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
12420 if (TREE_CODE (field
) == FIELD_DECL
)
12422 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
12423 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
12424 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
12425 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
12430 case REFERENCE_TYPE
:
12431 /* We used to recurse on the pointed-to type here, which turned out to
12432 be incorrect because its definition might refer to variables not
12433 yet initialized at this point if a forward declaration is involved.
12435 It was actually useful for anonymous pointed-to types to ensure
12436 that the sizes evaluation dominates every possible later use of the
12437 values. Restricting to such types here would be safe since there
12438 is no possible forward declaration around, but would introduce an
12439 undesirable middle-end semantic to anonymity. We then defer to
12440 front-ends the responsibility of ensuring that the sizes are
12441 evaluated both early and late enough, e.g. by attaching artificial
12442 type declarations to the tree. */
12449 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
12450 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
12452 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
12454 TYPE_SIZE (t
) = TYPE_SIZE (type
);
12455 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
12456 TYPE_SIZES_GIMPLIFIED (t
) = 1;
12460 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12461 a size or position, has had all of its SAVE_EXPRs evaluated.
12462 We add any required statements to *STMT_P. */
12465 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
12467 tree expr
= *expr_p
;
12469 /* We don't do anything if the value isn't there, is constant, or contains
12470 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12471 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12472 will want to replace it with a new variable, but that will cause problems
12473 if this type is from outside the function. It's OK to have that here. */
12474 if (is_gimple_sizepos (expr
))
12477 *expr_p
= unshare_expr (expr
);
12479 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12480 if the def vanishes. */
12481 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
12484 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12485 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12486 is true, also gimplify the parameters. */
12489 gimplify_body (tree fndecl
, bool do_parms
)
12491 location_t saved_location
= input_location
;
12492 gimple_seq parm_stmts
, seq
;
12493 gimple
*outer_stmt
;
12495 struct cgraph_node
*cgn
;
12497 timevar_push (TV_TREE_GIMPLIFY
);
12499 init_tree_ssa (cfun
);
12501 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12503 default_rtl_profile ();
12505 gcc_assert (gimplify_ctxp
== NULL
);
12506 push_gimplify_context (true);
12508 if (flag_openacc
|| flag_openmp
)
12510 gcc_assert (gimplify_omp_ctxp
== NULL
);
12511 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
12512 gimplify_omp_ctxp
= new_omp_context (ORT_TARGET
);
12515 /* Unshare most shared trees in the body and in that of any nested functions.
12516 It would seem we don't have to do this for nested functions because
12517 they are supposed to be output and then the outer function gimplified
12518 first, but the g++ front end doesn't always do it that way. */
12519 unshare_body (fndecl
);
12520 unvisit_body (fndecl
);
12522 cgn
= cgraph_node::get (fndecl
);
12523 if (cgn
&& cgn
->origin
)
12524 nonlocal_vlas
= new hash_set
<tree
>;
12526 /* Make sure input_location isn't set to something weird. */
12527 input_location
= DECL_SOURCE_LOCATION (fndecl
);
12529 /* Resolve callee-copies. This has to be done before processing
12530 the body so that DECL_VALUE_EXPR gets processed correctly. */
12531 parm_stmts
= do_parms
? gimplify_parameters () : NULL
;
12533 /* Gimplify the function's body. */
12535 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
12536 outer_stmt
= gimple_seq_first_stmt (seq
);
12539 outer_stmt
= gimple_build_nop ();
12540 gimplify_seq_add_stmt (&seq
, outer_stmt
);
12543 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12544 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12545 if (gimple_code (outer_stmt
) == GIMPLE_BIND
12546 && gimple_seq_first (seq
) == gimple_seq_last (seq
))
12547 outer_bind
= as_a
<gbind
*> (outer_stmt
);
12549 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
12551 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
12553 /* If we had callee-copies statements, insert them at the beginning
12554 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12555 if (!gimple_seq_empty_p (parm_stmts
))
12559 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
12560 gimple_bind_set_body (outer_bind
, parm_stmts
);
12562 for (parm
= DECL_ARGUMENTS (current_function_decl
);
12563 parm
; parm
= DECL_CHAIN (parm
))
12564 if (DECL_HAS_VALUE_EXPR_P (parm
))
12566 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
12567 DECL_IGNORED_P (parm
) = 0;
12573 if (nonlocal_vla_vars
)
12575 /* tree-nested.c may later on call declare_vars (..., true);
12576 which relies on BLOCK_VARS chain to be the tail of the
12577 gimple_bind_vars chain. Ensure we don't violate that
12579 if (gimple_bind_block (outer_bind
)
12580 == DECL_INITIAL (current_function_decl
))
12581 declare_vars (nonlocal_vla_vars
, outer_bind
, true);
12583 BLOCK_VARS (DECL_INITIAL (current_function_decl
))
12584 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl
)),
12585 nonlocal_vla_vars
);
12586 nonlocal_vla_vars
= NULL_TREE
;
12588 delete nonlocal_vlas
;
12589 nonlocal_vlas
= NULL
;
12592 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
12593 && gimplify_omp_ctxp
)
12595 delete_omp_context (gimplify_omp_ctxp
);
12596 gimplify_omp_ctxp
= NULL
;
12599 pop_gimplify_context (outer_bind
);
12600 gcc_assert (gimplify_ctxp
== NULL
);
12602 if (flag_checking
&& !seen_error ())
12603 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
12605 timevar_pop (TV_TREE_GIMPLIFY
);
12606 input_location
= saved_location
;
12611 typedef char *char_p
; /* For DEF_VEC_P. */
12613 /* Return whether we should exclude FNDECL from instrumentation. */
12616 flag_instrument_functions_exclude_p (tree fndecl
)
12620 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
12621 if (v
&& v
->length () > 0)
12627 name
= lang_hooks
.decl_printable_name (fndecl
, 0);
12628 FOR_EACH_VEC_ELT (*v
, i
, s
)
12629 if (strstr (name
, s
) != NULL
)
12633 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
12634 if (v
&& v
->length () > 0)
12640 name
= DECL_SOURCE_FILE (fndecl
);
12641 FOR_EACH_VEC_ELT (*v
, i
, s
)
12642 if (strstr (name
, s
) != NULL
)
12649 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12650 node for the function we want to gimplify.
12652 Return the sequence of GIMPLE statements corresponding to the body
12656 gimplify_function_tree (tree fndecl
)
12662 gcc_assert (!gimple_body (fndecl
));
12664 if (DECL_STRUCT_FUNCTION (fndecl
))
12665 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
12667 push_struct_function (fndecl
);
12669 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12671 cfun
->curr_properties
|= PROP_gimple_lva
;
12673 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
12675 /* Preliminarily mark non-addressed complex variables as eligible
12676 for promotion to gimple registers. We'll transform their uses
12677 as we find them. */
12678 if ((TREE_CODE (TREE_TYPE (parm
)) == COMPLEX_TYPE
12679 || TREE_CODE (TREE_TYPE (parm
)) == VECTOR_TYPE
)
12680 && !TREE_THIS_VOLATILE (parm
)
12681 && !needs_to_live_in_memory (parm
))
12682 DECL_GIMPLE_REG_P (parm
) = 1;
12685 ret
= DECL_RESULT (fndecl
);
12686 if ((TREE_CODE (TREE_TYPE (ret
)) == COMPLEX_TYPE
12687 || TREE_CODE (TREE_TYPE (ret
)) == VECTOR_TYPE
)
12688 && !needs_to_live_in_memory (ret
))
12689 DECL_GIMPLE_REG_P (ret
) = 1;
12691 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS
))
12692 asan_poisoned_variables
= new hash_set
<tree
> ();
12693 bind
= gimplify_body (fndecl
, true);
12694 if (asan_poisoned_variables
)
12696 delete asan_poisoned_variables
;
12697 asan_poisoned_variables
= NULL
;
12700 /* The tree body of the function is no longer needed, replace it
12701 with the new GIMPLE body. */
12703 gimple_seq_add_stmt (&seq
, bind
);
12704 gimple_set_body (fndecl
, seq
);
12706 /* If we're instrumenting function entry/exit, then prepend the call to
12707 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12708 catch the exit hook. */
12709 /* ??? Add some way to ignore exceptions for this TFE. */
12710 if (flag_instrument_function_entry_exit
12711 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
12712 /* Do not instrument extern inline functions. */
12713 && !(DECL_DECLARED_INLINE_P (fndecl
)
12714 && DECL_EXTERNAL (fndecl
)
12715 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
12716 && !flag_instrument_functions_exclude_p (fndecl
))
12721 gimple_seq cleanup
= NULL
, body
= NULL
;
12725 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
12726 call
= gimple_build_call (x
, 1, integer_zero_node
);
12727 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
12728 gimple_call_set_lhs (call
, tmp_var
);
12729 gimplify_seq_add_stmt (&cleanup
, call
);
12730 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT
);
12731 call
= gimple_build_call (x
, 2,
12732 build_fold_addr_expr (current_function_decl
),
12734 gimplify_seq_add_stmt (&cleanup
, call
);
12735 tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
12737 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
12738 call
= gimple_build_call (x
, 1, integer_zero_node
);
12739 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
12740 gimple_call_set_lhs (call
, tmp_var
);
12741 gimplify_seq_add_stmt (&body
, call
);
12742 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER
);
12743 call
= gimple_build_call (x
, 2,
12744 build_fold_addr_expr (current_function_decl
),
12746 gimplify_seq_add_stmt (&body
, call
);
12747 gimplify_seq_add_stmt (&body
, tf
);
12748 new_bind
= gimple_build_bind (NULL
, body
, NULL
);
12750 /* Replace the current function body with the body
12751 wrapped in the try/finally TF. */
12753 gimple_seq_add_stmt (&seq
, new_bind
);
12754 gimple_set_body (fndecl
, seq
);
12758 if (sanitize_flags_p (SANITIZE_THREAD
))
12760 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
12761 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
12762 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
12763 /* Replace the current function body with the body
12764 wrapped in the try/finally TF. */
12766 gimple_seq_add_stmt (&seq
, new_bind
);
12767 gimple_set_body (fndecl
, seq
);
12770 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
12771 cfun
->curr_properties
|= PROP_gimple_any
;
12775 dump_function (TDI_gimple
, fndecl
);
12778 /* Return a dummy expression of type TYPE in order to keep going after an
12782 dummy_object (tree type
)
12784 tree t
= build_int_cst (build_pointer_type (type
), 0);
12785 return build2 (MEM_REF
, type
, t
, t
);
12788 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12789 builtin function, but a very special sort of operator. */
12791 enum gimplify_status
12792 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
,
12793 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
12795 tree promoted_type
, have_va_type
;
12796 tree valist
= TREE_OPERAND (*expr_p
, 0);
12797 tree type
= TREE_TYPE (*expr_p
);
12798 tree t
, tag
, aptag
;
12799 location_t loc
= EXPR_LOCATION (*expr_p
);
12801 /* Verify that valist is of the proper type. */
12802 have_va_type
= TREE_TYPE (valist
);
12803 if (have_va_type
== error_mark_node
)
12805 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
12806 if (have_va_type
== NULL_TREE
12807 && POINTER_TYPE_P (TREE_TYPE (valist
)))
12808 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12810 = targetm
.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist
)));
12811 gcc_assert (have_va_type
!= NULL_TREE
);
12813 /* Generate a diagnostic for requesting data of a type that cannot
12814 be passed through `...' due to type promotion at the call site. */
12815 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
12818 static bool gave_help
;
12820 /* Use the expansion point to handle cases such as passing bool (defined
12821 in a system header) through `...'. */
12822 source_location xloc
12823 = expansion_point_location_if_in_system_header (loc
);
12825 /* Unfortunately, this is merely undefined, rather than a constraint
12826 violation, so we cannot make this an error. If this call is never
12827 executed, the program is still strictly conforming. */
12828 warned
= warning_at (xloc
, 0,
12829 "%qT is promoted to %qT when passed through %<...%>",
12830 type
, promoted_type
);
12831 if (!gave_help
&& warned
)
12834 inform (xloc
, "(so you should pass %qT not %qT to %<va_arg%>)",
12835 promoted_type
, type
);
12838 /* We can, however, treat "undefined" any way we please.
12839 Call abort to encourage the user to fix the program. */
12841 inform (xloc
, "if this code is reached, the program will abort");
12842 /* Before the abort, allow the evaluation of the va_list
12843 expression to exit or longjmp. */
12844 gimplify_and_add (valist
, pre_p
);
12845 t
= build_call_expr_loc (loc
,
12846 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
12847 gimplify_and_add (t
, pre_p
);
12849 /* This is dead code, but go ahead and finish so that the
12850 mode of the result comes out right. */
12851 *expr_p
= dummy_object (type
);
12852 return GS_ALL_DONE
;
12855 tag
= build_int_cst (build_pointer_type (type
), 0);
12856 aptag
= build_int_cst (TREE_TYPE (valist
), 0);
12858 *expr_p
= build_call_expr_internal_loc (loc
, IFN_VA_ARG
, type
, 3,
12859 valist
, tag
, aptag
);
12861 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12862 needs to be expanded. */
12863 cfun
->curr_properties
&= ~PROP_gimple_lva
;
12868 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12870 DST/SRC are the destination and source respectively. You can pass
12871 ungimplified trees in DST or SRC, in which case they will be
12872 converted to a gimple operand if necessary.
12874 This function returns the newly created GIMPLE_ASSIGN tuple. */
12877 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
12879 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12880 gimplify_and_add (t
, seq_p
);
12882 return gimple_seq_last_stmt (*seq_p
);
12886 gimplify_hasher::hash (const elt_t
*p
)
12889 return iterative_hash_expr (t
, 0);
12893 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
12897 enum tree_code code
= TREE_CODE (t1
);
12899 if (TREE_CODE (t2
) != code
12900 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
12903 if (!operand_equal_p (t1
, t2
, 0))
12906 /* Only allow them to compare equal if they also hash equal; otherwise
12907 results are nondeterminate, and we fail bootstrap comparison. */
12908 gcc_checking_assert (hash (p1
) == hash (p2
));