/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2021 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
40 #include "fold-const.h"
45 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
56 #include "omp-general.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "stringpool.h"
68 #include "omp-offload.h"
70 #include "tree-nested.h"
/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;
enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x8000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
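/* For illustration (not from the original source): a variable named in an
   explicit "firstprivate (x)" clause is typically recorded in the context's
   splay tree with a value such as
     GOVD_FIRSTPRIVATE | GOVD_EXPLICIT | GOVD_SEEN
   and GOVD_DATA_SHARE_CLASS masks out just the data-sharing kind when
   checking for conflicting clauses.  */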
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
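/* Illustrative mapping (assumed, not exhaustive): a combined
   "#pragma omp parallel for" pushes a context with ORT_COMBINED_PARALLEL,
   "#pragma omp target data" uses ORT_TARGET_DATA, and the OpenACC
   "#pragma acc kernels" construct uses ORT_ACC_KERNELS,
   i.e. ORT_ACC | ORT_TARGET | 2, so testing (region_type & ORT_ACC)
   identifies all OpenACC regions at once.  */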
/* Gimplify hashtable helper.  */

/* A (VAL, TEMP) pair for the formal temporary table.  */
typedef struct gimple_temp_hash_elt
{
  tree val;   /* Key.  */
  tree temp;  /* Value.  */
} elt_t;

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;

  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
/* Shorter alias name for the above function for use in gimplify.c
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}
/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}
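/* A minimal sketch of the expected call discipline (hypothetical caller,
   not part of this file): every push must be paired with a pop so pooled
   contexts are recycled rather than leaked, e.g.

     push_gimplify_context ();
     gimplify_stmt (&body, &seq);
     pop_gimplify_context (bind_stmt);   <-- temps attached to BIND_STMT
*/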
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

static void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}
/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
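/* For illustration (assumed behavior): when optimizing, two formal
   requests for the same value, e.g. the expression "a + b" gimplified in
   two places, both map to one temporary D.1234 via temp_htab, so the
   resulting redundant assignments are trivial for later passes to clean
   up; at -O0, or if VAL has side effects, every request gets a fresh
   temporary instead.  */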
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);

  return t;
}
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
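/* For illustration (hypothetical GENERIC sharing): if a front-end builds
   the tree for "x + 1" once and references that one node from two
   statements, the mark/copy/unmark walk leaves the first reference in
   place (marking it TREE_VISITED) and replaces the second reference with
   a fresh copy, so in-place gimplification of one statement cannot
   corrupt the other.  */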
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}
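/* Example of the recognized shape (assumed dump syntax): for a
   STATEMENT_LIST of the form

     DEBUG_BEGIN_STMT
     x = y + 1;   <-- location L

   rexpr_location returns L, while a list with no DEBUG_BEGIN_STMT prefix
   or with more than one trailing expression falls back to OR_ELSE.  */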
/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
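/* For illustration (GNU statement expression, hypothetical temporary
   name): voidifying the wrapper of "({ int i = f (); i + 1; })" rewrites
   its last value-producing expression to "retval = i + 1;", gives every
   enclosing wrapper void type, and returns "retval" for the caller to
   use as the value of the whole construct.  */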
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
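/* The generated pair looks roughly like this in a GIMPLE dump
   (illustrative temporary name):

     saved_stack.1 = __builtin_stack_save ();
     ... body that allocates VLAs ...
     __builtin_stack_restore (saved_stack.1);
*/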
/* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
   on POISON flag, shadow memory of a DECL variable.  The call will be
   put on location identified by IT iterator, where BEFORE flag drives
   position where the stmt will be put.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
   either poisons or unpoisons a DECL.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}
)
1303 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1304 depending on POISON flag. Created statement is appended
1305 to SEQ_P gimple sequence. */
1308 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1310 unsigned c
= variables
->elements ();
1314 auto_vec
<tree
> sorted_variables (c
);
1316 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1317 it
!= variables
->end (); ++it
)
1318 sorted_variables
.safe_push (*it
);
1320 sorted_variables
.qsort (sort_by_decl_uid
);
1324 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1326 asan_poison_variable (var
, poison
, seq_p
);
1328 /* Add use_after_scope_memory attribute for the variable in order
1329 to prevent re-written into SSA. */
1330 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1331 DECL_ATTRIBUTES (var
)))
1332 DECL_ATTRIBUTES (var
)
1333 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1335 DECL_ATTRIBUTES (var
));
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t));
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (TREE_CODE (key) == INDIRECT_REF)
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
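/* For illustration (hypothetical dump): a block such as

     { struct S s; char buf[n]; use (&s, buf); }

   is lowered to a GIMPLE_BIND whose body is wrapped in a try/finally:
   the stack pointer saved before the VLA allocation is restored in the
   cleanup, and "s = {CLOBBER};" marks the aggregate dead there so its
   stack slot can be reused.  */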
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are not in a conditional context, add PREDICT statement.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
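/* For illustration (hypothetical temporary name): "return a + b;" in a
   function returning int becomes

     D.2000 = a + b;
     return D.2000;

   with D.2000 cached in gimplify_ctxp->return_temp and reused by every
   other return statement in the same function.  */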
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
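/* For illustration (hypothetical dump): for "char buf[n];" this emits
   roughly

     buf.2 = __builtin_alloca_with_align (D.2001, ...);

   and sets DECL_VALUE_EXPR (buf) to *buf.2, so every later use of "buf"
   is rewritten as an indirection through the temporary pointer.  */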
/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}
/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: IS_VLA, 0 NO, 1 YES;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, IS_VLA):
   if IS_VLA is false, the LHS is the DECL itself,
   if IS_VLA is true, the LHS is a MEM_REF whose address is the pointer
   to this DECL.  */

static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      bool is_vla,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  location_t loc = EXPR_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));

  tree init_type_node
    = build_int_cst (integer_type_node, (int) init_type);
  tree is_vla_node
    = build_int_cst (integer_type_node, (int) is_vla);

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    is_vla_node);

  gimplify_assign (decl, call, seq_p);
}
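/* For illustration (assumed constants, following the comment above):
   with -ftrivial-auto-var-init=zero, a plain "int x;" yields something
   like

     x = .DEFERRED_INIT (4, 2, 0);

   i.e. size, init_type and is_vla; keeping the initialization as an
   internal call lets the uninitialized-use warnings still see "x" as
   uninitialized before the call is expanded into a real block
   initialization.  */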
/* Generate padding initialization for automatic variable DECL.
   C guarantees that brace-init with fewer initializers than members of
   the aggregate will initialize the rest of the aggregate as-if it were
   static initialization.  In turn static initialization guarantees
   that padding is initialized to zero.  So, we always initialize paddings
   to zeroes regardless of INIT_TYPE.
   To do the padding initialization, we insert a call to
   __builtin_clear_padding (&decl, 0, for_auto_init = true).
   Note, we add an additional dummy argument for __builtin_clear_padding,
   'for_auto_init' to distinguish whether this call is for automatic
   variable initialization or not.  */

static void
gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
				      gimple_seq *seq_p)
{
  tree addr_of_decl = NULL_TREE;
  bool for_auto_init = true;
  tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);

  if (is_vla)
    {
      /* The temporary address variable for this vla should be
	 created in gimplify_vla_decl.  */
      gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
      gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl)) == INDIRECT_REF);
      addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
    }
  else
    {
      mark_addressable (decl);
      addr_of_decl = build_fold_addr_expr (decl);
    }

  gimple *call = gimple_build_call (fn, 3, addr_of_decl,
				    build_zero_cst (TREE_TYPE (addr_of_decl)),
				    build_int_cst (integer_type_node,
						   (int) for_auto_init));
  gimplify_seq_add_stmt (seq_p, call);
}
/* Return true if the DECL needs to be automatically initialized by the
   compiler.  */

static bool
is_var_need_auto_init (tree decl)
{
  if (auto_var_p (decl)
      && (TREE_CODE (decl) != VAR_DECL
	  || !DECL_HARD_REGISTER (decl))
      && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
      && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
      && !is_empty_type (TREE_TYPE (decl)))
    return true;
  return false;
}
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;
      /* Check whether a decl has FE created VALUE_EXPR here BEFORE
	 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
	 If the decl has VALUE_EXPR that was created by FE (usually
	 C++ FE), it's a proxy variable, and FE already initialized
	 the VALUE_EXPR of it, we should not initialize it anymore.  */
      bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);

      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp
	  /* GNAT introduces temporaries to hold return values of calls in
	     initializers of variables defined in other units, so the
	     declaration of the variable is discarded completely.  We do not
	     want to issue poison calls for such dropped variables.  */
	  && (DECL_SEEN_IN_BIND_EXPR_P (decl)
	      || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      /* Clear TREE_READONLY if we really have an initialization.  */
	      if (!DECL_INITIAL (decl)
		  && !omp_privatize_by_reference (decl))
		TREE_READONLY (decl) = 0;
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
      /* When there is no explicit initializer, if the user requested,
	 we should insert an artificial initializer for this automatic
	 variable.  */
      else if (is_var_need_auto_init (decl)
	       && !decl_had_value_expr_p)
	{
	  gimple_add_init_for_auto_var (decl,
					flag_auto_var_init,
					is_vla,
					seq_p);
	  /* The expanding of a call to the above .DEFERRED_INIT will apply
	     block initialization to the whole space covered by this variable.
	     As a result, all the paddings will be initialized to zeroes
	     for zero initialization and 0xFE byte-repeatable patterns for
	     pattern initialization.
	     In order to make the paddings as zeroes for pattern init, we
	     should add a call to __builtin_clear_padding to clear the
	     paddings to zero, compatible with Clang.
	     We cannot insert this call if the variable is a gimple register
	     since __builtin_clear_padding will take the address of the
	     variable.  As a result, if a long double/_Complex long double
	     variable is spilled to the stack later, its padding remains
	     0xFE.  */
	  if (flag_auto_var_init == AUTO_INIT_PATTERN
	      && !is_gimple_reg (decl)
	      && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
	    gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
	}
    }

  return GS_ALL_DONE;
}
/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
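/* For illustration (hypothetical label name): "while (1) body;" reaches
   this point as a LOOP_EXPR and leaves as

     <D.2100>:
     body;
     goto <D.2100>;

   with an exit label appended after the goto only if the body contained
   an EXIT_EXPR that set gimplify_ctxp->exit_label.  */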
/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}
/* Callback for walk_gimple_seq.  */

static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_DEBUG:
      /* Ignore these.  We may generate them before declarations that
	 are never executed.  If there's something to warn about,
	 there will be non-debug stmts too, and we'll catch those.  */
      break;

    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }

  return NULL_TREE;
}
/* Possibly warn about unreachable statements between switch's controlling
   expression and the first case.  SEQ is the body of a switch expression.  */

static void
maybe_warn_switch_unreachable (gimple_seq seq)
{
  if (!warn_switch_unreachable
      /* This warning doesn't play well with Fortran when optimizations
	 are on.  */
      || lang_GNU_Fortran ()
      || seq == NULL)
    return;

  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
  gimple *stmt = (gimple *) wi.info;

  if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
    {
      if (gimple_code (stmt) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
	  && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
	/* Don't warn for compiler-generated gotos.  These occur
	   in Duff's devices, for example.  */;
      else
	warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
		    "statement will never be executed");
    }
}
/* A label entry that pairs label and a location.  */
struct label_entry
{
  tree label;
  location_t loc;
};

/* Find LABEL in vector of label entries VEC.  */

static struct label_entry *
find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
{
  unsigned int i;
  struct label_entry *l;

  FOR_EACH_VEC_ELT (*vec, i, l)
    if (l->label == label)
      return l;

  return NULL;
}

/* Return true if LABEL, a LABEL_DECL, represents a case label
   in a vector of labels CASES.  */

static bool
case_label_p (const vec<tree> *cases, tree label)
{
  unsigned int i;
  tree l;

  FOR_EACH_VEC_ELT (*cases, i, l)
    if (CASE_LABEL (l) == label)
      return true;

  return false;
}
/* Find the last nondebug statement in a scope STMT.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    case GIMPLE_DEBUG:
      gcc_unreachable ();

    default:
      return stmt;
    }
}
/* Collect interesting labels in LABELS and return the statement preceding
   another case label, or a user-defined label.  Store a location useful
   to give warnings at *PREVLOC (usually the location of the returned
   statement or of its surrounding scope).  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels,
			    location_t *prevloc)
{
  gimple *prev = NULL;

  *prevloc = UNKNOWN_LOCATION;
  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
	{
	  /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
	     which starts on a GIMPLE_SWITCH and ends with a break label.
	     Handle that as a single statement that can fall through.  */
	  gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
	  gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
	  gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
	  if (last
	      && gimple_code (first) == GIMPLE_SWITCH
	      && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		{
		  prev = bind;
		  gsi_next (gsi_p);
		  continue;
		}
	    }
	}
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		*prevloc = bind_loc;
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	       if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
	;
      else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));
  if (prev && gimple_has_location (prev))
    *prevloc = gimple_location (prev);
  return prev;
}
/* Return true if the switch fallthrough warning should occur.  LABEL is
   the label statement that we're falling through to.  */

static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      tree l;
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the branch starting at
     the subsequent case label immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi)
	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
    gsi_next_nondebug (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}
/* Callback for walk_gimple_seq.  */

static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next_nondebug (gsi_p);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	location_t prevloc;
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    auto_diagnostic_group d;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && prevloc != UNKNOWN_LOCATION)
	      warned_p = warning_at (prevloc,
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Warn when a switch case falls through.  */

static void
maybe_warn_implicit_fallthrough (gimple_seq seq)
{
  if (!warn_implicit_fallthrough)
    return;

  /* This warning is meant for C/C++/ObjC/ObjC++ only.  */
  if (!(lang_GNU_C ()
	|| lang_GNU_CXX ()
	|| lang_GNU_OBJC ()))
    return;

  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
}
/* Callback for walk_gimple_seq.  */

static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  gsi_remove (gsi_p, true);
	  if (gsi_end_p (*gsi_p))
	    {
	      *static_cast<location_t *>(wi->info) = gimple_location (stmt);
	      return integer_zero_node;
	    }

	  bool found = false;
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			 == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
		;
	      else if (!is_gimple_debug (stmt))
		/* Anything else is not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
		     "a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
/* Expand all FALLTHROUGH () calls in SEQ.  */

static void
expand_FALLTHROUGH (gimple_seq *seq_p)
{
  struct walk_stmt_info wi;
  location_t loc = UNKNOWN_LOCATION;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &loc;
  walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
  if (wi.callback_result == integer_zero_node)
    /* We've found [[fallthrough]]; at the end of a switch, which the C++
       standard says is ill-formed; see [dcl.attr.fallthrough].  */
    pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
	     "a case label or default label");
}
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      bool add_bind = false;
      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  if (old_in_switch_expr)
	    {
	      SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
	      add_bind = true;
	    }
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}
      else if (old_in_switch_expr)
	{
	  gimple *last = gimple_seq_last_stmt (switch_body_seq);
	  if (last && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		add_bind = true;
	    }
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
	 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
	 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
	 so that we can easily find the start and end of the switch
	 statement.  */
      if (add_bind)
	{
	  gimple_seq bind_body = NULL;
	  gimplify_seq_add_stmt (&bind_body, switch_stmt);
	  gimple_seq_add_seq (&bind_body, switch_body_seq);
	  gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
	  gimple_set_location (bind, EXPR_LOCATION (switch_expr));
	  gimplify_seq_add_stmt (pre_p, bind);
	}
      else
	{
	  gimplify_seq_add_stmt (pre_p, switch_stmt);
	  gimplify_seq_add_seq (pre_p, switch_body_seq);
	}
      labels.release ();
    }
  else
    gcc_unreachable ();

  return GS_ALL_DONE;
}
/* Gimplify the LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
	      == current_function_decl);

  tree label = LABEL_EXPR_LABEL (*expr_p);
  glabel *label_stmt = gimple_build_label (label);
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  gimplify_seq_add_stmt (pre_p, label_stmt);

  if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
						      NOT_TAKEN));
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
						      TAKEN));

  return GS_ALL_DONE;
}
/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification, in the
     diagnose_omp_blocks pass.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  tree label = CASE_LABEL (*expr_p);
  label_stmt = gimple_build_label (label);
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, label_stmt);

  if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
						      NOT_TAKEN));
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
						      TAKEN));

  return GS_ALL_DONE;
}
/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}
/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;
  return GS_OK;
}
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
      return GS_OK;
    }

  return GS_ALL_DONE;
}
/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_size = array_ref_element_size (t);
	      if (!is_gimple_min_invariant (elmt_size))
		{
		  elmt_size = unshare_expr (elmt_size);
		  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
		  tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

		  /* Divide the element size by the alignment of the element
		     type (above).  */
		  elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
					      elmt_size, factor);

		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = component_ref_field_offset (t);
	      if (!is_gimple_min_invariant (offset))
		{
		  offset = unshare_expr (offset);
		  tree field = TREE_OPERAND (t, 1);
		  tree factor
		    = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

		  /* Divide the offset by its alignment.  */
		  offset = size_binop_loc (loc, EXACT_DIV_EXPR,
					   offset, factor);

		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
      ret = MIN (ret, GS_OK);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      lhs = get_initialized_tmp_var (lhs, pre_p);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */

static void
maybe_with_size_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  tree size;

  /* If we've already wrapped this or the type is error_mark_node, we can't do
     anything.  */
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
      || type == error_mark_node)
    return;

  /* If the size isn't known or is a constant, we have nothing to do.  */
  size = TYPE_SIZE_UNIT (type);
  if (!size || poly_int_tree_p (size))
    return;

  /* Otherwise, make a WITH_SIZE_EXPR.  */
  size = unshare_expr (size);
  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
}
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
   gimplified to an SSA name.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
	      bool allow_ssa)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
	{
	  tree init = TARGET_EXPR_INITIAL (*arg_p);
	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    *arg_p = init;
	}
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
}
/* Don't fold inside offloading or taskreg regions: it can break code by
   adding decl references that weren't in the source.  We'll do it during
   omplower pass instead.  */

static bool
maybe_fold_stmt (gimple_stmt_iterator *gsi)
{
  struct gimplify_omp_ctx *ctx;
  for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
    if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
      return false;
    else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
      return false;
  /* Delay folding of builtins until the IL is in consistent state
     so the diagnostic machinery can do a better job.  */
  if (gimple_call_builtin_p (gsi_stmt (*gsi)))
    return false;

  return fold_stmt (gsi);
}
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}

      gcall *call = gimple_build_call_internal_vec (ifn, vargs);
      gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	/* If the call has been built for a variable-sized object, then we
	   want to restore the stack level when the enclosing BIND_EXPR is
	   exited to reclaim the allocated space; otherwise, we precisely
	   need to do the opposite and preserve the latest stack level.  */
	if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
	  gimplify_ctxp->save_stack = true;
	else
	  gimplify_ctxp->keep_stack = true;
	break;

      case BUILT_IN_VA_START:
	{
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}

      case BUILT_IN_EH_RETURN:
	cfun->calls_eh_return = true;
	break;

      case BUILT_IN_CLEAR_PADDING:
	if (call_expr_nargs (*expr_p) == 1)
	  {
	    /* Remember the original type of the argument in an internal
	       dummy second argument, as in GIMPLE pointer conversions are
	       useless.  Also mark this call as not for automatic
	       initialization in the internal dummy third argument.  */
	    p = CALL_EXPR_ARG (*expr_p, 0);
	    bool for_auto_init = false;
	    *expr_p
	      = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 3, p,
				     build_zero_cst (TREE_TYPE (p)),
				     build_int_cst (integer_type_node,
						    (int) for_auto_init));
	    return GS_OK;
	  }
	break;

      default:
	;
      }
  if (fndecl && fndecl_built_in_p (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  if (flag_openmp
      && fndecl
      && cfun
      && (cfun->curr_properties & PROP_gimple_any) == 0)
    {
      tree variant = omp_resolve_declare_variant (fndecl);
      if (variant != fndecl)
	CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     names there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
	   PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
	   PUSH_ARGS_REVERSED ? i-- : i++)
	{
	  enum gimplify_status t;

	  /* Avoid gimplifying the second argument to va_start, which needs to
	     be the plain PARM_DECL.  */
	  if ((i != 1) || !builtin_va_start_p)
	    {
	      t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p), ! returns_twice);

	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p), ! returns_twice);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p, fnptrtype);
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
/* If EXPR is a GOTO_EXPR, return it.  If it is a STATEMENT_LIST, skip
   any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
   statement, if it is the last one.  Otherwise, return NULL.  */

static tree
find_goto (tree expr)
{
  if (!expr)
    return NULL_TREE;

  if (TREE_CODE (expr) == GOTO_EXPR)
    return expr;

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return NULL_TREE;

  tree_stmt_iterator i = tsi_start (expr);

  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    tsi_next (&i);

  if (!tsi_one_before_end_p (i))
    return NULL_TREE;

  return find_goto (tsi_stmt (i));
}
/* Same as find_goto, except that it returns NULL if the destination
   is not a LABEL_DECL.  */

static tree
find_goto_label (tree expr)
{
  tree dest = find_goto (expr);
  if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
    return dest;
  return NULL_TREE;
}
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through. */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (rexpr_has_location (last))
	    SET_EXPR_LOCATION (t, rexpr_location (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
         if x is truth_value_p.  */
      if (fn
          && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
          && call_expr_nargs (call) == 2)
        {
          tree arg = CALL_EXPR_ARG (call, 0);
          if (arg)
            {
              if (TREE_CODE (arg) == NOP_EXPR
                  && TREE_TYPE (arg) == TREE_TYPE (call))
                arg = TREE_OPERAND (arg, 0);
              if (truth_value_p (TREE_CODE (arg)))
                {
                  arg = gimple_boolify (arg);
                  CALL_EXPR_ARG (call, 0)
                    = fold_convert_loc (loc, TREE_TYPE (call), arg);
                }
            }
        }
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
        TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
        {
        case annot_expr_ivdep_kind:
        case annot_expr_unroll_kind:
        case annot_expr_no_vector_kind:
        case annot_expr_vector_kind:
        case annot_expr_parallel_kind:
          TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        default:
          gcc_unreachable ();
        }

    default:
      if (COMPARISON_CLASS_P (expr))
        {
          /* These expressions always produce boolean results.  */
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        }
      /* Other expressions that get here must have boolean values, but
         might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
/* Return true if evaluating EXPR could trap.
   EXPR is GENERIC, while tree_could_trap_p can be called
   only on GIMPLE.  */

bool
generic_expr_could_trap_p (tree expr)
{
  unsigned i, n;

  if (!expr || is_gimple_val (expr))
    return false;

  if (!EXPR_P (expr) || tree_could_trap_p (expr))
    return true;

  n = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < n; i++)
    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
      return true;

  return false;
}
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)                       if (p)
     t1 = a;                      a;
   else            or           else
     t1 = b;                      b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
         temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
          && !TREE_ADDRESSABLE (type))
        {
          if (gimplify_ctxp->allow_rhs_cond_expr
              /* If either branch has side effects or could trap, it can't be
                 evaluated unconditionally.  */
              && !TREE_SIDE_EFFECTS (then_)
              && !generic_expr_could_trap_p (then_)
              && !TREE_SIDE_EFFECTS (else_)
              && !generic_expr_could_trap_p (else_))
            return gimplify_pure_cond_expr (expr_p, pre_p);

          tmp = create_tmp_var (type, "iftmp");
          result = tmp;
        }

      /* Otherwise, only create and copy references to the values.  */
      else
        {
          type = build_pointer_type (type);

          if (!VOID_TYPE_P (TREE_TYPE (then_)))
            then_ = build_fold_addr_expr_loc (loc, then_);

          if (!VOID_TYPE_P (TREE_TYPE (else_)))
            else_ = build_fold_addr_expr_loc (loc, else_);

          expr
            = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

          tmp = create_tmp_var (type, "iftmp");
          result = build_simple_mem_ref_loc (loc, tmp);
        }

      /* Build the new then clause, `tmp = then_;'.  But don't build the
         assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
        TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
        TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
        {
          *expr_p = expr;

          /* We can't rely on gimplify_expr to re-gimplify the expanded
             form properly, as cleanups might cause the target labels to be
             wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
             set up a conditional context.  */
          gimple_push_condition ();
          gimplify_stmt (expr_p, &seq);
          gimple_pop_condition (pre_p);
          gimple_seq_add_seq (pre_p, seq);

          return GS_ALL_DONE;
        }
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
                       is_gimple_condexpr_for_cond, fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
         have different locations, otherwise we end up with incorrect
         location information on the branches.  */
      && (optimize
          || !EXPR_HAS_LOCATION (expr)
          || !rexpr_has_location (label_true)
          || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
         have different locations, otherwise we end up with incorrect
         location information on the branches.  */
      && (optimize
          || !EXPR_HAS_LOCATION (expr)
          || !rexpr_has_location (label_false)
          || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
                                 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
                                 label_false);
  gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
  copy_warning (cond_stmt, COND_EXPR_COND (expr));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
         the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
          && !have_else_clause_p
          && TREE_OPERAND (expr, 2) != NULL_TREE)
        label_cont = label_true;
      else
        {
          gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
          have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
          /* For if (...) { code; } else {} or
             if (...) { code; } else goto label; or
             if (...) { code; return; } else { ... }
             label_cont isn't needed.  */
          if (!have_else_clause_p
              && TREE_OPERAND (expr, 2) != NULL_TREE
              && gimple_seq_may_fallthru (seq))
            {
              gimple *g;
              label_cont = create_artificial_label (UNKNOWN_LOCATION);

              g = gimple_build_goto (label_cont);

              /* GIMPLE_COND's are very low level; they have embedded
                 gotos.  This particular embedded goto should not be marked
                 with the location of the original COND_EXPR, as it would
                 correspond to the COND_EXPR's condition, not the ELSE or the
                 THEN arms.  To avoid marking it with the wrong location, flag
                 it as "no location".  */
              gimple_set_do_not_emit_location (g);

              gimplify_seq_add_stmt (&seq, g);
            }
        }
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
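/* As a rough illustration of the void case above (labels invented):
   "if (p) X; else Y;" is lowered to, schematically,

     if (p) goto L_true; else goto L_false;
   L_true:
     X;
     goto L_cont;
   L_false:
     Y;
   L_cont:

   while the value form first rewrites the arms into "iftmp.N = ..."
   INIT_EXPRs, as done at the top of the function.  */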
/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
   to be marked addressable.

   We cannot rely on such an expression being directly markable if a temporary
   has been created by the gimplification.  In this case, we create another
   temporary and initialize it with a copy, which will become a store after we
   mark it addressable.  This can happen if the front-end passed us something
   that it could not mark addressable yet, like a Fortran pass-by-reference
   parameter (int) floatvar.  */

static void
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
{
  while (handled_component_p (*expr_p))
    expr_p = &TREE_OPERAND (*expr_p, 0);
  if (is_gimple_reg (*expr_p))
    {
      /* Do not allow an SSA name as the temporary.  */
      tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
      DECL_NOT_GIMPLE_REG_P (var) = 1;
      *expr_p = var;
    }
}
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
  gimple_call_set_alloca_for_var (gs, true);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
              && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
        if (POINTER_TYPE_P (TREE_VALUE (type))
            && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
            && alias_sets_conflict_p (data->lhs_alias_set,
                                      get_alias_set
                                        (TREE_TYPE (TREE_VALUE (type)))))
          return t;
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p,
                            gimple_seq *post_p,
                            struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
         the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
        gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
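/* As a rough illustration (temporary name invented): for
   "a = (struct S) { .x = a.y };" the walk above detects that the
   initializer reads the object being stored to, so the overlapping value
   is forced into a temporary first, giving schematically
     D.1 = a.y;  a = {};  a.x = D.1;
   so the read happens before the object is clobbered.  */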
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
        goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
                                     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
                               tree value, tree array_elt_type,
                               gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
                 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourselves recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
                             pre_p, cleared);
  else if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
           != GS_ERROR)
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
                         gimple_build_cond (EQ_EXPR, var, upper,
                                            loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
                fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed out.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
                         gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
        continue;

      if (cleared && initializer_zerop (value))
        continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
         so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
         happen with calls to functions returning an empty type, which
         we shouldn't discard.  As a number of downstream passes don't
         expect sets of empty type fields, we rely on the gimplification of
         the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (!TREE_SIDE_EFFECTS (value)
          && TREE_CODE (purpose) == FIELD_DECL
          && is_empty_type (TREE_TYPE (purpose)))
        continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
         whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
        {
          tree lower = TREE_OPERAND (purpose, 0);
          tree upper = TREE_OPERAND (purpose, 1);

          /* If the lower bound is equal to upper, just treat it as if
             upper was the index.  */
          if (simple_cst_equal (lower, upper))
            purpose = upper;
          else
            {
              gimplify_init_ctor_eval_range (object, lower, upper, value,
                                             array_elt_type, pre_p, cleared);
              continue;
            }
        }

      if (array_elt_type)
        {
          /* Do not use bitsizetype for ARRAY_REF indices.  */
          if (TYPE_DOMAIN (TREE_TYPE (object)))
            purpose
              = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
                              purpose);
          cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
                         purpose, NULL_TREE, NULL_TREE);
        }
      else
        {
          gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
          cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
                         unshare_expr (object), purpose, NULL_TREE);
        }

      if (TREE_CODE (value) == CONSTRUCTOR
          && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
        gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
                                 pre_p, cleared);
      else
        {
          tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
          gimplify_and_add (init, pre_p);
        }
    }
}
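/* As a rough illustration (field names invented): "struct P p = { 1, 2 };"
   is decomposed by the loop above into the element assignments
     p.x = 1;  p.y = 2;
   with zero-valued elements skipped when the object was already
   block-cleared, and RANGE_EXPR keys expanded into the loop form shown
   before gimplify_init_ctor_eval_range.  */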
/* Return the appropriate RHS predicate for this LHS.  */

gimple_predicate
rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg (lhs))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Return the initial guess for an appropriate RHS predicate for this LHS,
   before the LHS has been gimplified.  */

static gimple_predicate
initial_rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg_type (TREE_TYPE (lhs)))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
                                bool (*gimple_test_f) (tree),
                                fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
           && !TREE_THIS_VOLATILE (decl)
           && init
           && (fallback & fb_lvalue) == 0
           && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
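/* As a rough illustration (temporary name invented): in
   "int *q = (int []) { 1, 2, 3 };" the COMPOUND_LITERAL_EXPR carries a
   DECL_EXPR for an anonymous array D.1; the code above queues
   "D.1 = { 1, 2, 3 };" in *PRE_P and rewrites the use to plain D.1,
   marking D.1 readonly when its address is never needed.  */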
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
        newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
        {
          tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
          tree decl = DECL_EXPR_DECL (decl_s);
          tree init = DECL_INITIAL (decl);

          if (!TREE_ADDRESSABLE (value)
              && !TREE_ADDRESSABLE (decl)
              && init
              && TREE_CODE (init) == CONSTRUCTOR)
            newval = optimize_compound_literals_in_ctor (init);
        }
      if (newval == value)
        continue;

      if (ctor == orig_ctor)
        {
          ctor = copy_node (orig_ctor);
          CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
          elts = CONSTRUCTOR_ELTS (ctor);
        }
      (*elts)[idx].value = newval;
    }
  return ctor;
}
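/* As a rough illustration: an aggregate initializer such as
     { .a = (struct S) { 1, 2 } }
   is rewritten by the recursion above into the equivalent nested form
     { .a = { 1, 2 } }
   provided neither the compound literal nor its decl has had its address
   taken; the copy-on-write of the CONSTRUCTOR keeps the original tree
   intact when nothing changes.  */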
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                           bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;
  bool cleared = false;
  bool is_empty_ctor = false;
  bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
        return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
        /* Use readonly data for initializers of this or smaller size
           regardless of the num_nonzero_elements / num_unique_nonzero_elements
           ratio.  */
        const HOST_WIDE_INT min_unique_size = 64;
        /* If num_nonzero_elements / num_unique_nonzero_elements ratio
           is smaller than this, use readonly data.  */
        const int unique_nonzero_ratio = 8;
        /* True if a single access of the object must be ensured.  This is the
           case if the target is volatile, the type is non-addressable and more
           than one field needs to be assigned.  */
        const bool ensure_single_access
          = TREE_THIS_VOLATILE (object)
            && !TREE_ADDRESSABLE (type)
            && vec_safe_length (elts) > 1;
        struct gimplify_init_ctor_preeval_data preeval_data;
        HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
        HOST_WIDE_INT num_unique_nonzero_elements;
        bool complete_p, valid_const_initializer;

        /* Aggregate types must lower constructors to initialization of
           individual elements.  The exception is that a CONSTRUCTOR node
           with no elements indicates zero-initialization of the whole.  */
        if (vec_safe_is_empty (elts))
          {
            if (notify_temp_creation)
              return GS_OK;
            is_empty_ctor = true;
            break;
          }

        /* Fetch information about the constructor to direct later processing.
           We might want to make static versions of it in various cases, and
           can only do so if it is known to be a valid constant
           initializer.  */
        valid_const_initializer
          = categorize_ctor_elements (ctor, &num_nonzero_elements,
                                      &num_unique_nonzero_elements,
                                      &num_ctor_elements, &complete_p);

        /* If a const aggregate variable is being initialized, then it
           should never be a loss to promote the variable to be static.  */
        if (valid_const_initializer
            && num_nonzero_elements > 1
            && TREE_READONLY (object)
            && VAR_P (object)
            && !DECL_REGISTER (object)
            && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
            /* For ctors that have many repeated nonzero elements
               represented through RANGE_EXPRs, prefer initializing
               those through runtime loops over copies of large amounts
               of data from readonly data section.  */
            && (num_unique_nonzero_elements
                > num_nonzero_elements / unique_nonzero_ratio
                || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
                    <= (unsigned HOST_WIDE_INT) min_unique_size)))
          {
            if (notify_temp_creation)
              return GS_ERROR;

            DECL_INITIAL (object) = ctor;
            TREE_STATIC (object) = 1;
            if (!DECL_NAME (object))
              DECL_NAME (object) = create_tmp_var_name ("C");
            walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

            /* ??? C++ doesn't automatically append a .<number> to the
               assembler name, and even when it does, it looks at FE private
               data structures to figure out what that number should be,
               which are not set for this variable.  I suppose this is
               important for local statics for inline functions, which aren't
               "local" in the object file sense.  So in order to get a unique
               TU-local symbol, we must invoke the lhd version now.  */
            lhd_set_decl_assembler_name (object);

            *expr_p = NULL_TREE;
            break;
          }

        /* If there are "lots" of initialized elements, even discounting
           those that are not address constants (and thus *must* be
           computed at runtime), then partition the constructor into
           constant and non-constant parts.  Block copy the constant
           parts in, then generate code for the non-constant parts.  */
        /* TODO.  There's code in cp/typeck.c to do this.  */

        if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
          /* store_constructor will ignore the clearing of variable-sized
             objects.  Initializers for such objects must explicitly set
             every field that needs to be set.  */
          cleared = false;
        else if (!complete_p)
          /* If the constructor isn't complete, clear the whole object
             beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

             ??? This ought not to be needed.  For any element not present
             in the initializer, we should simply set them to zero.  Except
             we'd need to *find* the elements that are not present, and that
             requires trickery to avoid quadratic compile-time behavior in
             large cases or excessive memory use in small cases.  */
          cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
        else if (num_ctor_elements - num_nonzero_elements
                 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
                 && num_nonzero_elements < num_ctor_elements / 4)
          /* If there are "lots" of zeros, it's more efficient to clear
             the memory and then set the nonzero elements.  */
          cleared = true;
        else if (ensure_single_access && num_nonzero_elements == 0)
          /* If a single access to the target must be ensured and all elements
             are zero, then it's optimal to clear whatever their number.  */
          cleared = true;
        else
          cleared = false;

        /* If there are "lots" of initialized elements, and all of them
           are valid address constants, then the entire initializer can
           be dropped to memory, and then memcpy'd out.  Don't do this
           for sparse arrays, though, as it's more efficient to follow
           the standard CONSTRUCTOR behavior of memset followed by
           individual element initialization.  Also don't do this for small
           all-zero initializers (which aren't big enough to merit
           clearing), and don't try to make bitwise copies of
           TREE_ADDRESSABLE types.  */
        if (valid_const_initializer
            && complete_p
            && !(cleared || num_nonzero_elements == 0)
            && !TREE_ADDRESSABLE (type))
          {
            HOST_WIDE_INT size = int_size_in_bytes (type);
            unsigned int align;

            /* ??? We can still get unbounded array types, at least
               from the C++ front end.  This seems wrong, but attempt
               to work around it for now.  */
            if (size < 0)
              {
                size = int_size_in_bytes (TREE_TYPE (object));
                if (size >= 0)
                  TREE_TYPE (ctor) = type = TREE_TYPE (object);
              }

            /* Find the maximum alignment we can assume for the object.  */
            /* ??? Make use of DECL_OFFSET_ALIGN.  */
            if (DECL_P (object))
              align = DECL_ALIGN (object);
            else
              align = TYPE_ALIGN (type);

            /* Do a block move either if the size is so small as to make
               each individual move a sub-unit move on average, or if it
               is so large as to make individual moves inefficient.  */
            if (size > 0
                && num_nonzero_elements > 1
                /* For ctors that have many repeated nonzero elements
                   represented through RANGE_EXPRs, prefer initializing
                   those through runtime loops over copies of large amounts
                   of data from readonly data section.  */
                && (num_unique_nonzero_elements
                    > num_nonzero_elements / unique_nonzero_ratio
                    || size <= min_unique_size)
                && (size < num_nonzero_elements
                    || !can_move_by_pieces (size, align)))
              {
                if (notify_temp_creation)
                  return GS_ERROR;

                walk_tree (&ctor, force_labels_r, NULL, NULL);
                ctor = tree_output_constant_def (ctor);
                if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
                  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
                TREE_OPERAND (*expr_p, 1) = ctor;

                /* This is no longer an assignment of a CONSTRUCTOR, but
                   we still may have processing to do on the LHS.  So
                   pretend we didn't do anything here to let that happen.  */
                return GS_UNHANDLED;
              }
          }

        /* If a single access to the target must be ensured and there are
           nonzero elements or the zero elements are not assigned en masse,
           initialize the target from a temporary.  */
        if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
          {
            if (notify_temp_creation)
              return GS_ERROR;

            tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
            TREE_OPERAND (*expr_p, 0) = temp;
            *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                              *expr_p,
                              build2 (MODIFY_EXPR, void_type_node,
                                      object, temp));
            break;
          }

        if (notify_temp_creation)
          return GS_OK;

        /* If there are nonzero elements and if needed, pre-evaluate to capture
           elements overlapping with the lhs into temporaries.  We must do this
           before clearing to fetch the values before they are zeroed-out.  */
        if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
          {
            preeval_data.lhs_base_decl = get_base_address (object);
            if (!DECL_P (preeval_data.lhs_base_decl))
              preeval_data.lhs_base_decl = NULL;
            preeval_data.lhs_alias_set = get_alias_set (object);

            gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
                                        pre_p, post_p, &preeval_data);
          }

        bool ctor_has_side_effects_p
          = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

        if (cleared)
          {
            /* Zap the CONSTRUCTOR element list, which simplifies this case.
               Note that we still have to gimplify, in order to handle the
               case of variable sized types.  Avoid shared tree structures.  */
            CONSTRUCTOR_ELTS (ctor) = NULL;
            TREE_SIDE_EFFECTS (ctor) = 0;
            object = unshare_expr (object);
            gimplify_stmt (expr_p, pre_p);
          }

        /* If we have not block cleared the object, or if there are nonzero
           elements in the constructor, or if the constructor has side effects,
           add assignments to the individual scalar fields of the object.  */
        if (!cleared
            || num_nonzero_elements > 0
            || ctor_has_side_effects_p)
          gimplify_init_ctor_eval (object, elts, pre_p, cleared);

        *expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
        tree r, i;

        if (notify_temp_creation)
          return GS_OK;

        /* Extract the real and imaginary parts out of the ctor.  */
        gcc_assert (elts->length () == 2);
        r = (*elts)[0].value;
        i = (*elts)[1].value;
        if (r == NULL || i == NULL)
          {
            tree zero = build_zero_cst (TREE_TYPE (type));
            if (r == NULL)
              r = zero;
            if (i == NULL)
              i = zero;
          }

        /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
           represent creation of a complex value.  */
        if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
          {
            ctor = build_complex (type, r, i);
            TREE_OPERAND (*expr_p, 1) = ctor;
          }
        else
          {
            ctor = build2 (COMPLEX_EXPR, type, r, i);
            TREE_OPERAND (*expr_p, 1) = ctor;
            ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
                                 pre_p, post_p,
                                 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
                                 fb_rvalue);
          }
      }
      break;

    case VECTOR_TYPE:
      {
        unsigned HOST_WIDE_INT ix;
        constructor_elt *ce;

        if (notify_temp_creation)
          return GS_OK;

        /* Go ahead and simplify constant constructors to VECTOR_CST.  */
        if (TREE_CONSTANT (ctor))
          {
            bool constant_p = true;
            tree value;

            /* Even when ctor is constant, it might contain non-*_CST
               elements, such as addresses or trapping values like
               1.0/0.0 - 1.0/0.0.  Such expressions don't belong
               in VECTOR_CST nodes.  */
            FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
              if (!CONSTANT_CLASS_P (value))
                {
                  constant_p = false;
                  break;
                }

            if (constant_p)
              {
                TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
                break;
              }

            TREE_CONSTANT (ctor) = 0;
          }

        /* Vector types use CONSTRUCTOR all the way through gimple
           compilation as a general initializer.  */
        FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
          {
            enum gimplify_status tret;
            tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
                                  fb_rvalue);
            if (tret == GS_ERROR)
              ret = GS_ERROR;
            else if (TREE_STATIC (ctor)
                     && !initializer_constant_valid_p (ce->value,
                                                       TREE_TYPE (ce->value)))
              TREE_STATIC (ctor) = 0;
          }
        recompute_constructor_flags (ctor);
        if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
        lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      ret = GS_OK;
    }
  else
    {
      *expr_p = NULL;
      ret = GS_ALL_DONE;
    }

  /* If the user requests to initialize automatic variables, we
     should initialize paddings inside the variable.  Add a call to
     __builtin_clear_padding (&object, 0, for_auto_init = true) to
     initialize the paddings of object always to zero regardless of
     INIT_TYPE.  Note, we will not insert this call if the aggregate
     variable has been completely cleared already or it's initialized
     with an empty constructor.  We cannot insert this call if the
     variable is a gimple register since __builtin_clear_padding will take
     the address of the variable.  As a result, if a long double/_Complex long
     double variable will be spilled into stack later, its padding cannot
     be cleared with __builtin_clear_padding.  We should clear its padding
     when it is spilled into memory.  */
  if (is_init_expr
      && !is_gimple_reg (object)
      && clear_padding_type_may_have_padding_p (type)
      && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
          || !AGGREGATE_TYPE_P (type))
      && is_var_need_auto_init (object))
    gimple_add_padding_init_for_auto_var (object, false, pre_p);

  return ret;
}
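/* As a rough illustration of the static promotion above: for a local
     const int tbl[4] = { 1, 2, 3, 4 };
   the CONSTRUCTOR is a valid constant initializer with several nonzero
   elements, so the variable is made TREE_STATIC with the CONSTRUCTOR as
   its DECL_INITIAL and ends up in readonly data, instead of being
   assembled element by element at runtime.  */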
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  return gimple_fold_indirect_ref (t);
}
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
                          gimple_seq *pre_p, gimple_seq *post_p,
                          bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
        {
        case VAR_DECL:
          /* If we're assigning from a read-only variable initialized with
             a constructor and not volatile, do the direct assignment from
             the constructor, but only if the target is not volatile either
             since this latter assignment might end up being done on a per
             field basis.  However, if the target is volatile and the type
             is aggregate and non-addressable, gimplify_init_constructor
             knows that it needs to ensure a single access to the target
             and it will return GS_OK only in this case.  */
          if (TREE_READONLY (*from_p)
              && DECL_INITIAL (*from_p)
              && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
              && !TREE_THIS_VOLATILE (*from_p)
              && (!TREE_THIS_VOLATILE (*to_p)
                  || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
                      && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
            {
              tree old_from = *from_p;
              enum gimplify_status subret;

              /* Move the constructor into the RHS.  */
              *from_p = unshare_expr (DECL_INITIAL (*from_p));

              /* Let's see if gimplify_init_constructor will need to put
                 it in memory.  */
              subret = gimplify_init_constructor (expr_p, NULL, NULL,
                                                  false, true);
              if (subret == GS_ERROR)
                {
                  /* If so, revert the change.  */
                  *from_p = old_from;
                }
              else
                {
                  ret = GS_OK;
                  changed = true;
                }
            }
          break;
        case INDIRECT_REF:
          {
            /* If we have code like

                 *(const A*)(A*)&x

               where the type of "x" is a (possibly cv-qualified variant
               of "A"), treat the entire expression as identical to "x".
               This kind of code arises in C++ when an object is bound
               to a const reference, and if "x" is a TARGET_EXPR we want
               to take advantage of the optimization below.  */
            bool volatile_p = TREE_THIS_VOLATILE (*from_p);
            tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
            if (t)
              {
                if (TREE_THIS_VOLATILE (t) != volatile_p)
                  {
                    if (DECL_P (t))
                      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
                                                    build_fold_addr_expr (t));
                    if (REFERENCE_CLASS_P (t))
                      TREE_THIS_VOLATILE (t) = volatile_p;
                  }
                *from_p = t;
                ret = GS_OK;
                changed = true;
              }
            break;
          }

        case TARGET_EXPR:
          {
            /* If we are initializing something from a TARGET_EXPR, strip the
               TARGET_EXPR and initialize it directly, if possible.  This can't
               be done if the initializer is void, since that implies that the
               temporary is set in some non-trivial way.

               ??? What about code that pulls out the temp and uses it
               elsewhere? I think that such code never uses the TARGET_EXPR as
               an initializer.  If I'm wrong, we'll die because the temp won't
               have any RTL.  In that case, I guess we'll need to replace
               references somehow.  */
            tree init = TARGET_EXPR_INITIAL (*from_p);

            if (init
                && (TREE_CODE (*expr_p) != MODIFY_EXPR
                    || !TARGET_EXPR_NO_ELIDE (*from_p))
                && !VOID_TYPE_P (TREE_TYPE (init)))
              {
                *from_p = init;
                ret = GS_OK;
                changed = true;
              }
          }
          break;

        case COMPOUND_EXPR:
          /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
             caught.  */
          gimplify_compound_expr (from_p, pre_p, true);
          ret = GS_OK;
          changed = true;
          break;

        case CONSTRUCTOR:
          /* If we already made some changes, let the front end have a
             crack at this before we break it down.  */
          if (ret != GS_UNHANDLED)
            break;

          /* If we're initializing from a CONSTRUCTOR, break this into
             individual MODIFY_EXPRs.  */
          ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
                                           false);
          return ret;

        case COND_EXPR:
          /* If we're assigning to a non-register type, push the assignment
             down into the branches.  This is mandatory for ADDRESSABLE types,
             since we cannot generate temporaries for such, but it saves a
             copy in other cases as well.  */
          if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
            {
              /* This code should mirror the code in gimplify_cond_expr. */
              enum tree_code code = TREE_CODE (*expr_p);
              tree cond = *from_p;
              tree result = *to_p;

              ret = gimplify_expr (&result, pre_p, post_p,
                                   is_gimple_lvalue, fb_lvalue);
              if (ret != GS_ERROR)
                {
                  /* If we are going to write RESULT more than once, clear
                     TREE_READONLY flag, otherwise we might incorrectly promote
                     the variable to static const and initialize it at compile
                     time in one of the branches.  */
                  if (VAR_P (result)
                      && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
                      && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
                    TREE_READONLY (result) = 0;
                  if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
                    TREE_OPERAND (cond, 1)
                      = build2 (code, void_type_node, result,
                                TREE_OPERAND (cond, 1));
                  if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
                    TREE_OPERAND (cond, 2)
                      = build2 (code, void_type_node, unshare_expr (result),
                                TREE_OPERAND (cond, 2));

                  TREE_TYPE (cond) = void_type_node;
                  recalculate_side_effects (cond);

                  if (want_value)
                    {
                      gimplify_and_add (cond, pre_p);
                      *expr_p = unshare_expr (result);
                    }
                  else
                    *expr_p = cond;
                  return GS_OK;
                }
            }
          break;

        case CALL_EXPR:
          /* For calls that return in memory, give *to_p as the CALL_EXPR's
             return slot so that we don't generate a temporary.  */
          if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
              && aggregate_value_p (*from_p, *from_p))
            {
              bool use_target;

              if (!(rhs_predicate_for (*to_p))(*from_p))
                /* If we need a temporary, *to_p isn't accurate.  */
                use_target = false;
              /* It's OK to use the return slot directly unless it's an NRV. */
              else if (TREE_CODE (*to_p) == RESULT_DECL
                       && DECL_NAME (*to_p) == NULL_TREE
                       && needs_to_live_in_memory (*to_p))
                use_target = true;
              else if (is_gimple_reg_type (TREE_TYPE (*to_p))
                       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
                /* Don't force regs into memory.  */
                use_target = false;
              else if (TREE_CODE (*expr_p) == INIT_EXPR)
                /* It's OK to use the target directly if it's being
                   initialized.  */
                use_target = true;
              else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
                       != INTEGER_CST)
                /* Always use the target and thus RSO for variable-sized types.
                   GIMPLE cannot deal with a variable-sized assignment
                   embedded in a call statement.  */
                use_target = true;
              else if (TREE_CODE (*to_p) != SSA_NAME
                       && (!is_gimple_variable (*to_p)
                           || needs_to_live_in_memory (*to_p)))
                /* Don't use the original target if it's already addressable;
                   if its address escapes, and the called function uses the
                   NRV optimization, a conforming program could see *to_p
                   change before the called function returns; see c++/19317.
                   When optimizing, the return_slot pass marks more functions
                   as safe after we have escape info.  */
                use_target = false;
              else
                use_target = true;

              if (use_target)
                {
                  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
                  mark_addressable (*to_p);
                }
            }
          break;

        case WITH_SIZE_EXPR:
          /* Likewise for calls that return an aggregate of non-constant size,
             since we would not be able to generate a temporary at all.  */
          if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
            {
              *from_p = TREE_OPERAND (*from_p, 0);
              /* We don't change ret in this case because the
                 WITH_SIZE_EXPR might have been added in
                 gimplify_modify_expr, so returning GS_OK would lead to an
                 infinite loop.  */
              changed = true;
            }
          break;

          /* If we're initializing from a container, push the initialization
             inside it.  */
        case CLEANUP_POINT_EXPR:
        case BIND_EXPR:
        case STATEMENT_LIST:
          {
            tree wrap = *from_p;
            tree t;

            ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
                                 fb_lvalue);
            if (ret != GS_ERROR)
              ret = GS_OK;

            t = voidify_wrapper_expr (wrap, *expr_p);
            gcc_assert (t == *expr_p);

            if (want_value)
              {
                gimplify_and_add (wrap, pre_p);
                *expr_p = unshare_expr (*to_p);
              }
            else
              *expr_p = wrap;
            return GS_OK;
          }

        case NOP_EXPR:
          /* Pull out compound literal expressions from a NOP_EXPR.
             Those are created in the C FE to drop qualifiers during
             lvalue conversion.  */
          if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
              && tree_ssa_useless_type_conversion (*from_p))
            {
              *from_p = TREE_OPERAND (*from_p, 0);
              ret = GS_OK;
              changed = true;
            }
          break;

        case COMPOUND_LITERAL_EXPR:
          {
            tree complit = TREE_OPERAND (*expr_p, 1);
            tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
            tree decl = DECL_EXPR_DECL (decl_s);
            tree init = DECL_INITIAL (decl);

            /* struct T x = (struct T) { 0, 1, 2 } can be optimized
               into struct T x = { 0, 1, 2 } if the address of the
               compound literal has never been taken.  */
            if (!TREE_ADDRESSABLE (complit)
                && !TREE_ADDRESSABLE (decl)
                && init)
              {
                *expr_p = copy_node (*expr_p);
                TREE_OPERAND (*expr_p, 1) = init;
                return GS_OK;
              }
          }
          break;

        default:
          break;
        }
    }
  while (changed);

  return ret;
}
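/* As a rough illustration of the COND_EXPR case above: for an aggregate
   assignment "dst = p ? a : b;" the store is pushed into the arms,
   schematically
     if (p) dst = a; else dst = b;
   which avoids requiring a temporary of non-register (possibly
   TREE_ADDRESSABLE) type for the conditional's value.  */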
/* Return true if T looks like a valid GIMPLE statement.  */

static bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OACC_PARALLEL:
    case OACC_KERNELS:
    case OACC_SERIAL:
    case OACC_DATA:
    case OACC_HOST_DATA:
    case OACC_DECLARE:
    case OACC_UPDATE:
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OACC_CACHE:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OACC_LOOP:
    case OMP_SCAN:
    case OMP_SCOPE:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_MASKED:
    case OMP_TASKGROUP:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
    case OMP_TASKLOOP:
    case OMP_TEAMS:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}
/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.

   IMPORTANT NOTE: This promotion is performed by introducing a load of the
   other, unmodified part of the complex object just before the total store.
   As a consequence, if the object is still uninitialized, an undefined value
   will be loaded into a register, which may result in a spurious exception
   if the register is floating-point and the value happens to be a signaling
   NaN for example.  Then the fully-fledged complex operations lowering pass
   followed by a DCE pass are necessary in order to fix things up.  */

static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
                                   bool want_value)
{
  enum tree_code code, ocode;
  tree lhs, rhs, new_rhs, other, realpart, imagpart;

  lhs = TREE_OPERAND (*expr_p, 0);
  rhs = TREE_OPERAND (*expr_p, 1);
  code = TREE_CODE (lhs);
  lhs = TREE_OPERAND (lhs, 0);

  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
  other = build1 (ocode, TREE_TYPE (rhs), lhs);
  suppress_warning (other);
  other = get_formal_tmp_var (other, pre_p);

  realpart = code == REALPART_EXPR ? rhs : other;
  imagpart = code == REALPART_EXPR ? other : rhs;

  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
  else
    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);

  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
  *expr_p = (want_value) ? rhs : NULL_TREE;

  return GS_ALL_DONE;
}
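/* As a rough illustration (temporary name invented): "__real__ c = x;"
   with c a gimple register becomes a total store, schematically
     D.1 = __imag__ c;  c = COMPLEX_EXPR <x, D.1>;
   loading the unmodified half just before the full assignment, with the
   caveats about uninitialized objects spelled out in the note above.  */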
5862 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5868 PRE_P points to the list where side effects that must happen before
5869 *EXPR_P should be stored.
5871 POST_P points to the list where side effects that must happen after
5872 *EXPR_P should be stored.
5874 WANT_VALUE is nonzero iff we want to use the value of this expression
5875 in another expression. */
5877 static enum gimplify_status
5878 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5881 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
5882 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
5883 enum gimplify_status ret
= GS_UNHANDLED
;
5885 location_t loc
= EXPR_LOCATION (*expr_p
);
5886 gimple_stmt_iterator gsi
;
5888 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
5889 || TREE_CODE (*expr_p
) == INIT_EXPR
);
5891 /* Trying to simplify a clobber using normal logic doesn't work,
5892 so handle it here. */
5893 if (TREE_CLOBBER_P (*from_p
))
5895 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5896 if (ret
== GS_ERROR
)
5898 gcc_assert (!want_value
);
5899 if (!VAR_P (*to_p
) && TREE_CODE (*to_p
) != MEM_REF
)
5901 tree addr
= get_initialized_tmp_var (build_fold_addr_expr (*to_p
),
5903 *to_p
= build_simple_mem_ref_loc (EXPR_LOCATION (*to_p
), addr
);
5905 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
5910 /* Insert pointer conversions required by the middle-end that are not
5911 required by the frontend. This fixes middle-end type checking for
5912 for example gcc.dg/redecl-6.c. */
5913 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
5915 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
5916 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
5917 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
5920 /* See if any simplifications can be done based on what the RHS is. */
5921 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5923 if (ret
!= GS_UNHANDLED
)
5926 /* For empty types only gimplify the left hand side and right hand
5927 side as statements and throw away the assignment. Do this after
5928 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5930 if (is_empty_type (TREE_TYPE (*from_p
))
5932 /* Don't do this for calls that return addressable types, expand_call
5933 relies on those having a lhs. */
5934 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p
))
5935 && TREE_CODE (*from_p
) == CALL_EXPR
))
5937 gimplify_stmt (from_p
, pre_p
);
5938 gimplify_stmt (to_p
, pre_p
);
5939 *expr_p
= NULL_TREE
;
5943 /* If the value being copied is of variable width, compute the length
5944 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5945 before gimplifying any of the operands so that we can resolve any
5946 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5947 the size of the expression to be copied, not of the destination, so
5948 that is what we must do here. */
5949 maybe_with_size_expr (from_p
);
5951 /* As a special case, we have to temporarily allow for assignments
5952 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5953 a toplevel statement, when gimplifying the GENERIC expression
5954 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5955 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5957 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5958 prevent gimplify_expr from trying to create a new temporary for
5959 foo's LHS, we tell it that it should only gimplify until it
5960 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5961 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5962 and all we need to do here is set 'a' to be its LHS. */
5964 /* Gimplify the RHS first for C++17 and bug 71104. */
5965 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
5966 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
5967 if (ret
== GS_ERROR
)
5970 /* Then gimplify the LHS. */
5971 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5972 twice we have to make sure to gimplify into non-SSA as otherwise
5973 the abnormal edge added later will make those defs not dominate
5975 ??? Technically this applies only to the registers used in the
5976 resulting non-register *TO_P. */
5977 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
5979 && TREE_CODE (*from_p
) == CALL_EXPR
5980 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
5981 gimplify_ctxp
->into_ssa
= false;
5982 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5983 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
5984 if (ret
== GS_ERROR
)
5987 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5988 guess for the predicate was wrong. */
5989 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
5990 if (final_pred
!= initial_pred
)
5992 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
5993 if (ret
== GS_ERROR
)
5997 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5998 size as argument to the call. */
5999 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
6001 tree call
= TREE_OPERAND (*from_p
, 0);
6002 tree vlasize
= TREE_OPERAND (*from_p
, 1);
6004 if (TREE_CODE (call
) == CALL_EXPR
6005 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
6007 int nargs
= call_expr_nargs (call
);
6008 tree type
= TREE_TYPE (call
);
6009 tree ap
= CALL_EXPR_ARG (call
, 0);
6010 tree tag
= CALL_EXPR_ARG (call
, 1);
6011 tree aptag
= CALL_EXPR_ARG (call
, 2);
6012 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
6016 TREE_OPERAND (*from_p
, 0) = newcall
;

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
                                  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
        return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
        {
          *from_p = from;
          return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
                                                 pre_p);
        }
    }
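
  /* Schematically (an illustrative sketch, not an actual dump): for a
     variable-sized copy between two lvalues such as '*p = *q' where the
     RHS is wrapped in WITH_SIZE_EXPR <*q, n>, the assignment is rewritten
     here to roughly

        __builtin_memcpy (p, q, n);

     and an empty-CONSTRUCTOR RHS similarly becomes a __builtin_memset of
     the destination.  */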

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
        DECL_NAME (*from_p)
          = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
         instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
        {
          /* Gimplify internal functions created in the FEs.  */
          int nargs = call_expr_nargs (*from_p), i;
          enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
          auto_vec<tree> vargs (nargs);

          for (i = 0; i < nargs; i++)
            {
              gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
                            EXPR_LOCATION (*from_p));
              vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
            }
          call_stmt = gimple_build_call_internal_vec (ifn, vargs);
          gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
          gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
        }
      else
        {
          tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
          CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
          STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
          tree fndecl = get_callee_fndecl (*from_p);
          if (fndecl
              && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
              && call_expr_nargs (*from_p) == 3)
            call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
                                                    CALL_EXPR_ARG (*from_p, 0),
                                                    CALL_EXPR_ARG (*from_p, 1),
                                                    CALL_EXPR_ARG (*from_p, 2));
          else
            call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
        }
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
        gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
        /* The above is somewhat premature, avoid ICEing later for a
           SSA name w/o a definition.  We may have uses in the GIMPLE IL.
           ??? This doesn't make it a default-def.  */
        SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
        copy_warning (assign, *from_p);
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
                  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL_TREE;

  return GS_ALL_DONE;
}

/* Gimplify a comparison between two variable-sized objects.  Do this
   with a call to BUILT_IN_MEMCMP.  */

static enum gimplify_status
gimplify_variable_sized_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree t, arg, dest, src, expr;

  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
  arg = unshare_expr (arg);
  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
  src = build_fold_addr_expr_loc (loc, op1);
  dest = build_fold_addr_expr_loc (loc, op0);
  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
  t = build_call_expr_loc (loc, t, 3, dest, src, arg);

  expr
    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
  SET_EXPR_LOCATION (expr, loc);
  *expr_p = expr;

  return GS_OK;
}
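
/* For example (an illustrative sketch): a comparison OP0 == OP1 between
   two variable-sized objects is lowered by the function above to roughly

     __builtin_memcmp (&OP0, &OP1, sizeof_op0) == 0

   where 'sizeof_op0' is TYPE_SIZE_UNIT of OP0's type with any
   PLACEHOLDER_EXPRs substituted.  */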

/* Gimplify a comparison between two aggregate objects of integral scalar
   mode as a comparison between the bitwise equivalent scalar values.  */

static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);

  tree type = TREE_TYPE (op0);
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);

  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);

  *expr_p
    = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);

  return GS_OK;
}
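
/* For example (a sketch, target-dependent): if 'struct S { short hi, lo; }'
   has integral SImode, then 's1 == s2' becomes roughly

     VIEW_CONVERT_EXPR<unsigned int>(s1) == VIEW_CONVERT_EXPR<unsigned int>(s2)

   i.e. a single scalar comparison of the bitwise images.  */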

/* Gimplify an expression sequence.  This function gimplifies each
   expression and rewrites the original expression with the last
   expression of the sequence in GIMPLE form.

   PRE_P points to the list where the side effects for all the
   expressions in the sequence will be emitted.

   WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */

static enum gimplify_status
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree t = *expr_p;

  do
    {
      tree *sub_p = &TREE_OPERAND (t, 0);

      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
        gimplify_compound_expr (sub_p, pre_p, false);
      else
        gimplify_stmt (sub_p, pre_p);

      t = TREE_OPERAND (t, 1);
    }
  while (TREE_CODE (t) == COMPOUND_EXPR);

  *expr_p = t;
  if (want_value)
    return GS_OK;
  else
    {
      gimplify_stmt (expr_p, pre_p);
      return GS_ALL_DONE;
    }
}
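
/* For example (illustrative): gimplifying

     x = (a = 1, b = 2, a + b);

   emits 'a = 1;' and 'b = 2;' to *PRE_P and rewrites the COMPOUND_EXPR to
   its last operand 'a + b', which the caller then gimplifies in place.  */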

/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
   gimplify.  After gimplification, EXPR_P will point to a new temporary
   that holds the original value of the SAVE_EXPR node.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  enum gimplify_status ret = GS_ALL_DONE;
  tree val;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  if (TREE_TYPE (val) == error_mark_node)
    return GS_ERROR;

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
    {
      /* The operand may be a void-valued expression.  It is
         being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
        {
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                               is_gimple_stmt, fb_none);
          val = NULL;
        }
      else
        /* The temporary may not be an SSA name as later abnormal and EH
           control flow may invalidate use/def domination.  When in SSA
           form then assume there are no such issues and SAVE_EXPRs only
           appear via GENERIC foldings.  */
        val = get_initialized_tmp_var (val, pre_p, post_p,
                                       gimple_in_ssa_p (cfun));

      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
    }

  *expr_p = val;
  return ret;
}
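
/* For example (a sketch; D.1234 is a made-up temporary name): the first
   occurrence of SAVE_EXPR <n * 4> emits

     D.1234 = n * 4;

   to *PRE_P and rewrites the operand to D.1234; subsequent occurrences of
   the same (shared) SAVE_EXPR reuse D.1234 because SAVE_EXPR_RESOLVED_P
   is now set.  */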

/* Rewrite the ADDR_EXPR node pointed to by EXPR_P into GIMPLE form.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
         While the front end folds away '&*ptr' into 'ptr', these
         expressions may be generated internally by the compiler (e.g.,
         builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
         ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
         cv-qualification conversions, so make sure we add them if
         needed.  */
      {
        tree op00 = TREE_OPERAND (op0, 0);
        tree t_expr = TREE_TYPE (expr);
        tree t_op00 = TREE_TYPE (op00);

        if (!useless_type_conversion_p (t_expr, t_op00))
          op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
        *expr_p = op00;
        ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
         this ADDR_EXPR.

         ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
         all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
         guarantees that the ADDR_EXPR and its operand will remain of the
         same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
        op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
                                  build_fold_addr_expr_loc (loc,
                                                            TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      if (integer_zerop (TREE_OPERAND (op0, 1)))
        goto do_indirect_ref;

      /* Fall through.  */

    default:
      /* If we see a call to a declared builtin or see its address
         being taken (we can unify those cases here) then we can mark
         the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
          && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
          && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
        set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
         the address of a call that returns a struct; see
         gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
         the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
                           is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
        break;

      /* Then mark it.  Beware that it may not be possible to do so directly
         if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
         may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF
          || (TREE_CODE (op0) == MEM_REF
              && integer_zerop (TREE_OPERAND (op0, 1))))
        goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
         an incomplete type.  Re-build ADDR_EXPRs in canonical form
         now.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
        *expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
         if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
        *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
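
/* For example (illustrative): an internally generated '&*ptr' is rewritten
   above to plain 'ptr' (with a cv-qualification conversion added when the
   types differ), and '&MEM_REF[ptr, 0]' is handled the same way via the
   do_indirect_ref path.  */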

/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
= NULL_TREE
;
6446 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
6449 size_t constraint_len
;
6451 link_next
= TREE_CHAIN (link
);
6455 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6456 constraint_len
= strlen (constraint
);
6457 if (constraint_len
== 0)
6460 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6461 &allows_mem
, &allows_reg
, &is_inout
);
6468 /* If we can't make copies, we can only accept memory.
6469 Similarly for VLAs. */
6470 tree outtype
= TREE_TYPE (TREE_VALUE (link
));
6471 if (outtype
!= error_mark_node
6472 && (TREE_ADDRESSABLE (outtype
)
6473 || !COMPLETE_TYPE_P (outtype
)
6474 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype
))))
6480 error ("impossible constraint in %<asm%>");
6481 error ("non-memory output %d must stay in memory", i
);
6486 if (!allows_reg
&& allows_mem
)
6487 mark_addressable (TREE_VALUE (link
));
6489 tree orig
= TREE_VALUE (link
);
6490 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6491 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6492 fb_lvalue
| fb_mayfail
);
6493 if (tret
== GS_ERROR
)
6495 if (orig
!= error_mark_node
)
6496 error ("invalid lvalue in %<asm%> output %d", i
);
6500 /* If the constraint does not allow memory make sure we gimplify
6501 it to a register if it is not already but its base is. This
6502 happens for complex and vector components. */
6505 tree op
= TREE_VALUE (link
);
6506 if (! is_gimple_val (op
)
6507 && is_gimple_reg_type (TREE_TYPE (op
))
6508 && is_gimple_reg (get_base_address (op
)))
6510 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6514 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6515 tem
, unshare_expr (op
));
6516 gimplify_and_add (ass
, pre_p
);
6518 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6519 gimplify_and_add (ass
, post_p
);
6521 TREE_VALUE (link
) = tem
;
6526 vec_safe_push (outputs
, link
);
6527 TREE_CHAIN (link
) = NULL_TREE
;

      if (is_inout)
        {
          /* An input/output operand.  To give the optimizers more
             flexibility, split it into separate input and output
             operands.  */
          tree input;
          /* Buffer big enough to format a 32-bit UINT_MAX into.  */
          char buf[11];

          /* Turn the in/out constraint into an output constraint.  */
          char *p = xstrdup (constraint);
          p[0] = '=';
          TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

          /* And add a matching input constraint.  */
          if (allows_reg)
            {
              sprintf (buf, "%u", i);

              /* If there are multiple alternatives in the constraint,
                 handle each of them individually.  Those that allow register
                 will be replaced with operand number, the others will stay
                 unchanged.  */
              if (strchr (p, ',') != NULL)
                {
                  size_t len = 0, buflen = strlen (buf);
                  char *beg, *end, *str, *dst;

                  for (beg = p + 1;;)
                    {
                      end = strchr (beg, ',');
                      if (end == NULL)
                        end = strchr (beg, '\0');
                      if ((size_t) (end - beg) < buflen)
                        len += buflen + 1;
                      else
                        len += end - beg + 1;
                      if (*end)
                        beg = end + 1;
                      else
                        break;
                    }

                  str = (char *) alloca (len);
                  for (beg = p + 1, dst = str;;)
                    {
                      const char *tem;
                      bool mem_p, reg_p, inout_p;

                      end = strchr (beg, ',');
                      if (end)
                        *end = '\0';
                      beg[-1] = '=';
                      tem = beg - 1;
                      parse_output_constraint (&tem, i, 0, 0,
                                               &mem_p, &reg_p, &inout_p);
                      if (dst != str)
                        *dst++ = ',';
                      if (reg_p)
                        {
                          memcpy (dst, buf, buflen);
                          dst += buflen;
                        }
                      else
                        {
                          if (end)
                            len = end - beg;
                          else
                            len = strlen (beg);
                          memcpy (dst, beg, len);
                          dst += len;
                        }
                      if (end)
                        beg = end + 1;
                      else
                        break;
                    }
                  dst[-1] = '\0';
                  input = build_string (dst - str, str);
                }
              else
                input = build_string (strlen (buf), buf);
            }
          else
            input = build_string (constraint_len - 1, constraint + 1);

          input = build_tree_list (build_tree_list (NULL_TREE, input),
                                   unshare_expr (TREE_VALUE (link)));
          ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
        }
    }
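
  /* For example (schematic): an in/out operand

       asm ("..." : "+r" (x));

     is split above into an output plus a matching numbered input,

       asm ("..." : "=r" (x) : "0" (x));

     which gives the optimizers independent views of the two accesses.  */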

  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      tree intype = TREE_TYPE (TREE_VALUE (link));
      if (intype != error_mark_node
          && (TREE_ADDRESSABLE (intype)
              || !COMPLETE_TYPE_P (intype)
              || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
        {
          if (allows_mem)
            allows_reg = 0;
          else
            {
              error ("impossible constraint in %<asm%>");
              error ("non-memory input %d must stay in memory", i);
              return GS_ERROR;
            }
        }

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
        {
          tree inputv = TREE_VALUE (link);
          STRIP_NOPS (inputv);
          if (TREE_CODE (inputv) == PREDECREMENT_EXPR
              || TREE_CODE (inputv) == PREINCREMENT_EXPR
              || TREE_CODE (inputv) == POSTDECREMENT_EXPR
              || TREE_CODE (inputv) == POSTINCREMENT_EXPR
              || TREE_CODE (inputv) == MODIFY_EXPR)
            TREE_VALUE (link) = error_mark_node;
          tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
                                is_gimple_lvalue, fb_lvalue | fb_mayfail);
          if (tret != GS_ERROR)
            {
              /* Unlike output operands, memory inputs are not guaranteed
                 to be lvalues by the FE, and while the expressions are
                 marked addressable there, if it is e.g. a statement
                 expression, temporaries in it might not end up being
                 addressable.  They might be already used in the IL and thus
                 it is too late to make them addressable now though.  */
              tree x = TREE_VALUE (link);
              while (handled_component_p (x))
                x = TREE_OPERAND (x, 0);
              if (TREE_CODE (x) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
                x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
              if ((VAR_P (x)
                   || TREE_CODE (x) == PARM_DECL
                   || TREE_CODE (x) == RESULT_DECL)
                  && !TREE_ADDRESSABLE (x)
                  && is_gimple_reg (x))
                {
                  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
                                               input_location), 0,
                              "memory input %d is not directly addressable",
                              i);
                  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
                }
            }
          mark_addressable (TREE_VALUE (link));
          if (tret == GS_ERROR)
            {
              if (inputv != error_mark_node)
                error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
                          "memory input %d is not directly addressable", i);
              ret = tret;
            }
        }
      else
        {
          tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
                                is_gimple_asm_val, fb_rvalue);
          if (tret == GS_ERROR)
            ret = tret;
        }

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
                                   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
      gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}

/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
        {
          if (gsi_one_before_end_p (iter))
            {
              /* Note that gsi_insert_seq_before and gsi_remove do not
                 scan operands, unlike some other sequence mutators.  */
              if (!gimple_wce_cleanup_eh_only (wce))
                gsi_insert_seq_before_without_update (&iter,
                                                      gimple_wce_cleanup (wce),
                                                      GSI_SAME_STMT);
              gsi_remove (&iter, true);
              break;
            }
          else
            {
              gtry *gtry;
              gimple_seq seq;
              enum gimple_try_flags kind;

              if (gimple_wce_cleanup_eh_only (wce))
                kind = GIMPLE_TRY_CATCH;
              else
                kind = GIMPLE_TRY_FINALLY;
              seq = gsi_split_seq_after (iter);

              gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
              /* Do not use gsi_replace here, as it may scan operands.
                 We want to do a simple structural modification only.  */
              gsi_set_stmt (&iter, gtry);
              iter = gsi_start (gtry->eval);
            }
        }
      else
        gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
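
/* For example (an illustrative sketch): a body sequence

     tmp = init (); WCE <<dtor (tmp)>>; use (tmp);

   is rewritten by the loop above into

     tmp = init ();
     try { use (tmp); } finally { dtor (tmp); }

   while a WCE that is the last statement simply has its cleanup spliced
   in line (or dropped if it is EH-only).  */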

/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   If FORCE_UNCOND is true perform the cleanup unconditionally;  this is
   only valid for clobbers.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
                     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
         want to run the cleanup if we actually ran the initialization that
         necessitates it, but we want to run it after the end of the
         conditional context.  So we wrap the try/finally around the
         condition and use a flag to determine whether or not to actually
         run the destructor.  Thus

           test ? f (A ()) : 0

         becomes (approximately)

           flag = 0;
           try {
             if (test) { A::A (temp); flag = 1; val = f (temp); }
             else { val = 0; }
           } finally {
             if (flag) A::~A (temp);
           }
           val
      */
      if (force_uncond)
        {
          gimplify_stmt (&cleanup, &cleanup_stmts);
          wce = gimple_build_wce (cleanup_stmts);
          gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
        }
      else
        {
          tree flag = create_tmp_var (boolean_type_node, "cleanup");
          gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
          gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

          cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
          gimplify_stmt (&cleanup, &cleanup_stmts);
          wce = gimple_build_wce (cleanup_stmts);

          gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
          gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
          gimplify_seq_add_stmt (pre_p, ftrue);

          /* Because of this manipulation, and the EH edges that jump
             threading cannot redirect, the temporary (VAR) will appear
             to be used uninitialized.  Don't warn.  */
          suppress_warning (var, OPT_Wuninitialized);
        }
    }
  else
    {
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}

/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
         to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (!poly_int_tree_p (DECL_SIZE (temp)))
        {
          if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
            gimplify_type_sizes (TREE_TYPE (temp), pre_p);
          gimplify_vla_decl (temp, pre_p);
        }
      else
        {
          /* Save location where we need to place unpoisoning.  It's possible
             that a variable will be converted to needs_to_live_in_memory.  */
          unpoison_it = gsi_last (*pre_p);
          unpoison_empty_seq = gsi_end_p (unpoison_it);

          gimple_add_tmp_var (temp);
        }

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
         expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
        ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
        {
          tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
          init = init_expr;
          ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
          init = NULL;
          ggc_free (init_expr);
        }
      if (ret == GS_ERROR)
        {
          /* PR c++/28266 Make sure this is expanded only once.  */
          TARGET_EXPR_INITIAL (targ) = NULL_TREE;
          return GS_ERROR;
        }
      if (init)
        gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
        {
          if (CLEANUP_EH_ONLY (targ))
            gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
                                 CLEANUP_EH_ONLY (targ), pre_p);
          else
            cleanup = TARGET_EXPR_CLEANUP (targ);
        }

      /* Add a clobber for the temporary going out of scope, like
         gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
          && needs_to_live_in_memory (temp))
        {
          if (flag_stack_reuse == SR_ALL)
            {
              tree clobber = build_clobber (TREE_TYPE (temp));
              clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
              gimple_push_cleanup (temp, clobber, false, pre_p, true);
            }
          if (asan_poisoned_variables
              && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
              && !TREE_STATIC (temp)
              && dbg_cnt (asan_use_after_scope)
              && !gimplify_omp_ctxp)
            {
              tree asan_cleanup = build_asan_poison_call_expr (temp);
              if (asan_cleanup)
                {
                  if (unpoison_empty_seq)
                    unpoison_it = gsi_start (*pre_p);

                  asan_poison_variable (temp, false, &unpoison_it,
                                        unpoison_empty_seq);
                  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
                }
            }
        }
      if (cleanup)
        gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
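
/* For example (a sketch; D.2345 is a made-up slot name): a C++ temporary
   TARGET_EXPR <D.2345, f ()> with cleanup '~S (&D.2345)' is expanded above
   to roughly

     D.2345 = f ();
     ... cleanup pushed via gimple_push_cleanup: ~S (&D.2345) ...

   with D.2345 registered as a temporary of the enclosing bind and, if
   enabled, ASan poison/unpoison calls bracketing its lifetime.  */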

/* Gimplification of expression trees.  */

/* Gimplify an expression which appears at statement context.  The
   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
   NULL, a new sequence is allocated.

   Return true if we actually added a statement to the queue.  */

bool
gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
{
  gimple_seq_node last;

  last = gimple_seq_last (*seq_p);
  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
  return last != gimple_seq_last (*seq_p);
}

/* Add FIRSTPRIVATE entries for DECL in CTX and the surrounding OpenMP
   parallels.  If entries already exist, force them to be some flavor of
   private.  If there is no enclosing parallel, do nothing.  */

static void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
        {
          if (n->value & GOVD_SHARED)
            n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
          else if (n->value & GOVD_MAP)
            n->value |= GOVD_MAP_TO_ONLY;
          else
            return;
        }
      else if ((ctx->region_type & ORT_TARGET) != 0)
        {
          if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
            omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
          else
            omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
        }
      else if (ctx->region_type != ORT_WORKSHARE
               && ctx->region_type != ORT_TASKGROUP
               && ctx->region_type != ORT_SIMD
               && ctx->region_type != ORT_ACC
               && !(ctx->region_type & ORT_TARGET_DATA))
        omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}
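
/* For example (illustrative): for a VLA 'char vla[n];' referenced in an
   enclosing parallel, the gimplified temporaries feeding DECL_SIZE and
   DECL_SIZE_UNIT of 'vla' are entered above as GOVD_FIRSTPRIVATE in each
   surrounding context, so the size is available wherever the object is
   re-created.  */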

/* Similarly for each of the type sizes of TYPE.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  if (ctx->privatized_types->add (type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree field;
        for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
          if (TREE_CODE (field) == FIELD_DECL)
            {
              omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
              omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
            }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}

/* Add an entry for DECL in the OMP context CTX with FLAGS.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  Exception is a shared clause,
     there is nothing privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
          || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
         sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
         FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
         reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
                  || ((nflags & GOVD_DATA_SHARE_CLASS)
                      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
                  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
         replacement is private, else FIRSTPRIVATE since we'll need the
         address of the original variable either for SHARED, or for the
         copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
        {
          if (flags & GOVD_MAP)
            nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
          else if (flags & GOVD_PRIVATE)
            nflags = GOVD_PRIVATE;
          else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
                    && (flags & GOVD_FIRSTPRIVATE))
                   || (ctx->region_type == ORT_TARGET_DATA
                       && (flags & GOVD_DATA_SHARE_CLASS) == 0))
            nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
          else
            nflags = GOVD_FIRSTPRIVATE;
          nflags |= flags & GOVD_SEEN;
          t = DECL_VALUE_EXPR (decl);
          gcc_assert (TREE_CODE (t) == INDIRECT_REF);
          t = TREE_OPERAND (t, 0);
          gcc_assert (DECL_P (t));
          omp_add_variable (ctx, t, nflags);
        }

      /* Add all of the variable and type parameters (which should have
         been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
         of PRIVATE.  The sharing would take place via the pointer variable
         which we remapped above.  */
      if (flags & GOVD_SHARED)
        flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
                | (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
         alloca statement we generate for the variable, so make sure it
         is available.  This isn't automatically needed for the SHARED
         case, since we won't be allocating local storage then.
         For local variables TYPE_SIZE_UNIT might not be gimplified yet,
         in this case omp_notice_variable will be called later
         on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
               && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
        omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
           && omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
         size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
        {
          t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
          if (t && DECL_P (t))
            omp_notice_variable (ctx, t, true);
        }
    }

  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
        {
          n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
          if (n != NULL)
            {
              /* Ignore local variables and explicitly declared clauses.  */
              if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
                break;
              else if (outer_ctx->region_type == ORT_ACC_KERNELS)
                {
                  /* According to the OpenACC spec, such a reduction variable
                     should already have a copy map on a kernels construct,
                     verify that here.  */
                  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
                              && (n->value & GOVD_MAP));
                }
              else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
                {
                  /* Remove firstprivate and make it a copy map.  */
                  n->value &= ~GOVD_FIRSTPRIVATE;
                  n->value |= GOVD_MAP;
                }
            }
          else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
            {
              splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
                                 GOVD_MAP | GOVD_SEEN);
              break;
            }
          outer_ctx = outer_ctx->outer_context;
        }
    }
}

/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
                                   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0
        || octx->order_concurrent)
      {
        n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
        if (n == NULL)
          {
            if (octx->order_concurrent)
              {
                error ("threadprivate variable %qE used in a region with"
                       " %<order(concurrent)%> clause", DECL_NAME (decl));
                inform (octx->location, "enclosing region");
              }
            else
              {
                error ("threadprivate variable %qE used in target region",
                       DECL_NAME (decl));
                inform (octx->location, "enclosing target region");
              }
            splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
          }
        if (decl2)
          splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
             DECL_NAME (decl));
      inform (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}

/* Return true if global var DECL is device resident.  */

static bool
device_resident_p (tree decl)
{
  tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));

  if (!attr)
    return false;

  for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
    {
      tree c = TREE_VALUE (t);
      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
        return true;
    }

  return false;
}

/* Return true if DECL has an ACC DECLARE attribute.  */

static bool
is_oacc_declared (tree decl)
{
  tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
  tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
  return declared != NULL_TREE;
}

/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not completely.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
                    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (ctx->region_type & ORT_TASK)
    {
      tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);

      /* The event-handle specified by a detach clause should always be
         firstprivate, regardless of the current default.  */
      if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
        kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
    }
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;
  else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
    default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  /* For C/C++ default({,first}private), variables with static storage duration
     declared in a namespace or global scope and referenced in construct
     must be explicitly specified, i.e. acts as default(none).  */
  else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
            || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
           && VAR_P (decl)
           && is_global_var (decl)
           && (DECL_FILE_SCOPE_P (decl)
               || (DECL_CONTEXT (decl)
                   && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
           && !lang_GNU_Fortran ())
    default_kind = OMP_CLAUSE_DEFAULT_NONE;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
        const char *rtype;

        if (ctx->region_type & ORT_PARALLEL)
          rtype = "parallel";
        else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
          rtype = "taskloop";
        else if (ctx->region_type & ORT_TASK)
          rtype = "task";
        else if (ctx->region_type & ORT_TEAMS)
          rtype = "teams";
        else
          gcc_unreachable ();

        error ("%qE not specified in enclosing %qs",
               DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
        inform (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
        {
          omp_notice_variable (octx, decl, in_code);
          for (; octx; octx = octx->outer_context)
            {
              splay_tree_node n2;

              n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
                  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
                continue;
              if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
                {
                  flags |= GOVD_FIRSTPRIVATE;
                  goto found_outer;
                }
              if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
                {
                  flags |= GOVD_SHARED;
                  goto found_outer;
                }
            }
        }

      if (TREE_CODE (decl) == PARM_DECL
          || (!is_global_var (decl)
              && DECL_CONTEXT (decl) == current_function_decl))
        flags |= GOVD_FIRSTPRIVATE;
      else
        flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
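
/* For example (illustrative):

     int x;
     #pragma omp parallel default(none)
     x++;

   hits the OMP_CLAUSE_DEFAULT_NONE case above and reports
   "'x' not specified in enclosing 'parallel'", whereas default(shared)
   would just add GOVD_SHARED for 'x'.  */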

/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.  */

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;
  bool is_private = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  if (omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* For Fortran COMMON blocks, only used variables in those blocks are
     transferred and remapped.  The block itself will have a private clause
     to avoid transferring the data twice.
     The hook evaluates to false by default.  For a variable in Fortran's
     COMMON or EQUIVALENCE block, returns 'true' (as we have shared=false)
     - as only the variables in such a COMMON/EQUIVALENCE block shall be
     privatized, not the whole block.  For C++ and Fortran, it can also be
     true under certain other conditions, if DECL_HAS_VALUE_EXPR.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl)
      && !is_private)
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      rkind = "kernels";

      if (is_private)
        flags |= GOVD_FIRSTPRIVATE;
      else if (AGGREGATE_TYPE_P (type))
        {
          /* Aggregates default to 'present_or_copy', or 'present'.  */
          if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
            flags |= GOVD_MAP;
          else
            flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
        }
      else
        /* Scalars default to 'copy'.  */
        flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
    case ORT_ACC_SERIAL:
      rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";

      if (is_private)
        flags |= GOVD_FIRSTPRIVATE;
      else if (on_device || declared)
        flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
        {
          /* Aggregates default to 'present_or_copy', or 'present'.  */
          if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
            flags |= GOVD_MAP;
          else
            flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
        }
      else
        /* Scalars default to 'firstprivate'.  */
        flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
         about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
             DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}

/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
        return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          if (ctx->region_type & ORT_ACC)
            /* For OpenACC, defer expansion of value to avoid transferring
               privatized common block data instead of im-/explicitly
               transferred variables which are in common blocks.  */
            ;
          else
            {
              tree value = get_base_address (DECL_VALUE_EXPR (decl));

              if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
                return omp_notice_threadprivate_variable (ctx, decl, value);
            }
        }

      if (gimplify_omp_ctxp->outer_context == NULL
          && VAR_P (decl)
          && oacc_get_fn_attrib (current_function_decl))
        {
          location_t loc = DECL_SOURCE_LOCATION (decl);

          if (lookup_attribute ("omp declare target link",
                                DECL_ATTRIBUTES (decl)))
            {
              error_at (loc,
                        "%qE with %<link%> clause used in %<routine%> function",
                        DECL_NAME (decl));
              return false;
            }
          else if (!lookup_attribute ("omp declare target",
                                      DECL_ATTRIBUTES (decl)))
            {
              error_at (loc,
                        "%qE requires a %<declare%> directive for use "
                        "in a %<routine%> function", DECL_NAME (decl));
              return false;
            }
        }
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      if (ctx->region_type & ORT_ACC)
        /* For OpenACC, as remarked above, defer expansion.  */
        shared = false;
      else
        shared = true;

      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      if (n == NULL)
        {
          unsigned nflags = flags;
          if ((ctx->region_type & ORT_ACC) == 0)
            {
              bool is_declare_target = false;
              if (is_global_var (decl)
                  && varpool_node::get_create (decl)->offloadable)
                {
                  struct gimplify_omp_ctx *octx;
                  for (octx = ctx->outer_context;
                       octx; octx = octx->outer_context)
                    {
                      n = splay_tree_lookup (octx->variables,
                                             (splay_tree_key)decl);
                      if (n
                          && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
                          && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
                        break;
                    }
                  is_declare_target = octx == NULL;
                }
              if (!is_declare_target)
                {
                  int gdmk;
                  enum omp_clause_defaultmap_kind kind;
                  if (lang_hooks.decls.omp_allocatable_p (decl))
                    gdmk = GDMK_ALLOCATABLE;
                  else if (lang_hooks.decls.omp_scalar_target_p (decl))
                    gdmk = GDMK_SCALAR_TARGET;
                  else if (lang_hooks.decls.omp_scalar_p (decl, false))
                    gdmk = GDMK_SCALAR;
                  else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
                           || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
                               && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
                                   == POINTER_TYPE)))
                    gdmk = GDMK_POINTER;
                  else
                    gdmk = GDMK_AGGREGATE;
                  kind = lang_hooks.decls.omp_predetermined_mapping (decl);
                  if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
                    {
                      if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
                        nflags |= GOVD_FIRSTPRIVATE;
                      else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
                        nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
                      else
                        gcc_unreachable ();
                    }
                  else if (ctx->defaultmap[gdmk] == 0)
                    {
                      tree d = lang_hooks.decls.omp_report_decl (decl);
                      error ("%qE not specified in enclosing %<target%>",
                             DECL_NAME (d));
                      inform (ctx->location, "enclosing %<target%>");
                    }
                  else if (ctx->defaultmap[gdmk]
                           & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
                    nflags |= ctx->defaultmap[gdmk];
                  else
                    {
                      gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
                      nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
                    }
                }
            }

          struct gimplify_omp_ctx *octx = ctx->outer_context;
          if ((ctx->region_type & ORT_ACC) && octx)
            {
              /* Look in outer OpenACC contexts, to see if there's a
                 data attribute for this variable.  */
              omp_notice_variable (octx, decl, in_code);

              for (; octx; octx = octx->outer_context)
                {
                  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
                    break;
                  splay_tree_node n2
                    = splay_tree_lookup (octx->variables,
                                         (splay_tree_key) decl);
                  if (n2)
                    {
                      if (octx->region_type == ORT_ACC_HOST_DATA)
                        error ("variable %qE declared in enclosing "
                               "%<host_data%> region", DECL_NAME (decl));
                      nflags |= GOVD_MAP;
                      if (octx->region_type == ORT_ACC_DATA
                          && (n2->value & GOVD_MAP_0LEN_ARRAY))
                        nflags |= GOVD_MAP_0LEN_ARRAY;
                      goto found_outer;
                    }
                }
            }

          if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
                          | GOVD_MAP_ALLOC_ONLY)) == flags)
            {
              tree type = TREE_TYPE (decl);

              if (gimplify_omp_ctxp->target_firstprivatize_array_bases
                  && omp_privatize_by_reference (decl))
                type = TREE_TYPE (type);
              if (!lang_hooks.types.omp_mappable_type (type))
                {
                  error ("%qD referenced in target region does not have "
                         "a mappable type", decl);
                  nflags |= GOVD_MAP | GOVD_EXPLICIT;
                }
              else
                {
                  if ((ctx->region_type & ORT_ACC) != 0)
                    nflags = oacc_default_clause (ctx, decl, flags);
                  else
                    nflags |= GOVD_MAP;
                }
            }
        found_outer:
          omp_add_variable (ctx, decl, nflags);
        }
      else
        {
          /* If nothing changed, there's nothing left to do.  */
          if ((n->value & flags) == flags)
            return ret;
          flags |= n->value;
          n->value = flags;
        }
      goto do_outer;
    }

  if (n == NULL)
    {
      if (ctx->region_type == ORT_WORKSHARE
          || ctx->region_type == ORT_TASKGROUP
          || ctx->region_type == ORT_SIMD
          || ctx->region_type == ORT_ACC
          || (ctx->region_type & ORT_TARGET_DATA) != 0)
        goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
          && lang_hooks.decls.omp_private_outer_ref (decl))
        flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
     lb, b or incr expressions, those shouldn't be turned into simd arrays.  */
  if (ctx->region_type == ORT_SIMD
      && ctx->in_for_exprs
      && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
          == GOVD_PRIVATE))
    flags &= ~GOVD_SEEN;

  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
        {
          splay_tree_node n2;
          tree t = DECL_VALUE_EXPR (decl);
          gcc_assert (TREE_CODE (t) == INDIRECT_REF);
          t = TREE_OPERAND (t, 0);
          gcc_assert (DECL_P (t));
          n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
          n2->value |= GOVD_SEEN;
        }
      else if (omp_privatize_by_reference (decl)
               && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
               && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
                   != INTEGER_CST))
        {
          splay_tree_node n2;
          tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
          gcc_assert (DECL_P (t));
          n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
          if (n2)
            omp_notice_variable (ctx, t, true);
        }
    }

  if (ctx->region_type & ORT_ACC)
    /* For OpenACC, as remarked above, defer expansion.  */
    shared = false;
  else
    shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
                | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}

/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
        {
          if (ctx == gimplify_omp_ctxp)
            {
              if (simd)
                error ("iteration variable %qE is predetermined linear",
                       DECL_NAME (decl));
              else
                error ("iteration variable %qE should be private",
                       DECL_NAME (decl));
              n->value = GOVD_PRIVATE;
              return true;
            }
          else
            return false;
        }
      else if ((n->value & GOVD_EXPLICIT) != 0
               && (ctx == gimplify_omp_ctxp
                   || (ctx->region_type == ORT_COMBINED_PARALLEL
                       && gimplify_omp_ctxp->outer_context == ctx)))
        {
          if ((n->value & GOVD_FIRSTPRIVATE) != 0)
            error ("iteration variable %qE should not be firstprivate",
                   DECL_NAME (decl));
          else if ((n->value & GOVD_REDUCTION) != 0)
            error ("iteration variable %qE should not be reduction",
                   DECL_NAME (decl));
          else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
            error ("iteration variable %qE should not be linear",
                   DECL_NAME (decl));
        }
      return (ctx == gimplify_omp_ctxp
              || (ctx->region_type == ORT_COMBINED_PARALLEL
                  && gimplify_omp_ctxp->outer_context == ctx));
    }

  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_TASKGROUP
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
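
/* For example (illustrative):

     #pragma omp parallel for shared(i)
     for (i = 0; i < n; i++) ...

   looks up 'i', finds GOVD_SHARED in the innermost context and reports
   "iteration variable 'i' should be private" via the function above.  */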

/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
        {
          if (is_global_var (decl))
            return false;

          /* References might be private, but might be shared too,
             when checking for copyprivate, assume they might be
             private, otherwise assume they might be shared.  */
          if (copyprivate)
            return true;

          if (omp_privatize_by_reference (decl))
            return false;

          /* Treat C++ privatized non-static data members outside
             of the privatization the same.  */
          if (omp_member_access_dummy_var (decl))
            return false;

          return true;
        }

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
          && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
        {
          if ((ctx->region_type & ORT_TARGET_DATA) != 0
              || n == NULL
              || (n->value & GOVD_MAP) == 0)
            continue;
          return false;
        }

      if (n != NULL)
        {
          if ((n->value & GOVD_LOCAL) != 0
              && omp_member_access_dummy_var (decl))
            return false;
          return (n->value & GOVD_SHARED) == 0;
        }

      if (ctx->region_type == ORT_WORKSHARE
          || ctx->region_type == ORT_TASKGROUP
          || ctx->region_type == ORT_SIMD
          || ctx->region_type == ORT_ACC)
        continue;

      break;
    }
  while (1);

  return false;
}

/* Callback for walk_tree to find a DECL_EXPR for the given DECL.  */

static tree
find_decl_expr (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* Stop the walk once the DECL_EXPR for DATA has been found.  */
  if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
    return t;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Gimplify the affinity clause but effectively ignore it.
   Generate:
     var = begin;
     if ((step > 1) ? var <= end : var > end)
       locator_var_expr;  */

static void
gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
{
  tree last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree label = NULL_TREE;
  tree *last_body = NULL;
  for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
      {
        tree t = OMP_CLAUSE_DECL (c);
        if (TREE_CODE (t) == TREE_LIST
            && TREE_PURPOSE (t)
            && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
          {
            if (TREE_VALUE (t) == null_pointer_node)
              continue;
            if (TREE_PURPOSE (t) != last_iter)
              {
                if (last_bind)
                  {
                    append_to_statement_list (label, last_body);
                    gimplify_and_add (last_bind, pre_p);
                    last_bind = NULL_TREE;
                  }
                for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
                  {
                    if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
                                       is_gimple_val, fb_rvalue) == GS_ERROR
                        || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
                                          is_gimple_val, fb_rvalue) == GS_ERROR
                        || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
                                          is_gimple_val, fb_rvalue) == GS_ERROR
                        || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
                                           is_gimple_val, fb_rvalue)
                            == GS_ERROR))
                      return;
                  }
                last_iter = TREE_PURPOSE (t);
                tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
                last_bind = build3 (BIND_EXPR, void_type_node,
                                    BLOCK_VARS (block), NULL, block);
                last_body = &BIND_EXPR_BODY (last_bind);
                tree cond = NULL_TREE;
                location_t loc = OMP_CLAUSE_LOCATION (c);
                for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
                  {
                    tree var = TREE_VEC_ELT (it, 0);
                    tree begin = TREE_VEC_ELT (it, 1);
                    tree end = TREE_VEC_ELT (it, 2);
                    tree step = TREE_VEC_ELT (it, 3);
                    loc = DECL_SOURCE_LOCATION (var);
                    tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
                                           var, begin);
                    append_to_statement_list_force (tem, last_body);

                    tree cond1 = fold_build2_loc (loc, GT_EXPR,
                                                  boolean_type_node, step,
                                                  build_zero_cst (TREE_TYPE (step)));
                    tree cond2 = fold_build2_loc (loc, LE_EXPR,
                                                  boolean_type_node, var, end);
                    tree cond3 = fold_build2_loc (loc, GT_EXPR,
                                                  boolean_type_node, var, end);
                    cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
                                             cond1, cond2, cond3);
                    if (cond)
                      cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
                                              boolean_type_node, cond, cond1);
                    else
                      cond = cond1;
                  }
                tree cont_label = create_artificial_label (loc);
                label = build1 (LABEL_EXPR, void_type_node, cont_label);
                tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
                                            cond, void_node,
                                            build_and_jump (&cont_label));
                append_to_statement_list_force (tem, last_body);
              }
8103 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8105 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
), 0),
8107 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8109 if (error_operand_p (TREE_VALUE (t
)))
8111 append_to_statement_list_force (TREE_VALUE (t
), last_body
);
8112 TREE_VALUE (t
) = null_pointer_node
;
8118 append_to_statement_list (label
, last_body
);
8119 gimplify_and_add (last_bind
, pre_p
);
8120 last_bind
= NULL_TREE
;
8122 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8124 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8125 NULL
, is_gimple_val
, fb_rvalue
);
8126 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8128 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8130 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8131 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8133 gimplify_and_add (OMP_CLAUSE_DECL (c
), pre_p
);
8138 append_to_statement_list (label
, last_body
);
8139 gimplify_and_add (last_bind
, pre_p
);
/* If *LIST_P contains any OpenMP depend clauses with iterators,
   lower all the depend clauses by populating corresponding depend
   array.  Returns 0 if there are no such depend clauses, or
   2 if all depend clauses should be removed, 1 otherwise.  */
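/* As an illustrative sketch (the concrete pragma is an assumption,
   not taken from the code below), for

     #pragma omp task depend (iterator (i = 0 : n), inout : a[i])

   the passes below count the iterations, fill a temporary array with
   the depend addresses plus a small header of counts, roughly

     D = { ..., <total>, <inout count>, ..., &a[0], &a[1], ..., &a[n-1] }

   and replace the depend clauses with a single OMP_CLAUSE_DEPEND_LAST
   clause whose decl is &D.  */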
static int
gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
{
  tree c;
  gimple *g;
  size_t n[4] = { 0, 0, 0, 0 };
  bool unused[4];
  tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
  tree last_iter = NULL_TREE, last_count = NULL_TREE;
  size_t i, j;
  location_t first_loc = UNKNOWN_LOCATION;

  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
	switch (OMP_CLAUSE_DEPEND_KIND (c))
	  {
	  case OMP_CLAUSE_DEPEND_IN:
	    i = 2;
	    break;
	  case OMP_CLAUSE_DEPEND_OUT:
	  case OMP_CLAUSE_DEPEND_INOUT:
	    i = 0;
	    break;
	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	    i = 1;
	    break;
	  case OMP_CLAUSE_DEPEND_DEPOBJ:
	    i = 3;
	    break;
	  case OMP_CLAUSE_DEPEND_SOURCE:
	  case OMP_CLAUSE_DEPEND_SINK:
	    continue;
	  default:
	    gcc_unreachable ();
	  }
	tree t = OMP_CLAUSE_DECL (c);
	if (first_loc == UNKNOWN_LOCATION)
	  first_loc = OMP_CLAUSE_LOCATION (c);
	if (TREE_CODE (t) == TREE_LIST
	    && TREE_PURPOSE (t)
	    && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
	  {
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		tree tcnt = size_one_node;
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
				       is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
					   is_gimple_val, fb_rvalue)
			    == GS_ERROR))
		      return 2;
		    tree var = TREE_VEC_ELT (it, 0);
		    tree begin = TREE_VEC_ELT (it, 1);
		    tree end = TREE_VEC_ELT (it, 2);
		    tree step = TREE_VEC_ELT (it, 3);
		    tree orig_step = TREE_VEC_ELT (it, 4);
		    tree type = TREE_TYPE (var);
		    tree stype = TREE_TYPE (step);
		    location_t loc = DECL_SOURCE_LOCATION (var);
		    tree endmbegin;
		    /* Compute count for this iterator as
		       orig_step > 0
		       ? (begin < end ? (end - begin + (step - 1)) / step : 0)
		       : (begin > end ? (end - begin + (step + 1)) / step : 0)
		       and compute product of those for the entire depend
		       clause.  */
		    if (POINTER_TYPE_P (type))
		      endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
						   stype, end, begin);
		    else
		      endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
						   end, begin);
		    tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
						   step,
						   build_int_cst (stype, 1));
		    tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
						   build_int_cst (stype, 1));
		    tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
						unshare_expr (endmbegin),
						stepm1);
		    pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
					   pos, step);
		    tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
						endmbegin, stepp1);
		    if (TYPE_UNSIGNED (stype))
		      {
			neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
			step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
		      }
		    neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
					   neg, step);
		    tree cond = fold_build2_loc (loc, LT_EXPR,
						 boolean_type_node,
						 begin, end);
		    pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
					   build_int_cst (stype, 0));
		    cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
					    end, begin);
		    neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
					   build_int_cst (stype, 0));
		    tree osteptype = TREE_TYPE (orig_step);
		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
					    orig_step,
					    build_int_cst (osteptype, 0));
		    tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
						cond, pos, neg);
		    cnt = fold_convert_loc (loc, sizetype, cnt);
		    if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
				       fb_rvalue) == GS_ERROR)
		      return 2;
		    tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
		  }
		if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
				   fb_rvalue) == GS_ERROR)
		  return 2;
		last_iter = TREE_PURPOSE (t);
		last_count = tcnt;
	      }
	    if (counts[i] == NULL_TREE)
	      counts[i] = last_count;
	    else
	      counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
					  PLUS_EXPR, counts[i], last_count);
	  }
	else
	  n[i]++;
      }
  for (i = 0; i < 4; i++)
    if (counts[i])
      break;
  if (i == 4)
    return 0;

  tree total = size_zero_node;
  for (i = 0; i < 4; i++)
    {
      unused[i] = counts[i] == NULL_TREE && n[i] == 0;
      if (counts[i] == NULL_TREE)
	counts[i] = size_zero_node;
      if (n[i])
	counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
      if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
			 fb_rvalue) == GS_ERROR)
	return 2;
      total = size_binop (PLUS_EXPR, total, counts[i]);
    }

  if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
      == GS_ERROR)
    return 2;
  bool is_old = unused[1] && unused[3];
  tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
			     size_int (is_old ? 1 : 4));
  tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
  tree array = create_tmp_var_raw (type);
  TREE_ADDRESSABLE (array) = 1;
  if (!poly_int_tree_p (totalpx))
    {
      if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
	gimplify_type_sizes (TREE_TYPE (array), pre_p);
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
	}
      gimplify_vla_decl (array, pre_p);
    }
  else
    gimple_add_tmp_var (array);
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  tree tem;
  if (!is_old)
    {
      tem = build2 (MODIFY_EXPR, void_type_node, r,
		    build_int_cst (ptr_type_node, 0));
      gimplify_and_add (tem, pre_p);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  tem = build2 (MODIFY_EXPR, void_type_node, r,
		fold_convert (ptr_type_node, total));
  gimplify_and_add (tem, pre_p);
  for (i = 1; i < (is_old ? 2 : 4); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
		  NULL_TREE, NULL_TREE);
      tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
      gimplify_and_add (tem, pre_p);
    }

  tree cnts[4];
  for (j = 4; j; j--)
    if (!unused[j - 1])
      break;
  for (i = 0; i < 4; i++)
    {
      if (i && (i >= j || unused[i - 1]))
	{
	  cnts[i] = cnts[i - 1];
	  continue;
	}
      cnts[i] = create_tmp_var (sizetype);
      if (i == 0)
	g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
      else
	{
	  tree t;
	  if (is_old)
	    t = size_binop (PLUS_EXPR, counts[0], size_int (2));
	  else
	    t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
	  if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
	      == GS_ERROR)
	    return 2;
	  g = gimple_build_assign (cnts[i], t);
	}
      gimple_seq_add_stmt (pre_p, g);
    }

  last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree *last_body = NULL;
  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
	switch (OMP_CLAUSE_DEPEND_KIND (c))
	  {
	  case OMP_CLAUSE_DEPEND_IN:
	    i = 2;
	    break;
	  case OMP_CLAUSE_DEPEND_OUT:
	  case OMP_CLAUSE_DEPEND_INOUT:
	    i = 0;
	    break;
	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	    i = 1;
	    break;
	  case OMP_CLAUSE_DEPEND_DEPOBJ:
	    i = 3;
	    break;
	  case OMP_CLAUSE_DEPEND_SOURCE:
	  case OMP_CLAUSE_DEPEND_SINK:
	    continue;
	  default:
	    gcc_unreachable ();
	  }
	tree t = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (t) == TREE_LIST
	    && TREE_PURPOSE (t)
	    && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
	  {
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		if (last_bind)
		  gimplify_and_add (last_bind, pre_p);
		tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
		last_bind = build3 (BIND_EXPR, void_type_node,
				    BLOCK_VARS (block), NULL, block);
		TREE_SIDE_EFFECTS (last_bind) = 1;
		SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
		tree *p = &BIND_EXPR_BODY (last_bind);
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    tree var = TREE_VEC_ELT (it, 0);
		    tree begin = TREE_VEC_ELT (it, 1);
		    tree end = TREE_VEC_ELT (it, 2);
		    tree step = TREE_VEC_ELT (it, 3);
		    tree orig_step = TREE_VEC_ELT (it, 4);
		    tree type = TREE_TYPE (var);
		    location_t loc = DECL_SOURCE_LOCATION (var);
		    /* Emit:
		       var = begin;
		       goto cond_label;
		       beg_label:
		       ...
		       var = var + step;
		       cond_label:
		       if (orig_step > 0) {
			 if (var < end) goto beg_label;
		       } else {
			 if (var > end) goto beg_label;
		       }
		       for each iterator, with inner iterators added to
		       the body as needed.  */
		    tree beg_label = create_artificial_label (loc);
		    tree cond_label = NULL_TREE;
		    tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
				      var, begin);
		    append_to_statement_list_force (tem, p);
		    tem = build_and_jump (&cond_label);
		    append_to_statement_list_force (tem, p);
		    tem = build1 (LABEL_EXPR, void_type_node, beg_label);
		    append_to_statement_list (tem, p);
		    tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
					NULL_TREE, NULL_TREE);
		    TREE_SIDE_EFFECTS (bind) = 1;
		    SET_EXPR_LOCATION (bind, loc);
		    append_to_statement_list_force (bind, p);
		    if (POINTER_TYPE_P (type))
		      tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
					var, fold_convert_loc (loc, sizetype,
							       step));
		    else
		      tem = build2_loc (loc, PLUS_EXPR, type, var, step);
		    tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
				      var, tem);
		    append_to_statement_list_force (tem, p);
		    tem = build1 (LABEL_EXPR, void_type_node, cond_label);
		    append_to_statement_list (tem, p);
		    tree cond = fold_build2_loc (loc, LT_EXPR,
						 boolean_type_node,
						 var, end);
		    tree pos
		      = fold_build3_loc (loc, COND_EXPR, void_type_node,
					 cond, build_and_jump (&beg_label),
					 void_node);
		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
					    var, end);
		    tree neg
		      = fold_build3_loc (loc, COND_EXPR, void_type_node,
					 cond, build_and_jump (&beg_label),
					 void_node);
		    tree osteptype = TREE_TYPE (orig_step);
		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
					    orig_step,
					    build_int_cst (osteptype, 0));
		    tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
					   cond, pos, neg);
		    append_to_statement_list_force (tem, p);
		    p = &BIND_EXPR_BODY (bind);
		  }
		last_body = p;
		last_iter = TREE_PURPOSE (t);
	      }
	    if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
	      {
		append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
					  last_body);
		TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
	      }
	    if (error_operand_p (TREE_VALUE (t)))
	      return 2;
	    TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			NULL_TREE, NULL_TREE);
	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			      void_type_node, r, TREE_VALUE (t));
	    append_to_statement_list_force (tem, last_body);
	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			      void_type_node, cnts[i],
			      size_binop (PLUS_EXPR, cnts[i], size_int (1)));
	    append_to_statement_list_force (tem, last_body);
	    TREE_VALUE (t) = null_pointer_node;
	  }
	else
	  {
	    if (last_bind)
	      {
		gimplify_and_add (last_bind, pre_p);
		last_bind = NULL_TREE;
	      }
	    if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	      {
		gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			       NULL, is_gimple_val, fb_rvalue);
		OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      }
	    if (error_operand_p (OMP_CLAUSE_DECL (c)))
	      return 2;
	    OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	    if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			       is_gimple_val, fb_rvalue) == GS_ERROR)
	      return 2;
	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			NULL_TREE, NULL_TREE);
	    tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
	    gimplify_and_add (tem, pre_p);
	    g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
							  size_int (1)));
	    gimple_seq_add_stmt (pre_p, g);
	  }
      }
  if (last_bind)
    gimplify_and_add (last_bind, pre_p);
  tree cond = boolean_false_node;
  if (is_old)
    {
      if (!unused[0])
	cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
			   size_binop_loc (first_loc, PLUS_EXPR, counts[0],
					   size_int (2)));
      if (!unused[2])
	cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
			   build2_loc (first_loc, NE_EXPR, boolean_type_node,
				       cnts[2],
				       size_binop_loc (first_loc, PLUS_EXPR,
						       unshare_expr (totalpx),
						       size_int (1))));
    }
  else
    {
      tree prev = size_int (5);
      for (i = 0; i < 4; i++)
	{
	  if (unused[i])
	    continue;
	  prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
	  cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
			     build2_loc (first_loc, NE_EXPR, boolean_type_node,
					 cnts[i], unshare_expr (prev)));
	}
    }
  tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
		    build_call_expr_loc (first_loc,
					 builtin_decl_explicit (BUILT_IN_TRAP),
					 0), void_node);
  gimplify_and_add (tem, pre_p);
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *list_p;
  *list_p = c;
  return 1;
}
/* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
   GOMP_MAP_STRUCT mapping.  C is an always_pointer mapping.  STRUCT_NODE is
   the struct node to insert the new mapping after (when the struct node is
   initially created).  PREV_NODE is the first of two or three mappings for a
   pointer, and is either:
     - the node before C, when a pair of mappings is used, e.g. for a C/C++
       pointer, or
     - not the node before C.  This is true when we have a reference-to-pointer
       type (with a mapping for the reference and for the pointer), or for
       Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
   If SCP is non-null, the new node is inserted before *SCP.
   If SCP is null, the new node is inserted before PREV_NODE.
   The return value is:
     - PREV_NODE, if SCP is non-null.
     - The newly-created ALLOC or RELEASE node, if SCP is null.
     - The second newly-created ALLOC or RELEASE node, if we are mapping a
       reference to a pointer.  */
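/* Sketch of the usual situation this is used in (an illustration, not a
   statement about all callers): for a mapping like map (to: s.ptr[0:n])
   the clause chain

     GOMP_MAP_STRUCT (s) -> GOMP_MAP_TO (s.ptr[0:n])
			 -> GOMP_MAP_ALWAYS_POINTER (s.ptr)

   gains an extra GOMP_MAP_ALLOC (or GOMP_MAP_RELEASE on exit) node for
   the pointer member itself, so the pointer is present on the device
   independently of the data it points to.  */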
static tree
insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
			tree prev_node, tree *scp)
{
  enum gomp_map_kind mkind
    = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;

  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
  tree cl = scp ? prev_node : c2;
  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
  OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
  OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
  if (OMP_CLAUSE_CHAIN (prev_node) != c
      && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
      && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
	  == GOMP_MAP_TO_PSET))
    OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
  else
    OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
  if (struct_node)
    OMP_CLAUSE_CHAIN (struct_node) = c2;

  /* We might need to create an additional mapping if we have a reference to a
     pointer (in C++).  Don't do this if we have something other than a
     GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET.  */
  if (OMP_CLAUSE_CHAIN (prev_node) != c
      && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
      && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
	   == GOMP_MAP_ALWAYS_POINTER)
	  || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
	      == GOMP_MAP_ATTACH_DETACH)))
    {
      tree c4 = OMP_CLAUSE_CHAIN (prev_node);
      tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
      OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
      OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
      OMP_CLAUSE_CHAIN (c3) = prev_node;
      if (!scp)
	OMP_CLAUSE_CHAIN (c2) = c3;
      else
	cl = c3;
    }

  if (scp)
    *scp = c2;

  return cl;
}
/* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
   and set *BITPOSP and *POFFSETP to the bit offset of the access.
   If BASE_REF is non-NULL and the containing object is a reference, set
   *BASE_REF to that reference before dereferencing the object.
   If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
   has array type, else return NULL.  */
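/* For instance (an illustrative case, not taken from the code below),
   given BASE == s.c[2] with BASE_REF non-null, the ARRAY_REF is
   stripped, get_inner_reference resolves the access to the variable
   "s", and *POFFSETP/*BITPOSP receive the offset of field "c" within
   "s" as computed by get_inner_reference.  */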
static tree
extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
			 poly_offset_int *poffsetp)
{
  tree offset;
  poly_int64 bitsize, bitpos;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  poly_offset_int poffset;

  if (base_ref)
    {
      *base_ref = NULL_TREE;

      while (TREE_CODE (base) == ARRAY_REF)
	base = TREE_OPERAND (base, 0);

      if (TREE_CODE (base) == INDIRECT_REF)
	base = TREE_OPERAND (base, 0);
    }
  else
    {
      if (TREE_CODE (base) == ARRAY_REF)
	{
	  while (TREE_CODE (base) == ARRAY_REF)
	    base = TREE_OPERAND (base, 0);
	  if (TREE_CODE (base) != COMPONENT_REF
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
	    return NULL_TREE;
	}
      else if (TREE_CODE (base) == INDIRECT_REF
	       && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
		   == REFERENCE_TYPE))
	base = TREE_OPERAND (base, 0);
    }

  base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
			      &unsignedp, &reversep, &volatilep);

  tree orig_base = base;

  if ((TREE_CODE (base) == INDIRECT_REF
       || (TREE_CODE (base) == MEM_REF
	   && integer_zerop (TREE_OPERAND (base, 1))))
      && DECL_P (TREE_OPERAND (base, 0))
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
    base = TREE_OPERAND (base, 0);

  gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));

  if (offset)
    poffset = wi::to_poly_offset (offset);
  else
    poffset = 0;

  if (maybe_ne (bitpos, 0))
    poffset += bits_to_bytes_round_down (bitpos);

  *bitposp = bitpos;
  *poffsetp = poffset;

  /* Set *BASE_REF if BASE was a dereferenced reference variable.  */
  if (base_ref && orig_base != base)
    *base_ref = orig_base;

  return base;
}
/* Returns true if EXPR is or contains (as a sub-component) BASE_PTR.  */

static bool
is_or_contains_p (tree expr, tree base_ptr)
{
  while (expr != base_ptr)
    if (TREE_CODE (base_ptr) == COMPONENT_REF)
      base_ptr = TREE_OPERAND (base_ptr, 0);
    else
      break;
  return expr == base_ptr;
}
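/* E.g. is_or_contains_p (s, s.a.p) is true, because stripping the
   COMPONENT_REFs off S.A.P eventually yields S itself.  */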
/* Implement OpenMP 5.x map ordering rules for target directives.  There are
   several rules, and with some level of ambiguity, hopefully we can at least
   collect the complexity here in one place.  */
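/* Sketch of the effect (an illustration, not an exhaustive account of
   the rules): given

     map(tofrom: p[0:n]) map(alloc: q[0:m]) map(to: p)

   the alloc/release/delete maps are moved to the end of the clause list
   and the map of the base pointer P is hoisted before the clause that
   uses P as a base-pointer, roughly yielding

     map(to: p) map(tofrom: p[0:n]) map(alloc: q[0:m]).  */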
static void
omp_target_reorder_clauses (tree *list_p)
{
  /* Collect refs to alloc/release/delete maps.  */
  auto_vec<tree, 32> ard;
  tree *cp = list_p;
  while (*cp != NULL_TREE)
    if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
	&& (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALLOC
	    || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_RELEASE
	    || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_DELETE))
      {
	/* Unlink cp and push to ard.  */
	tree c = *cp;
	tree nc = OMP_CLAUSE_CHAIN (c);
	*cp = nc;
	ard.safe_push (c);

	/* Any associated pointer type maps should also move along.  */
	while (*cp != NULL_TREE
	       && OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
	       && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
		   || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_POINTER
		   || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ATTACH_DETACH
		   || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALWAYS_POINTER
		   || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_TO_PSET))
	  {
	    c = *cp;
	    nc = OMP_CLAUSE_CHAIN (c);
	    *cp = nc;
	    ard.safe_push (c);
	  }
      }
    else
      cp = &OMP_CLAUSE_CHAIN (*cp);

  /* Link alloc/release/delete maps to the end of list.  */
  for (unsigned int i = 0; i < ard.length (); i++)
    {
      *cp = ard[i];
      cp = &OMP_CLAUSE_CHAIN (ard[i]);
    }
  *cp = NULL_TREE;

  /* OpenMP 5.0 requires that pointer variables are mapped before
     their use as a base-pointer.  */
  auto_vec<tree *, 32> atf;
  for (tree *cp = list_p; *cp; cp = &OMP_CLAUSE_CHAIN (*cp))
    if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP)
      {
	/* Collect alloc, to, from, to/from clause tree pointers.  */
	gomp_map_kind k = OMP_CLAUSE_MAP_KIND (*cp);
	if (k == GOMP_MAP_ALLOC
	    || k == GOMP_MAP_TO
	    || k == GOMP_MAP_FROM
	    || k == GOMP_MAP_TOFROM
	    || k == GOMP_MAP_ALWAYS_TO
	    || k == GOMP_MAP_ALWAYS_FROM
	    || k == GOMP_MAP_ALWAYS_TOFROM)
	  atf.safe_push (cp);
      }

  for (unsigned int i = 0; i < atf.length (); i++)
    if (atf[i])
      {
	tree *cp = atf[i];
	tree decl = OMP_CLAUSE_DECL (*cp);
	if (TREE_CODE (decl) == INDIRECT_REF || TREE_CODE (decl) == MEM_REF)
	  {
	    tree base_ptr = TREE_OPERAND (decl, 0);
	    STRIP_TYPE_NOPS (base_ptr);
	    for (unsigned int j = i + 1; j < atf.length (); j++)
	      {
		tree *cp2 = atf[j];
		tree decl2 = OMP_CLAUSE_DECL (*cp2);
		if (is_or_contains_p (decl2, base_ptr))
		  {
		    /* Move *cp2 to before *cp.  */
		    tree c = *cp2;
		    *cp2 = OMP_CLAUSE_CHAIN (c);
		    OMP_CLAUSE_CHAIN (c) = *cp;
		    *cp = c;
		    atf[j] = NULL;
		  }
	      }
	  }
      }
}
/* DECL is supposed to have lastprivate semantics in the outer contexts
   of combined/composite constructs, starting with OCTX.
   Add needed lastprivate, shared or map clause if no data sharing or
   mapping clause is present.  IMPLICIT_P is true if it is an implicit
   clause (IV on simd), in which case the lastprivate will not be
   copied to some constructs.  */
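/* E.g. for

     #pragma omp target teams distribute parallel for lastprivate (x)

   this walk adds X as shared on the combined parallel and teams
   contexts, as lastprivate on the combined worksharing loop contexts
   and as an implicit map on the combined target, so the final value of
   X can be propagated all the way out (an illustrative sketch, not an
   exhaustive description of every case handled below).  */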
static void
omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
					       tree decl, bool implicit_p)
{
  struct gimplify_omp_ctx *orig_octx = octx;
  for (; octx; octx = octx->outer_context)
    {
      if ((octx->region_type == ORT_COMBINED_PARALLEL
	   || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
	  continue;
	}
      if ((octx->region_type & ORT_TASK) != 0
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (implicit_p
	  && octx->region_type == ORT_WORKSHARE
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && octx->outer_context
	  && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
	  && splay_tree_lookup (octx->outer_context->variables,
				(splay_tree_key) decl) == NULL)
	{
	  octx = octx->outer_context;
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && !omp_check_private (octx, decl, false))
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (octx->region_type == ORT_COMBINED_TARGET)
	{
	  splay_tree_node n = splay_tree_lookup (octx->variables,
						 (splay_tree_key) decl);
	  if (n == NULL)
	    {
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	  else if (!implicit_p
		   && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
	    {
	      n->value &= ~(GOVD_FIRSTPRIVATE
			    | GOVD_FIRSTPRIVATE_IMPLICIT
			    | GOVD_EXPLICIT);
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	}
      break;
    }
  if (octx && (implicit_p || octx != orig_octx))
    omp_notice_variable (octx, decl, true);
}
/* Scan the OMP clauses in *LIST_P, installing mappings into a new
   and previous omp contexts.  */
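/* E.g. when scanning "#pragma omp parallel firstprivate (x)", the
   FIRSTPRIVATE case below records GOVD_FIRSTPRIVATE | GOVD_EXPLICIT for
   X in the newly created context, and omp_notice_variable then
   propagates X's data-sharing to the enclosing contexts (an
   illustrative sketch).  */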
static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type,
			   enum tree_code code)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  tree c;
  hash_map<tree, tree> *struct_map_to_clause = NULL;
  hash_set<tree> *struct_deref_set = NULL;
  tree *prev_list_p = NULL, *orig_list_p = list_p;
  int handled_depend_iterators = -1;
  int nowait = -1;

  ctx = new_omp_context (region_type);
  ctx->code = code;
  outer_ctx = ctx->outer_context;
  if (code == OMP_TARGET)
    {
      if (!lang_GNU_Fortran ())
	ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
      ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
      ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
					     ? GOVD_MAP : GOVD_FIRSTPRIVATE);
    }
  if (!lang_GNU_Fortran ())
    switch (code)
      {
      case OMP_TARGET:
      case OMP_TARGET_DATA:
      case OMP_TARGET_ENTER_DATA:
      case OMP_TARGET_EXIT_DATA:
      case OACC_DECLARE:
      case OACC_HOST_DATA:
      case OACC_PARALLEL:
      case OACC_KERNELS:
	ctx->target_firstprivatize_array_bases = true;
      default:
	break;
      }

  if (code == OMP_TARGET
      || code == OMP_TARGET_DATA
      || code == OMP_TARGET_ENTER_DATA
      || code == OMP_TARGET_EXIT_DATA)
    omp_target_reorder_clauses (list_p);
  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;

	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;

	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	    {
	      gcc_assert (code == OMP_TARGET);
	      flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
	    }
	  goto do_add;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    switch (code)
	      {
	      case OMP_DISTRIBUTE:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "conditional %<lastprivate%> clause on "
			  "%qs construct", "distribute");
		OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
		break;
	      case OMP_TASKLOOP:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "conditional %<lastprivate%> clause on "
			  "%qs construct", "taskloop");
		OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
		break;
	      default:
		break;
	      }
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  if (code != OMP_LOOP)
	    check_non_private = "lastprivate";
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    goto do_add;
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	      && !lang_hooks.decls.omp_scalar_p (decl, true))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"non-scalar variable %qD in conditional "
			"%<lastprivate%> clause", decl);
	      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
	    }
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    flags |= GOVD_LASTPRIVATE_CONDITIONAL;
	  omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
							 false);
	  goto do_add;

	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
		{
		  if (nowait == -1)
		    nowait = omp_find_clause (*list_p,
					      OMP_CLAUSE_NOWAIT) != NULL_TREE;
		  if (nowait
		      && (outer_ctx == NULL
			  || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"%<task%> reduction modifier on a construct "
				"with a %<nowait%> clause");
		      OMP_CLAUSE_REDUCTION_TASK (c) = 0;
		    }
		}
	      else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "invalid %<task%> reduction modifier on construct "
			    "other than %<parallel%>, %qs, %<sections%> or "
			    "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
		  OMP_CLAUSE_REDUCTION_TASK (c) = 0;
		}
	    }
	  if (OMP_CLAUSE_REDUCTION_INSCAN (c))
	    switch (code)
	      {
	      case OMP_SECTIONS:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "sections");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_PARALLEL:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "parallel");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_TEAMS:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "teams");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_TASKLOOP:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "taskloop");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_SCOPE:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "scope");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      default:
		break;
	      }
	  /* FALLTHRU */
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  /* OpenACC permits reductions on private variables.  */
	  if (!(region_type & ORT_ACC)
	      /* taskgroup is actually not a worksharing region.  */
	      && code != OMP_TASKGROUP)
	    check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (decl);
	      bool saved_into_ssa = gimplify_ctxp->into_ssa;
	      gimplify_ctxp->into_ssa = false;
	      if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
				 NULL, is_gimple_val, fb_rvalue, false)
		  == GS_ERROR)
		{
		  gimplify_ctxp->into_ssa = saved_into_ssa;
		  remove = true;
		  break;
		}
	      gimplify_ctxp->into_ssa = saved_into_ssa;
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      if (DECL_P (v))
		{
		  omp_firstprivatize_variable (ctx, v);
		  omp_notice_variable (ctx, v, true);
		}
	      decl = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
		{
		  gimplify_ctxp->into_ssa = false;
		  if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
				     NULL, is_gimple_val, fb_rvalue, false)
		      == GS_ERROR)
		    {
		      gimplify_ctxp->into_ssa = saved_into_ssa;
		      remove = true;
		      break;
		    }
		  gimplify_ctxp->into_ssa = saved_into_ssa;
		  v = TREE_OPERAND (decl, 1);
		  if (DECL_P (v))
		    {
		      omp_firstprivatize_variable (ctx, v);
		      omp_notice_variable (ctx, v, true);
		    }
		  decl = TREE_OPERAND (decl, 0);
		}
	      if (TREE_CODE (decl) == ADDR_EXPR
		  || TREE_CODE (decl) == INDIRECT_REF)
		decl = TREE_OPERAND (decl, 0);
	    }
	  goto do_add_decl;

	case OMP_CLAUSE_LINEAR:
	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else
	    {
	      if (code == OMP_SIMD
		  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		{
		  struct gimplify_omp_ctx *octx = outer_ctx;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && !octx->distribute)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context->outer_context;
		      else
			octx = octx->outer_context;
		    }
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && octx->distribute)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"%<linear%> clause for variable other than "
				"loop iterator specified on construct "
				"combined with %<distribute%>");
		      remove = true;
		      break;
		    }
		}
	      /* For combined #pragma omp parallel for simd, need to put
		 lastprivate and perhaps firstprivate too on the
		 parallel.  Similarly for #pragma omp for simd.  */
	      struct gimplify_omp_ctx *octx = outer_ctx;
	      bool taskloop_seen = false;
	      decl = NULL_TREE;
	      do
		{
		  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    break;
		  decl = OMP_CLAUSE_DECL (c);
		  if (error_operand_p (decl))
		    {
		      decl = NULL_TREE;
		      break;
		    }
		  flags = GOVD_SEEN;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		    flags |= GOVD_FIRSTPRIVATE;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    flags |= GOVD_LASTPRIVATE;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context;
		      else if (omp_check_private (octx, decl, false))
			break;
		    }
		  else if (octx
			   && (octx->region_type & ORT_TASK) != 0
			   && octx->combined_loop)
		    taskloop_seen = true;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_PARALLEL
			   && ((ctx->region_type == ORT_WORKSHARE
				&& octx == outer_ctx)
			       || taskloop_seen))
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && ((octx->region_type & ORT_COMBINED_TEAMS)
			       == ORT_COMBINED_TEAMS))
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_TARGET)
		    {
		      if (flags & GOVD_LASTPRIVATE)
			flags = GOVD_SEEN | GOVD_MAP;
		    }
		  else
		    break;
		  splay_tree_node on
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
		    {
		      octx = NULL;
		      break;
		    }
		  omp_add_variable (octx, decl, flags);
		  if (octx->outer_context == NULL)
		    break;
		  octx = octx->outer_context;
		}
	      while (1);
	      if (octx
		  && decl
		  && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		omp_notice_variable (octx, decl, true);
	    }
	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
	      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
	    {
	      notice_outer = false;
	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
	    }
	  goto do_add;
	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    remove = true;
	  switch (code)
	    {
	    case OMP_TARGET:
	      break;
	    case OACC_DATA:
	      if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
		break;
	      /* FALLTHRU */
	    case OMP_TARGET_DATA:
	    case OMP_TARGET_ENTER_DATA:
	    case OMP_TARGET_EXIT_DATA:
	    case OACC_ENTER_DATA:
	    case OACC_EXIT_DATA:
	    case OACC_HOST_DATA:
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		/* For target {,enter ,exit }data only the array slice is
		   mapped, but not the pointer to it.  */
		remove = true;
	      break;
	    default:
	      break;
	    }
	  /* For Fortran, not only the pointer to the data is mapped but also
	     the address of the pointer, the array descriptor etc.; for
	     'exit data' - and in particular for 'delete:' - having an 'alloc:'
	     does not make sense.  Likewise, for 'update' only transferring the
	     data itself is needed as the rest has been handled in previous
	     directives.  However, for 'exit data', the array descriptor needs
	     to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE.

	     NOTE: Generally, it is not safe to perform "enter data" operations
	     on arrays where the data *or the descriptor* may go out of scope
	     before a corresponding "exit data" operation -- and such a
	     descriptor may be synthesized temporarily, e.g. to pass an
	     explicit-shape array to a function expecting an assumed-shape
	     argument.  Performing "enter data" inside the called function
	     would thus be problematic.  */
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
	    OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
					== GOMP_MAP_DELETE
					? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
	  else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
		   && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
	    remove = true;

	  if (remove)
	    break;
	  if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
	    {
	      struct gimplify_omp_ctx *octx;
	      for (octx = outer_ctx; octx; octx = octx->outer_context)
		{
		  if (octx->region_type != ORT_ACC_HOST_DATA)
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
			      "declared in enclosing %<host_data%> region",
			      DECL_NAME (decl));
		}
	    }
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		    || (OMP_CLAUSE_MAP_KIND (c)
			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		    || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		   && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
	    {
	      OMP_CLAUSE_SIZE (c)
		= get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
					   false);
	      if ((region_type & ORT_TARGET) != 0)
		omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
				  GOVD_FIRSTPRIVATE | GOVD_SEEN);
	    }

	  if (!DECL_P (decl))
	    {
	      tree d = decl, *pd;
	      if (TREE_CODE (d) == ARRAY_REF)
		{
		  while (TREE_CODE (d) == ARRAY_REF)
		    d = TREE_OPERAND (d, 0);
		  if (TREE_CODE (d) == COMPONENT_REF
		      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
		    decl = d;
		}
	      pd = &OMP_CLAUSE_DECL (c);
	      if (d == decl
		  && TREE_CODE (decl) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
		      == REFERENCE_TYPE))
		{
		  pd = &TREE_OPERAND (decl, 0);
		  decl = TREE_OPERAND (decl, 0);
		}
	      bool indir_p = false;
	      tree orig_decl = decl;
	      tree decl_ref = NULL_TREE;
	      if ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA)) != 0
		  && TREE_CODE (*pd) == COMPONENT_REF
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
		  && code != OACC_UPDATE)
		{
		  while (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      decl = TREE_OPERAND (decl, 0);
		      if (((TREE_CODE (decl) == MEM_REF
			    && integer_zerop (TREE_OPERAND (decl, 1)))
			   || INDIRECT_REF_P (decl))
			  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			      == POINTER_TYPE))
			{
			  indir_p = true;
			  decl = TREE_OPERAND (decl, 0);
			}
		      if (TREE_CODE (decl) == INDIRECT_REF
			  && DECL_P (TREE_OPERAND (decl, 0))
			  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			      == REFERENCE_TYPE))
			{
			  decl_ref = decl;
			  decl = TREE_OPERAND (decl, 0);
			}
		    }
		}
	      else if (TREE_CODE (decl) == COMPONENT_REF)
		{
		  while (TREE_CODE (decl) == COMPONENT_REF)
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && DECL_P (TREE_OPERAND (decl, 0))
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		}
	      if (decl != orig_decl && DECL_P (decl) && indir_p)
		{
		  gomp_map_kind k
		    = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
		       ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
		  /* We have a dereference of a struct member.  Make this an
		     attach/detach operation, and ensure the base pointer is
		     mapped as a FIRSTPRIVATE_POINTER.  */
		  OMP_CLAUSE_SET_MAP_KIND (c, k);
		  flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
		  tree next_clause = OMP_CLAUSE_CHAIN (c);
		  if (k == GOMP_MAP_ATTACH
		      && code != OACC_ENTER_DATA
		      && code != OMP_TARGET_ENTER_DATA
		      && (!next_clause
			  || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
			  || (OMP_CLAUSE_MAP_KIND (next_clause)
			      != GOMP_MAP_POINTER)
			  || OMP_CLAUSE_DECL (next_clause) != decl)
		      && (!struct_deref_set
			  || !struct_deref_set->contains (decl)))
		    {
		      if (!struct_deref_set)
			struct_deref_set = new hash_set<tree> ();
		      /* As well as the attach, we also need a
			 FIRSTPRIVATE_POINTER clause to properly map the
			 pointer to the struct base.  */
		      tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						  OMP_CLAUSE_MAP);
		      OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
		      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
			= 1;
		      tree charptr_zero
			= build_int_cst (build_pointer_type (char_type_node),
					 0);
		      OMP_CLAUSE_DECL (c2)
			= build2 (MEM_REF, char_type_node,
				  decl_ref ? decl_ref : decl, charptr_zero);
		      OMP_CLAUSE_SIZE (c2) = size_zero_node;
		      tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						  OMP_CLAUSE_MAP);
		      OMP_CLAUSE_SET_MAP_KIND (c3,
					       GOMP_MAP_FIRSTPRIVATE_POINTER);
		      OMP_CLAUSE_DECL (c3) = decl;
		      OMP_CLAUSE_SIZE (c3) = size_zero_node;
		      tree mapgrp = *prev_list_p;
		      *prev_list_p = c2;
		      OMP_CLAUSE_CHAIN (c3) = mapgrp;
		      OMP_CLAUSE_CHAIN (c2) = c3;

		      struct_deref_set->add (decl);
		    }
		  goto do_add_decl;
		}
	      /* An "attach/detach" operation on an update directive should
		 behave as a GOMP_MAP_ALWAYS_POINTER.  Beware that
		 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
		 depends on the previous mapping.  */
	      if (code == OACC_UPDATE
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
	      if (TREE_CODE (decl) == COMPONENT_REF
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
		  && code != OACC_UPDATE
		  && code != OMP_TARGET_UPDATE)
		{
		  if (error_operand_p (decl))
		    {
		      remove = true;
		      break;
		    }

		  tree stype = TREE_TYPE (decl);
		  if (TREE_CODE (stype) == REFERENCE_TYPE)
		    stype = TREE_TYPE (stype);
		  if (TYPE_SIZE_UNIT (stype) == NULL
		      || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"mapping field %qE of variable length "
				"structure", OMP_CLAUSE_DECL (c));
		      remove = true;
		      break;
		    }

		  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
		      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		    {
		      /* Error recovery.  */
		      if (prev_list_p == NULL)
			{
			  remove = true;
			  break;
			}
		      if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
			{
			  tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
			  if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
			    {
			      remove = true;
			      break;
			    }
			}
		    }

		  poly_offset_int offset1;
		  poly_int64 bitpos1;
		  tree base_ref;

		  tree base
		    = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
					       &bitpos1, &offset1);

		  gcc_assert (base == decl);

		  splay_tree_node n
		    = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
		  bool ptr = (OMP_CLAUSE_MAP_KIND (c)
			      == GOMP_MAP_ALWAYS_POINTER);
		  bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
					== GOMP_MAP_ATTACH_DETACH);
		  bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
				|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
		  bool has_attachments = false;
		  /* For OpenACC, pointers in structs should trigger an
		     attach action.  */
		  if (attach_detach
		      && ((region_type
			   & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA))
			  || code == OMP_TARGET_ENTER_DATA
			  || code == OMP_TARGET_EXIT_DATA))
		    {
		      /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
			 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
			 have detected a case that needs a GOMP_MAP_STRUCT
			 mapping added.  */
		      gomp_map_kind k
			= ((code == OACC_EXIT_DATA
			    || code == OMP_TARGET_EXIT_DATA)
			   ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
		      OMP_CLAUSE_SET_MAP_KIND (c, k);
		      has_attachments = true;
		    }
		  if (n == NULL || (n->value & GOVD_MAP) == 0)
		    {
		      tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						 OMP_CLAUSE_MAP);
		      gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
					       : GOMP_MAP_STRUCT;

		      OMP_CLAUSE_SET_MAP_KIND (l, k);
		      if (base_ref)
			OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
		      else
			OMP_CLAUSE_DECL (l) = decl;
		      OMP_CLAUSE_SIZE (l)
			= (!attach
			   ? size_int (1)
			   : DECL_P (OMP_CLAUSE_DECL (l))
			   ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
			   : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
		      if (struct_map_to_clause == NULL)
			struct_map_to_clause = new hash_map<tree, tree>;
		      struct_map_to_clause->put (decl, l);
		      if (ptr || attach_detach)
			{
			  insert_struct_comp_map (code, c, l, *prev_list_p,
						  NULL);
			  *prev_list_p = l;
			  prev_list_p = NULL;
			}
		      else
			{
			  OMP_CLAUSE_CHAIN (l) = c;
			  *list_p = l;
			  list_p = &OMP_CLAUSE_CHAIN (l);
			}
		      if (base_ref && code == OMP_TARGET)
			{
			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						      OMP_CLAUSE_MAP);
			  enum gomp_map_kind mkind
			    = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
			  OMP_CLAUSE_DECL (c2) = decl;
			  OMP_CLAUSE_SIZE (c2) = size_zero_node;
			  OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
			  OMP_CLAUSE_CHAIN (l) = c2;
			}
		      flags = GOVD_MAP | GOVD_EXPLICIT;
		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
			  || ptr
			  || attach_detach)
			flags |= GOVD_SEEN;
		      if (has_attachments)
			flags |= GOVD_MAP_HAS_ATTACHMENTS;
		      goto do_add_decl;
		    }
		  else if (struct_map_to_clause)
		    {
		      tree *osc = struct_map_to_clause->get (decl);
		      tree *sc = NULL, *scp = NULL;
		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
			  || ptr
			  || attach_detach)
			n->value |= GOVD_SEEN;
		      sc = &OMP_CLAUSE_CHAIN (*osc);
		      if (*sc != c
			  && (OMP_CLAUSE_MAP_KIND (*sc)
			      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
			sc = &OMP_CLAUSE_CHAIN (*sc);
		      /* Here "prev_list_p" is the end of the inserted
			 alloc/release nodes after the struct node, OSC.  */
		      for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
			if ((ptr || attach_detach) && sc == prev_list_p)
			  break;
			else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
				 != COMPONENT_REF
				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
				     != INDIRECT_REF)
				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
				     != ARRAY_REF))
			  break;
			else
			  {
			    tree sc_decl = OMP_CLAUSE_DECL (*sc);
			    poly_offset_int offsetn;
			    poly_int64 bitposn;
			    tree base
			      = extract_base_bit_offset (sc_decl, NULL,
							 &bitposn, &offsetn);
			    if (base != decl)
			      break;
			    if (scp)
			      continue;
			    if ((region_type & ORT_ACC) != 0)
			      {
				/* This duplicate checking code is currently
				   only enabled for OpenACC.  */
				tree d1 = OMP_CLAUSE_DECL (*sc);
				tree d2 = OMP_CLAUSE_DECL (c);
				while (TREE_CODE (d1) == ARRAY_REF)
				  d1 = TREE_OPERAND (d1, 0);
				while (TREE_CODE (d2) == ARRAY_REF)
				  d2 = TREE_OPERAND (d2, 0);
				if (TREE_CODE (d1) == INDIRECT_REF)
				  d1 = TREE_OPERAND (d1, 0);
				if (TREE_CODE (d2) == INDIRECT_REF)
				  d2 = TREE_OPERAND (d2, 0);
				while (TREE_CODE (d1) == COMPONENT_REF)
				  if (TREE_CODE (d2) == COMPONENT_REF
				      && TREE_OPERAND (d1, 1)
					 == TREE_OPERAND (d2, 1))
				    {
				      d1 = TREE_OPERAND (d1, 0);
				      d2 = TREE_OPERAND (d2, 0);
				    }
				  else
				    break;
				if (d1 == d2)
				  {
				    error_at (OMP_CLAUSE_LOCATION (c),
					      "%qE appears more than once in map "
					      "clauses", OMP_CLAUSE_DECL (c));
				    remove = true;
				    break;
				  }
			      }
			    if (maybe_lt (offset1, offsetn)
				|| (known_eq (offset1, offsetn)
				    && maybe_lt (bitpos1, bitposn)))
			      {
				if (ptr || attach_detach)
				  scp = sc;
				else
				  break;
			      }
			  }
		      if (remove)
			break;
		      OMP_CLAUSE_SIZE (*osc)
			= size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
				      size_one_node);
		      if (ptr || attach_detach)
			{
			  tree cl = insert_struct_comp_map (code, c, NULL,
							    *prev_list_p, scp);
			  if (sc == prev_list_p)
			    {
			      *sc = cl;
			      prev_list_p = NULL;
			    }
			  else
			    {
			      *prev_list_p = OMP_CLAUSE_CHAIN (c);
			      list_p = prev_list_p;
			      prev_list_p = NULL;
			      OMP_CLAUSE_CHAIN (c) = *sc;
			      *sc = cl;
			      continue;
			    }
			}
		      else if (*sc != c)
			{
			  *list_p = OMP_CLAUSE_CHAIN (c);
			  OMP_CLAUSE_CHAIN (c) = *sc;
			  *sc = c;
			  continue;
			}
		    }
		}
	      else if ((code == OACC_ENTER_DATA
			|| code == OACC_EXIT_DATA
			|| code == OACC_DATA
			|| code == OACC_PARALLEL
			|| code == OACC_KERNELS
			|| code == OACC_SERIAL)
		       && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		{
		  gomp_map_kind k = (code == OACC_EXIT_DATA
				     ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
		  OMP_CLAUSE_SET_MAP_KIND (c, k);
		}

	      if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
		{
		  /* Don't gimplify *pd fully at this point, as the base
		     will need to be adjusted during omp lowering.  */
		  auto_vec<tree, 10> expr_stack;
		  tree *p = pd;
		  while (handled_component_p (*p)
			 || TREE_CODE (*p) == INDIRECT_REF
			 || TREE_CODE (*p) == ADDR_EXPR
			 || TREE_CODE (*p) == MEM_REF
			 || TREE_CODE (*p) == NON_LVALUE_EXPR)
		    {
		      expr_stack.safe_push (*p);
		      p = &TREE_OPERAND (*p, 0);
		    }
		  for (int i = expr_stack.length () - 1; i >= 0; i--)
		    {
		      tree t = expr_stack[i];
		      if (TREE_CODE (t) == ARRAY_REF
			  || TREE_CODE (t) == ARRAY_RANGE_REF)
			{
			  if (TREE_OPERAND (t, 2) == NULL_TREE)
			    {
			      tree low = unshare_expr (array_ref_low_bound (t));
			      if (!is_gimple_min_invariant (low))
				{
				  TREE_OPERAND (t, 2) = low;
				  if (gimplify_expr (&TREE_OPERAND (t, 2),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			  if (TREE_OPERAND (t, 3) == NULL_TREE)
			    {
			      tree elmt_size = array_ref_element_size (t);
			      if (!is_gimple_min_invariant (elmt_size))
				{
				  elmt_size = unshare_expr (elmt_size);
				  tree elmt_type
				    = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
									  0)));
				  tree factor
				    = size_int (TYPE_ALIGN_UNIT (elmt_type));
				  elmt_size
				    = size_binop (EXACT_DIV_EXPR, elmt_size,
						  factor);
				  TREE_OPERAND (t, 3) = elmt_size;
				  if (gimplify_expr (&TREE_OPERAND (t, 3),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		      else if (TREE_CODE (t) == COMPONENT_REF)
			{
			  if (TREE_OPERAND (t, 2) == NULL_TREE)
			    {
			      tree offset = component_ref_field_offset (t);
			      if (!is_gimple_min_invariant (offset))
				{
				  offset = unshare_expr (offset);
				  tree field = TREE_OPERAND (t, 1);
				  tree factor
				    = size_int (DECL_OFFSET_ALIGN (field)
						/ BITS_PER_UNIT);
				  offset = size_binop (EXACT_DIV_EXPR, offset,
						       factor);
				  TREE_OPERAND (t, 2) = offset;
				  if (gimplify_expr (&TREE_OPERAND (t, 2),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		    }
		  for (; expr_stack.length () > 0; )
		    {
		      tree t = expr_stack.pop ();

		      if (TREE_CODE (t) == ARRAY_REF
			  || TREE_CODE (t) == ARRAY_RANGE_REF)
			{
			  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
			      && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
						NULL, is_gimple_val,
						fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		    }
		}
	      else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
				      fb_lvalue) == GS_ERROR)
		{
		  remove = true;
		  break;
		}

	      if (!remove
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
		  && OMP_CLAUSE_CHAIN (c)
		  && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
		  && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
		       == GOMP_MAP_ALWAYS_POINTER)
		      || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			  == GOMP_MAP_ATTACH_DETACH)
		      || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			  == GOMP_MAP_TO_PSET)))
		prev_list_p = list_p;

	      break;
	    }
	  /* DECL_P (decl) == true  */
	  tree *sc;
	  if (struct_map_to_clause
	      && (sc = struct_map_to_clause->get (decl)) != NULL
	      && OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_STRUCT
	      && decl == OMP_CLAUSE_DECL (*sc))
	    {
	      /* We have found a map of the whole structure after a
		 leading GOMP_MAP_STRUCT has been created, so refill the
		 leading clause into a map of the whole structure
		 variable, and remove the current one.
		 TODO: we should be able to remove some maps of the
		 following structure element maps if they are of
		 compatible TO/FROM/ALLOC type.  */
	      OMP_CLAUSE_SET_MAP_KIND (*sc, OMP_CLAUSE_MAP_KIND (c));
	      OMP_CLAUSE_SIZE (*sc) = unshare_expr (OMP_CLAUSE_SIZE (c));
	      remove = true;
	      break;
	    }

	  flags = GOVD_MAP | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
	    flags |= GOVD_MAP_ALWAYS_TO;

	  if ((code == OMP_TARGET
	       || code == OMP_TARGET_DATA
	       || code == OMP_TARGET_ENTER_DATA
	       || code == OMP_TARGET_EXIT_DATA)
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
	    {
	      for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
		   octx = octx->outer_context)
		{
		  splay_tree_node n
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) OMP_CLAUSE_DECL (c));
		  /* If this is contained in an outer OpenMP region as a
		     firstprivate value, remove the attach/detach.  */
		  if (n && (n->value & GOVD_FIRSTPRIVATE))
		    {
		      OMP_CLAUSE_SET_MAP_KIND (c,
					       GOMP_MAP_FIRSTPRIVATE_POINTER);
		      goto do_add;
		    }
		}

	      enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
					     ? GOMP_MAP_DETACH
					     : GOMP_MAP_ATTACH);
	      OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
	    }
	  goto do_add;
	case OMP_CLAUSE_AFFINITY:
	  gimplify_omp_affinity (list_p, pre_p);
	  remove = true;
	  break;

	case OMP_CLAUSE_DEPEND:
	  if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree deps = OMP_CLAUSE_DECL (c);
	      while (deps && TREE_CODE (deps) == TREE_LIST)
		{
		  if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
		      && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
		    gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
				   pre_p, NULL, is_gimple_val, fb_rvalue);
		  deps = TREE_CHAIN (deps);
		}
	      break;
	    }
	  else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	    break;
	  if (handled_depend_iterators == -1)
	    handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
	  if (handled_depend_iterators)
	    {
	      if (handled_depend_iterators == 2)
		remove = true;
	      break;
	    }
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	    {
	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			     NULL, is_gimple_val, fb_rvalue);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	    }
	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
	    {
	      remove = true;
	      break;
	    }
	  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	  if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (code == OMP_TASK)
	    ctx->has_depend = true;
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!DECL_P (decl))
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  goto do_notice;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  flags = GOVD_EXPLICIT;
	  goto do_add;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  goto do_add;
	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	do_add_decl:
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
	    {
	      tree t = omp_member_access_dummy_var (decl);
	      if (t)
		{
		  tree v = DECL_VALUE_EXPR (decl);
		  DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
		  if (outer_ctx)
		    omp_notice_variable (outer_ctx, t, true);
		}
	    }
	  if (code == OACC_DATA
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
	    flags |= GOVD_MAP_0LEN_ARRAY;
	  omp_add_variable (ctx, decl, flags);
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      struct gimplify_omp_ctx *pctx
		= code == OMP_TARGET ? outer_ctx : ctx;
	      if (pctx)
		omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				  GOVD_LOCAL | GOVD_SEEN);
	      if (pctx
		  && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
		  && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
				find_decl_expr,
				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				NULL) == NULL_TREE)
		omp_add_variable (pctx,
				  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				  GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = pctx;
	      push_gimplify_context ();

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context ();
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;
    case OMP_CLAUSE_COPYIN:
    case OMP_CLAUSE_COPYPRIVATE:
      decl = OMP_CLAUSE_DECL (c);
      if (error_operand_p (decl))
	{
	  remove = true;
	  break;
	}
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
	  && !remove
	  && !omp_check_private (ctx, decl, true))
	{
	  remove = true;
	  if (is_global_var (decl))
	    {
	      if (DECL_THREAD_LOCAL_P (decl))
		remove = false;
	      else if (DECL_HAS_VALUE_EXPR_P (decl))
		{
		  tree value = get_base_address (DECL_VALUE_EXPR (decl));

		  if (value
		      && DECL_P (value)
		      && DECL_THREAD_LOCAL_P (value))
		    remove = false;
		}
	    }
	  if (remove)
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "copyprivate variable %qE is not threadprivate"
		      " or private in outer context", DECL_NAME (decl));
	}
    do_notice:
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
	  && outer_ctx
	  && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
	      || (region_type == ORT_WORKSHARE
		  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		      || code == OMP_LOOP)))
	  && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
	      || (code == OMP_LOOP
		  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
		      == ORT_COMBINED_TEAMS))))
	{
	  splay_tree_node on
	    = splay_tree_lookup (outer_ctx->variables,
				 (splay_tree_key)decl);
	  if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
		  && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
		      || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
			      == POINTER_TYPE))))
		omp_firstprivatize_variable (outer_ctx, decl);
	      else
		{
		  omp_add_variable (outer_ctx, decl,
				    GOVD_SEEN | GOVD_SHARED);
		  if (outer_ctx->outer_context)
		    omp_notice_variable (outer_ctx->outer_context, decl,
					 true);
		}
	    }
	}
      if (outer_ctx)
	omp_notice_variable (outer_ctx, decl, true);
      if (check_non_private
	  && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
	  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	      || decl == OMP_CLAUSE_DECL (c)
	      || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
		  && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
		      == ADDR_EXPR
		      || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			  == POINTER_PLUS_EXPR
			  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
					   (OMP_CLAUSE_DECL (c), 0), 0))
			      == ADDR_EXPR)))))
	  && omp_check_private (ctx, decl, false))
	{
	  error ("%s variable %qE is private in outer context",
		 check_non_private, DECL_NAME (decl));
	  remove = true;
	}
      break;
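      /* Illustrative example (ours, not from the original source): the
	 check_non_private diagnostic above rejects e.g.

	   int x;
	   #pragma omp parallel private (x)
	   #pragma omp for reduction (+:x)
	   for (i = 0; i < n; i++)
	     x += i;

	 because the reduction variable x is private in the outer
	 context.  */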
    case OMP_CLAUSE_DETACH:
      flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
      goto do_add;

    case OMP_CLAUSE_IF:
      if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
	  && OMP_CLAUSE_IF_MODIFIER (c) != code)
	{
	  const char *p[2];
	  for (int i = 0; i < 2; i++)
	    switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
	      {
	      case VOID_CST: p[i] = "cancel"; break;
	      case OMP_PARALLEL: p[i] = "parallel"; break;
	      case OMP_SIMD: p[i] = "simd"; break;
	      case OMP_TASK: p[i] = "task"; break;
	      case OMP_TASKLOOP: p[i] = "taskloop"; break;
	      case OMP_TARGET_DATA: p[i] = "target data"; break;
	      case OMP_TARGET: p[i] = "target"; break;
	      case OMP_TARGET_UPDATE: p[i] = "target update"; break;
	      case OMP_TARGET_ENTER_DATA:
		p[i] = "target enter data"; break;
	      case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
	      default: gcc_unreachable ();
	      }
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "expected %qs %<if%> clause modifier rather than %qs",
		    p[0], p[1]);
	  remove = true;
	}
      /* Fall through.  */
    case OMP_CLAUSE_FINAL:
      OMP_CLAUSE_OPERAND (c, 0)
	= gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
      /* Fall through.  */

    case OMP_CLAUSE_SCHEDULE:
    case OMP_CLAUSE_NUM_THREADS:
    case OMP_CLAUSE_NUM_TEAMS:
    case OMP_CLAUSE_THREAD_LIMIT:
    case OMP_CLAUSE_DIST_SCHEDULE:
    case OMP_CLAUSE_DEVICE:
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
	  && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	{
	  if (code != OMP_TARGET)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<device%> clause with %<ancestor%> is only "
			"allowed on %<target%> construct");
	      remove = true;
	      break;
	    }

	  tree clauses = *orig_list_p;
	  for (; clauses; clauses = OMP_CLAUSE_CHAIN (clauses))
	    if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
		&& OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
		&& OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
		&& OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
		&& OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "with %<ancestor%>, only the %<device%>, "
			  "%<firstprivate%>, %<private%>, %<defaultmap%>, "
			  "and %<map%> clauses may appear on the "
			  "construct");
		remove = true;
		break;
	      }
	}
      /* Fall through.  */

    case OMP_CLAUSE_PRIORITY:
    case OMP_CLAUSE_GRAINSIZE:
    case OMP_CLAUSE_NUM_TASKS:
    case OMP_CLAUSE_FILTER:
    case OMP_CLAUSE_HINT:
    case OMP_CLAUSE_ASYNC:
    case OMP_CLAUSE_WAIT:
    case OMP_CLAUSE_NUM_GANGS:
    case OMP_CLAUSE_NUM_WORKERS:
    case OMP_CLAUSE_VECTOR_LENGTH:
    case OMP_CLAUSE_WORKER:
    case OMP_CLAUSE_VECTOR:
      if (OMP_CLAUSE_OPERAND (c, 0)
	  && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
	{
	  if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
	    {
	      remove = true;
	      break;
	    }
	  /* All these clauses care about the value, not a particular decl,
	     so try to force it into a SSA_NAME or fresh temporary.  */
	  OMP_CLAUSE_OPERAND (c, 0)
	    = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
				       pre_p, NULL, true);
	}
      break;
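      /* Illustrative example (ours): for

	   #pragma omp parallel num_threads (foo () + 1)

	 the operand is evaluated once into a fresh temporary by
	 get_initialized_tmp_var above, so later gimplification sees a
	 simple value rather than an arbitrary expression.  */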
    case OMP_CLAUSE_GANG:
      if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			 is_gimple_val, fb_rvalue) == GS_ERROR)
	remove = true;
      if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
			 is_gimple_val, fb_rvalue) == GS_ERROR)
	remove = true;
      break;

    case OMP_CLAUSE_NOWAIT:
      nowait = 1;
      break;

    case OMP_CLAUSE_ORDERED:
    case OMP_CLAUSE_UNTIED:
    case OMP_CLAUSE_COLLAPSE:
    case OMP_CLAUSE_TILE:
    case OMP_CLAUSE_AUTO:
    case OMP_CLAUSE_SEQ:
    case OMP_CLAUSE_INDEPENDENT:
    case OMP_CLAUSE_MERGEABLE:
    case OMP_CLAUSE_PROC_BIND:
    case OMP_CLAUSE_SAFELEN:
    case OMP_CLAUSE_SIMDLEN:
    case OMP_CLAUSE_NOGROUP:
    case OMP_CLAUSE_THREADS:
    case OMP_CLAUSE_SIMD:
    case OMP_CLAUSE_BIND:
    case OMP_CLAUSE_IF_PRESENT:
    case OMP_CLAUSE_FINALIZE:
      break;
    case OMP_CLAUSE_ORDER:
      ctx->order_concurrent = true;
      break;

    case OMP_CLAUSE_DEFAULTMAP:
      enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
      switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
	{
	case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
	  gdmkmin = GDMK_SCALAR;
	  gdmkmax = GDMK_POINTER;
	  break;
	case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
	  gdmkmin = GDMK_SCALAR;
	  gdmkmax = GDMK_SCALAR_TARGET;
	  break;
	case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
	  gdmkmin = gdmkmax = GDMK_AGGREGATE;
	  break;
	case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
	  gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
	  break;
	case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
	  gdmkmin = gdmkmax = GDMK_POINTER;
	  break;
	default:
	  gcc_unreachable ();
	}
      for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
	switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
	  {
	  case OMP_CLAUSE_DEFAULTMAP_ALLOC:
	    ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
	    break;
	  case OMP_CLAUSE_DEFAULTMAP_TO:
	    ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
	    break;
	  case OMP_CLAUSE_DEFAULTMAP_FROM:
	    ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
	    break;
	  case OMP_CLAUSE_DEFAULTMAP_TOFROM:
	    ctx->defaultmap[gdmk] = GOVD_MAP;
	    break;
	  case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
	    ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
	    break;
	  case OMP_CLAUSE_DEFAULTMAP_NONE:
	    ctx->defaultmap[gdmk] = 0;
	    break;
	  case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
	    switch (gdmk)
	      {
	      case GDMK_SCALAR:
		ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
		break;
	      case GDMK_SCALAR_TARGET:
		ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
					 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
		break;
	      case GDMK_AGGREGATE:
	      case GDMK_ALLOCATABLE:
		ctx->defaultmap[gdmk] = GOVD_MAP;
		break;
	      case GDMK_POINTER:
		ctx->defaultmap[gdmk] = GOVD_MAP;
		if (!lang_GNU_Fortran ())
		  ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
		break;
	      default:
		gcc_unreachable ();
	      }
	    break;
	  default:
	    gcc_unreachable ();
	  }
      break;
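      /* Illustrative summary (ours): "defaultmap (firstprivate: scalar)"
	 sets ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE, while a
	 category-less "defaultmap (tofrom)" sets GOVD_MAP for all of
	 GDMK_SCALAR through GDMK_POINTER, changing how otherwise unlisted
	 variables are implicitly mapped on the construct.  */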
    case OMP_CLAUSE_ALIGNED:
      decl = OMP_CLAUSE_DECL (c);
      if (error_operand_p (decl))
	{
	  remove = true;
	  break;
	}
      if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
			 is_gimple_val, fb_rvalue) == GS_ERROR)
	{
	  remove = true;
	  break;
	}
      if (!is_global_var (decl)
	  && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	omp_add_variable (ctx, decl, GOVD_ALIGNED);
      break;

    case OMP_CLAUSE_NONTEMPORAL:
      decl = OMP_CLAUSE_DECL (c);
      if (error_operand_p (decl))
	{
	  remove = true;
	  break;
	}
      omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
      break;

    case OMP_CLAUSE_ALLOCATE:
      decl = OMP_CLAUSE_DECL (c);
      if (error_operand_p (decl))
	{
	  remove = true;
	  break;
	}
      if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
			 is_gimple_val, fb_rvalue) == GS_ERROR)
	{
	  remove = true;
	  break;
	}
      else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	       || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
		   == INTEGER_CST))
	;
      else if (code == OMP_TASKLOOP
	       || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
	OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
	  = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
				     pre_p, NULL, false);
      break;
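      /* Illustrative example (ours; my_alloc is a placeholder name):

	   #pragma omp taskloop allocate (my_alloc : x)

	 forces the non-constant allocator expression into a temporary
	 here, so it is evaluated before the construct runs.  */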
    case OMP_CLAUSE_DEFAULT:
      ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
      break;

    case OMP_CLAUSE_INCLUSIVE:
    case OMP_CLAUSE_EXCLUSIVE:
      decl = OMP_CLAUSE_DECL (c);
      {
	splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
					       (splay_tree_key) decl);
	if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%qD specified in %qs clause but not in %<inscan%> "
		      "%<reduction%> clause on the containing construct",
		      decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
	    remove = true;
	  }
	else
	  {
	    n->value |= GOVD_REDUCTION_INSCAN;
	    if (outer_ctx->region_type == ORT_SIMD
		&& outer_ctx->outer_context
		&& outer_ctx->outer_context->region_type == ORT_WORKSHARE)
	      {
		n = splay_tree_lookup (outer_ctx->outer_context->variables,
				       (splay_tree_key) decl);
		if (n && (n->value & GOVD_REDUCTION) != 0)
		  n->value |= GOVD_REDUCTION_INSCAN;
	      }
	  }
      }
      break;

    case OMP_CLAUSE_NOHOST:
    default:
      gcc_unreachable ();
    }

  if (code == OACC_DATA
      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
	  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
    remove = true;
  if (remove)
    *list_p = OMP_CLAUSE_CHAIN (c);
  else
    list_p = &OMP_CLAUSE_CHAIN (c);
  }

  ctx->clauses = *orig_list_p;
  gimplify_omp_ctxp = ctx;
  if (struct_map_to_clause)
    delete struct_map_to_clause;
  if (struct_deref_set)
    delete struct_deref_set;
}
/* Return true if DECL is a candidate for shared to firstprivate
   optimization.  We only consider non-addressable scalars, not
   too big, and not references.  */

static bool
omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
{
  if (TREE_ADDRESSABLE (decl))
    return false;
  tree type = TREE_TYPE (decl);
  if (!is_gimple_reg_type (type)
      || TREE_CODE (type) == REFERENCE_TYPE
      || TREE_ADDRESSABLE (type))
    return false;
  /* Don't optimize too large decls, as each thread/task will have
     its own copy.  */
  HOST_WIDE_INT len = int_size_in_bytes (type);
  if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
    return false;
  if (omp_privatize_by_reference (decl))
    return false;
  return true;
}
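/* Illustrative example (ours): a plain "int" scalar passes the checks
   above, while an addressable variable, a reference, or anything larger
   than four pointers keeps its shared mapping and is not considered for
   the firstprivate optimization.  */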
/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
   For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
   GOVD_WRITTEN in outer contexts.  */

static void
omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
{
  for (; ctx; ctx = ctx->outer_context)
    {
      splay_tree_node n = splay_tree_lookup (ctx->variables,
					     (splay_tree_key) decl);
      if (n == NULL)
	continue;
      else if (n->value & GOVD_SHARED)
	{
	  n->value |= GOVD_WRITTEN;
	  return;
	}
      else if (n->value & GOVD_DATA_SHARE_CLASS)
	return;
    }
}
/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context.  */

static tree
omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;

  *walk_subtrees = 0;
  if (!wi->is_lhs)
    return NULL_TREE;

  tree op = *tp;
  do
    {
      if (handled_component_p (op))
	op = TREE_OPERAND (op, 0);
      else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
	op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
      else
	break;
    }
  while (1);
  if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
    return NULL_TREE;

  omp_mark_stores (gimplify_omp_ctxp, op);
  return NULL_TREE;
}
/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context.  */

static tree
omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
		      bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  switch (gimple_code (stmt))
    {
    /* Don't recurse on OpenMP constructs for which
       gimplify_adjust_omp_clauses already handled the bodies,
       except handle gimple_omp_for_pre_body.  */
    case GIMPLE_OMP_FOR:
      *handled_ops_p = true;
      if (gimple_omp_for_pre_body (stmt))
	walk_gimple_seq (gimple_omp_for_pre_body (stmt),
			 omp_find_stores_stmt, omp_find_stores_op, wi);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_CRITICAL:
      *handled_ops_p = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
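/* Illustrative example (ours): in

     #pragma omp parallel shared (x)
     { if (p) x = 1; }

   the walkers above see the conditional store to x and mark it
   GOVD_WRITTEN, which prevents treating the shared x as read-only.  */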
struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;
  gimple_seq *pre_p;
};

/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
      && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
    flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
      if (VAR_P (decl)
	  && DECL_IN_CONSTANT_POOL (decl)
	  && !lookup_attribute ("omp declare target",
				DECL_ATTRIBUTES (decl)))
	{
	  tree id = get_identifier ("omp declare target");
	  DECL_ATTRIBUTES (decl)
	    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
	  varpool_node *node = varpool_node::get (decl);
	  if (node)
	    {
	      node->offloadable = 1;
	      if (ENABLE_OFFLOADING)
		g->have_offload = true;
	    }
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
      /* Don't optimize shared into firstprivate for read-only vars
	 on tasks with depend clause, we shouldn't try to copy them
	 until the dependencies are satisfied.  */
      if (gimplify_omp_ctxp->has_depend)
	flags |= GOVD_WRITTEN;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
    return 0;
  else if (flags & GOVD_CONDTEMP)
    {
      code = OMP_CLAUSE__CONDTEMP_;
      gimple_add_tmp_var (decl);
    }
  else
    gcc_unreachable ();
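  /* At this point FLAGS has been translated into a clause code: e.g.
     GOVD_FIRSTPRIVATE becomes OMP_CLAUSE_FIRSTPRIVATE and a plain GOVD_MAP
     becomes an implicit tofrom map (illustrative summary, not part of the
     original source).  */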
  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
		       | GOVD_MAP_FORCE
		       | GOVD_MAP_FORCE_PRESENT
		       | GOVD_MAP_ALLOC_ONLY
		       | GOVD_MAP_FROM_ONLY))
	{
	case 0:
	  kind = GOMP_MAP_TOFROM;
	  break;
	case GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_TO_ONLY:
	  kind = GOMP_MAP_TO;
	  break;
	case GOVD_MAP_FROM_ONLY:
	  kind = GOMP_MAP_FROM;
	  break;
	case GOVD_MAP_ALLOC_ONLY:
	  kind = GOMP_MAP_ALLOC;
	  break;
	case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_FORCE_PRESENT:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && omp_privatize_by_reference (decl))
	{
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p,
					  (ctx->region_type & ORT_ACC) != 0);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
     in simd.  Those are only added for the local vars inside of simd body
     and they don't need to be e.g. default constructible.  */
  if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
    lang_hooks.decls.omp_finish_clause (clause, pre_p,
					(ctx->region_type & ORT_ACC) != 0);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
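/* Illustrative example (ours): for

     int s = 0;
     #pragma omp parallel
     s++;

   the callback above materializes the implicit data sharing of s as an
   explicit shared clause on the generated GIMPLE_OMP_PARALLEL.  */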
static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree *orig_list_p = list_p;
  tree c, decl;
  bool has_inscan_reductions = false;

  if (body)
    {
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
	  break;
      if (octx)
	{
	  struct walk_stmt_info wi;
	  memset (&wi, 0, sizeof (wi));
	  walk_gimple_seq (body, omp_find_stores_stmt,
			   omp_find_stores_op, &wi);
	}
    }

  if (ctx->add_safelen1)
    {
      /* If there are VLAs in the body of simd loop, prevent
	 vectorization.  */
      gcc_assert (ctx->region_type == ORT_SIMD);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
      OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
      OMP_CLAUSE_CHAIN (c) = *list_p;
      *list_p = c;
      list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (ctx->region_type == ORT_WORKSHARE
      && ctx->outer_context
      && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
    {
      for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	  {
	    decl = OMP_CLAUSE_DECL (c);
	    splay_tree_node n
	      = splay_tree_lookup (ctx->outer_context->variables,
				   (splay_tree_key) decl);
	    gcc_checking_assert (!splay_tree_lookup (ctx->variables,
						     (splay_tree_key) decl));
	    omp_add_variable (ctx, decl, n->value);
	    tree c2 = copy_node (c);
	    OMP_CLAUSE_CHAIN (c2) = *list_p;
	    *list_p = c2;
	    if ((n->value & GOVD_FIRSTPRIVATE) == 0)
	      continue;
	    c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				   OMP_CLAUSE_FIRSTPRIVATE);
	    OMP_CLAUSE_DECL (c2) = decl;
	    OMP_CLAUSE_CHAIN (c2) = *list_p;
	    *list_p = c2;
	  }
    }
  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if ((ctx->region_type & ORT_TARGET)
	      && (ctx->region_type & ORT_ACC) == 0
	      && TYPE_ATOMIC (strip_array_types
			      (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<_Atomic%> %qD in %<firstprivate%> clause on "
			"%<target%> construct", OMP_CLAUSE_DECL (c));
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	    {
	      decl = OMP_CLAUSE_DECL (c);
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if ((n->value & GOVD_MAP) != 0)
		{
		  remove = true;
		  break;
		}
	      OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
	      OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
	    }
	  /* Fall through.  */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = !(n->value & GOVD_SEEN);
	  if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
	      && code == OMP_PARALLEL
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	    remove = true;
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_SHARED));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && ctx->has_depend
		  && DECL_P (decl))
		n->value |= GOVD_WRITTEN;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && (n->value & GOVD_WRITTEN) == 0
		  && DECL_P (decl)
		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		OMP_CLAUSE_SHARED_READONLY (c) = 1;
	      else if (DECL_P (decl)
		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			    && (n->value & GOVD_WRITTEN) != 0)
			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	    }
	  else
	    n->value &= ~GOVD_EXPLICIT;
	  break;
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (code == OMP_DISTRIBUTE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  if (!remove
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
	    remove = true;
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_NONTEMPORAL:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = n == NULL || !(n->value & GOVD_SEEN);
	  break;
	case OMP_CLAUSE_MAP:
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  /* Data clauses associated with reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL
	      || ctx->region_type == ORT_ACC_SERIAL)
	    {
	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
	      n = NULL;

	      if (DECL_P (t))
		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);

	      if (n && (n->value & GOVD_REDUCTION))
		{
		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);

		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
		      && kind != GOMP_MAP_FORCE_PRESENT
		      && kind != GOMP_MAP_POINTER)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
				  "incompatible data clause with reduction "
				  "on %qE; promoting to %<present_or_copy%>",
				  DECL_NAME (t));
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
		    }
		}
	    }
	  if (!DECL_P (decl))
	    {
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		}
	      break;
	    }

	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
	      && (!is_global_var (decl)
		  || !lookup_attribute ("omp declare target link",
					DECL_ATTRIBUTES (decl))))
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
		   && (code == OMP_TARGET_EXIT_DATA
		       || code == OACC_EXIT_DATA))
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  break;
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_INSCAN (c))
	    {
	      decl = OMP_CLAUSE_DECL (c);
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
		{
		  remove = true;
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%qD specified in %<inscan%> %<reduction%> clause "
			    "but not in %<scan%> directive clause", decl);
		  break;
		}
	      has_inscan_reductions = true;
	    }
	  /* Fall through.  */
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  /* OpenACC reductions need a present_or_copy data clause.
	     Add one if necessary.  Emit error when the reduction is private.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL
	      || ctx->region_type == ORT_ACC_SERIAL)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
		{
		  remove = true;
		  error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
			    "reduction on %qE", DECL_NAME (decl));
		}
	      else if ((n->value & GOVD_MAP) == 0)
		{
		  tree next = OMP_CLAUSE_CHAIN (c);
		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_CHAIN (c) = nc;
		  lang_hooks.decls.omp_finish_clause (nc, pre_p,
						      (ctx->region_type
						       & ORT_ACC) != 0);
		  while (1)
		    {
		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
			break;
		      nc = OMP_CLAUSE_CHAIN (nc);
		    }
		  OMP_CLAUSE_CHAIN (nc) = next;
		  n->value |= GOVD_MAP;
		}
	    }
	  if (DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;
	case OMP_CLAUSE_ALLOCATE:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if (n != NULL && !(n->value & GOVD_SEEN))
	    {
	      if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
		  != 0
		  && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
		remove = true;
	    }
	  if (!remove
	      && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
	      && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
	      && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
		  || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
		  || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
	    {
	      tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
	      if (n == NULL)
		{
		  enum omp_clause_default_kind default_kind
		    = ctx->default_kind;
		  ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
				       true);
		  ctx->default_kind = default_kind;
		}
	      else
		omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
				     true);
	    }
	  break;
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  break;

	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }
  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  if (has_inscan_reductions)
    for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%<inscan%> %<reduction%> clause used together with "
		    "%<linear%> clause for a variable other than loop "
		    "iterator");
	  break;
	}

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
/* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
   -1 if unknown yet (simd is involved, won't be known until vectorization)
   and 1 if they do.  If SCORES is non-NULL, it should point to an array
   of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
   of the CONSTRUCTS (position -1 if it will never match) followed by
   number of constructs in the OpenMP context construct trait.  If the
   score depends on whether it will be in a declare simd clone or not,
   the function returns 2 and there will be two sets of the scores, the first
   one for the case that it is not in a declare simd clone, the other
   that it is in a declare simd clone.  */

int
omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
				int *scores)
{
  int matched = 0, cnt = 0;
  bool simd_seen = false;
  bool target_seen = false;
  int declare_simd_cnt = -1;
  auto_vec<enum tree_code, 16> codes;
  for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
    {
      if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
	  || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
	      == ORT_TARGET && ctx->code == OMP_TARGET)
	  || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
	  || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
	  || (ctx->region_type == ORT_SIMD
	      && ctx->code == OMP_SIMD
	      && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
	{
	  ++cnt;
	  if (scores)
	    codes.safe_push (ctx->code);
	  else if (matched < nconstructs && ctx->code == constructs[matched])
	    {
	      if (ctx->code == OMP_SIMD)
		{
		  if (matched)
		    return 0;
		  simd_seen = true;
		}
	      ++matched;
	    }
	  if (ctx->code == OMP_TARGET)
	    {
	      if (scores == NULL)
		return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
	      target_seen = true;
	      break;
	    }
	}
      else if (ctx->region_type == ORT_WORKSHARE
	       && ctx->code == OMP_LOOP
	       && ctx->outer_context
	       && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
	       && ctx->outer_context->outer_context
	       && ctx->outer_context->outer_context->code == OMP_LOOP
	       && ctx->outer_context->outer_context->distribute)
	ctx = ctx->outer_context->outer_context;
      ctx = ctx->outer_context;
    }
  if (!target_seen
      && lookup_attribute ("omp declare simd",
			   DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Declare simd is a maybe case, it is supposed to be added only to the
	 omp-simd-clone.c added clones and not to the base function.  */
      declare_simd_cnt = cnt++;
      if (scores)
	codes.safe_push (OMP_SIMD);
      else if (cnt == 0
	       && constructs[0] == OMP_SIMD)
	{
	  gcc_assert (matched == 0);
	  simd_seen = true;
	  if (++matched == nconstructs)
	    return -1;
	}
    }
  if (tree attr = lookup_attribute ("omp declare variant variant",
				    DECL_ATTRIBUTES (current_function_decl)))
    {
      enum tree_code variant_constructs[5];
      int variant_nconstructs = 0;
      if (!target_seen)
	variant_nconstructs
	  = omp_constructor_traits_to_codes (TREE_VALUE (attr),
					     variant_constructs);
      for (int i = 0; i < variant_nconstructs; i++)
	{
	  ++cnt;
	  if (scores)
	    codes.safe_push (variant_constructs[i]);
	  else if (matched < nconstructs
		   && variant_constructs[i] == constructs[matched])
	    {
	      if (variant_constructs[i] == OMP_SIMD)
		{
		  if (matched)
		    return 0;
		  simd_seen = true;
		}
	      ++matched;
	    }
	}
    }
  if (!target_seen
      && lookup_attribute ("omp declare target block",
			   DECL_ATTRIBUTES (current_function_decl)))
    {
      if (scores)
	codes.safe_push (OMP_TARGET);
      else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
	++matched;
    }
  if (scores)
    {
      for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
	{
	  int j = codes.length () - 1;
	  for (int i = nconstructs - 1; i >= 0; i--)
	    {
	      while (j >= 0
		     && (pass != 0 || declare_simd_cnt != j)
		     && constructs[i] != codes[j])
		--j;
	      if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
		*scores++ = j - 1;
	      else
		*scores++ = j;
	    }
	  *scores++ = ((pass == 0 && declare_simd_cnt != -1)
		       ? codes.length () - 1 : codes.length ());
	}
      return declare_simd_cnt == -1 ? 1 : 2;
    }
  if (matched == nconstructs)
    return simd_seen ? -1 : 1;
  return 0;
}
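/* Illustrative example (ours): inside "#pragma omp target teams", a query
   with CONSTRUCTS = { OMP_TARGET, OMP_TEAMS } returns 1 and
   { OMP_PARALLEL } returns 0, while a query involving OMP_SIMD may return
   -1, i.e. not known until vectorization.  */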
/* Gimplify OACC_CACHE.  */

static void
gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;

  gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
			     OACC_CACHE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
			       OACC_CACHE);

  /* TODO: Do something sensible with this information.  */

  *expr_p = NULL_TREE;
}
/* Helper function of gimplify_oacc_declare.  The helper's purpose is to,
   if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
   kind.  The entry kind will replace the one in CLAUSE, while the exit
   kind will be used in a new omp_clause and returned to the caller.  */

static tree
gimplify_oacc_declare_1 (tree clause)
{
  HOST_WIDE_INT kind, new_op;
  bool ret = false;
  tree c = NULL;

  kind = OMP_CLAUSE_MAP_KIND (clause);

  switch (kind)
    {
      case GOMP_MAP_ALLOC:
	new_op = GOMP_MAP_RELEASE;
	ret = true;
	break;

      case GOMP_MAP_FROM:
	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
	new_op = GOMP_MAP_FROM;
	ret = true;
	break;

      case GOMP_MAP_TOFROM:
	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
	new_op = GOMP_MAP_FROM;
	ret = true;
	break;

      case GOMP_MAP_DEVICE_RESIDENT:
      case GOMP_MAP_FORCE_DEVICEPTR:
      case GOMP_MAP_FORCE_PRESENT:
      case GOMP_MAP_LINK:
      case GOMP_MAP_POINTER:
	break;

      default:
	gcc_unreachable ();
	break;
    }

  if (ret)
    {
      c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c, new_op);
      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
    }

  return c;
}
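/* Illustrative summary (ours): a GOMP_MAP_TOFROM clause on "acc declare"
   is split by the helper above into a GOMP_MAP_TO entry kind stored back
   into CLAUSE plus a GOMP_MAP_FROM exit kind returned in a new clause.  */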
/* Gimplify OACC_DECLARE.  */

static void
gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gomp_target *stmt;
  tree clauses, t, decl;

  clauses = OACC_DECLARE_CLAUSES (expr);

  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);

  for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
    {
      decl = OMP_CLAUSE_DECL (t);

      if (TREE_CODE (decl) == MEM_REF)
	decl = TREE_OPERAND (decl, 0);

      if (VAR_P (decl) && !is_oacc_declared (decl))
	{
	  tree attr = get_identifier ("oacc declare target");
	  DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
					      DECL_ATTRIBUTES (decl));
	}

      if (VAR_P (decl)
	  && !is_global_var (decl)
	  && DECL_CONTEXT (decl) == current_function_decl)
	{
	  tree c = gimplify_oacc_declare_1 (t);
	  if (c)
	    {
	      if (oacc_declare_returns == NULL)
		oacc_declare_returns = new hash_map<tree, tree>;

	      oacc_declare_returns->put (decl, c);
	    }
	}

      if (gimplify_omp_ctxp)
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
    }

  stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				  clauses);

  gimplify_seq_add_stmt (pre_p, stmt);

  *expr_p = NULL_TREE;
}
/* Gimplify the contents of an OMP_PARALLEL statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */

static void
gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *g;
  gimple_seq body = NULL;

  gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
			     OMP_PARALLEL_COMBINED (expr)
			     ? ORT_COMBINED_PARALLEL
			     : ORT_PARALLEL, OMP_PARALLEL);

  push_gimplify_context ();

  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
  if (gimple_code (g) == GIMPLE_BIND)
    pop_gimplify_context (g);
  else
    pop_gimplify_context (NULL);

  gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
			       OMP_PARALLEL);

  g = gimple_build_omp_parallel (body,
				 OMP_PARALLEL_CLAUSES (expr),
				 NULL_TREE, NULL_TREE);
  if (OMP_PARALLEL_COMBINED (expr))
    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
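/* Illustrative example (ours): a plain "#pragma omp parallel" around a
   statement body becomes a single GIMPLE_OMP_PARALLEL whose clauses were
   rescanned and adjusted against the gimplified body by the calls
   above.  */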
/* Gimplify the contents of an OMP_TASK statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */

static void
gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *g;
  gimple_seq body = NULL;

  if (OMP_TASK_BODY (expr) == NULL_TREE)
    for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	  && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%<mutexinoutset%> kind in %<depend%> clause on a "
		    "%<taskwait%> construct");
	  break;
	}

  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
			     omp_find_clause (OMP_TASK_CLAUSES (expr),
					      OMP_CLAUSE_UNTIED)
			     ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);

  if (OMP_TASK_BODY (expr))
    {
      push_gimplify_context ();

      g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
    }

  gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
			       OMP_TASK);

  g = gimple_build_omp_task (body,
			     OMP_TASK_CLAUSES (expr),
			     NULL_TREE, NULL_TREE,
			     NULL_TREE, NULL_TREE, NULL_TREE);
  if (OMP_TASK_BODY (expr) == NULL_TREE)
    gimple_omp_task_set_taskwait_p (g, true);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
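/* Illustrative example (ours): "#pragma omp taskwait depend (in: x)"
   reaches this function with OMP_TASK_BODY == NULL_TREE and is emitted as
   a body-less GIMPLE_OMP_TASK with the taskwait_p flag set.  */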
/* Helper function for gimplify_omp_for.  If *TP is not a gimple constant,
   force it into a temporary initialized in PRE_P and add firstprivate clause
   to ORIG_FOR_STMT.  */

static void
gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
			    tree orig_for_stmt)
{
  if (*tp == NULL || is_gimple_constant (*tp))
    return;

  *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
  /* Reference to pointer conversion is considered useless,
     but is significant for firstprivate clause.  Force it
     to happen here.  */
  if (type
      && TREE_CODE (type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
    {
      tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
      tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
      gimplify_and_add (m, pre_p);
      *tp = v;
    }

  tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
  OMP_CLAUSE_DECL (c) = *tp;
  OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
  OMP_FOR_CLAUSES (orig_for_stmt) = c;
}
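/* Illustrative example (ours): for

     #pragma omp taskloop
     for (i = 0; i < fn (); i++)
       ...

   the helper above evaluates "fn ()" once into a temporary before the
   taskloop and adds a firstprivate clause for that temporary.  */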
11903 /* Gimplify the gross structure of an OMP_FOR statement. */
11905 static enum gimplify_status
11906 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
11908 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
11909 enum gimplify_status ret
= GS_ALL_DONE
;
11910 enum gimplify_status tret
;
11912 gimple_seq for_body
, for_pre_body
;
11914 bitmap has_decl_expr
= NULL
;
11915 enum omp_region_type ort
= ORT_WORKSHARE
;
11916 bool openacc
= TREE_CODE (*expr_p
) == OACC_LOOP
;
11918 orig_for_stmt
= for_stmt
= *expr_p
;
11920 bool loop_p
= (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_BIND
)
11922 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
11924 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
11925 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
11926 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
11927 find_combined_omp_for
, data
, NULL
);
11928 if (inner_for_stmt
== NULL_TREE
)
11930 gcc_assert (seen_error ());
11931 *expr_p
= NULL_TREE
;
11934 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
11936 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
11937 &OMP_FOR_PRE_BODY (for_stmt
));
11938 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
11940 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
11942 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
11943 &OMP_FOR_PRE_BODY (for_stmt
));
11944 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
11949 /* We have some statements or variable declarations in between
11950 the composite construct directives. Move them around the
11953 for (i
= 0; i
< 3; i
++)
11957 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
11958 data
[i
+ 1] = data
[i
];
11959 *data
[i
] = OMP_BODY (t
);
11960 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
11961 NULL_TREE
, make_node (BLOCK
));
11962 OMP_BODY (t
) = body
;
11963 append_to_statement_list_force (inner_for_stmt
,
11964 &BIND_EXPR_BODY (body
));
11966 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
11967 gcc_assert (*data
[3] == inner_for_stmt
);
11972 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
11974 && OMP_FOR_ORIG_DECLS (inner_for_stmt
)
11975 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11977 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11980 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
11981 /* Class iterators aren't allowed on OMP_SIMD, so the only
11982 case we need to solve is distribute parallel for. They are
11983 allowed on the loop construct, but that is already handled
11984 in gimplify_omp_loop. */
11985 gcc_assert (TREE_CODE (inner_for_stmt
) == OMP_FOR
11986 && TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
11988 tree orig_decl
= TREE_PURPOSE (orig
);
11989 tree last
= TREE_VALUE (orig
);
11991 for (pc
= &OMP_FOR_CLAUSES (inner_for_stmt
);
11992 *pc
; pc
= &OMP_CLAUSE_CHAIN (*pc
))
11993 if ((OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
11994 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LASTPRIVATE
)
11995 && OMP_CLAUSE_DECL (*pc
) == orig_decl
)
11997 if (*pc
== NULL_TREE
)
12000 for (spc
= &OMP_PARALLEL_CLAUSES (*data
[1]);
12001 *spc
; spc
= &OMP_CLAUSE_CHAIN (*spc
))
12002 if (OMP_CLAUSE_CODE (*spc
) == OMP_CLAUSE_PRIVATE
12003 && OMP_CLAUSE_DECL (*spc
) == orig_decl
)
12008 *spc
= OMP_CLAUSE_CHAIN (c
);
12009 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
12013 if (*pc
== NULL_TREE
)
12015 else if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
)
12017 /* private clause will appear only on inner_for_stmt.
12018 Change it into firstprivate, and add private clause
12020 tree c
= copy_node (*pc
);
12021 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
12022 OMP_FOR_CLAUSES (for_stmt
) = c
;
12023 OMP_CLAUSE_CODE (*pc
) = OMP_CLAUSE_FIRSTPRIVATE
;
12024 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
12028 /* lastprivate clause will appear on both inner_for_stmt
12029 and for_stmt. Add firstprivate clause to
12031 tree c
= build_omp_clause (OMP_CLAUSE_LOCATION (*pc
),
12032 OMP_CLAUSE_FIRSTPRIVATE
);
12033 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (*pc
);
12034 OMP_CLAUSE_CHAIN (c
) = *pc
;
12036 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
12038 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
12039 OMP_CLAUSE_FIRSTPRIVATE
);
12040 OMP_CLAUSE_DECL (c
) = last
;
12041 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
12042 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
12043 c
= build_omp_clause (UNKNOWN_LOCATION
,
12044 *pc
? OMP_CLAUSE_SHARED
12045 : OMP_CLAUSE_FIRSTPRIVATE
);
12046 OMP_CLAUSE_DECL (c
) = orig_decl
;
12047 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
12048 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
12050 /* Similarly, take care of C++ range for temporaries, those should
12051 be firstprivate on OMP_PARALLEL if any. */
12053 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
12054 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
12055 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
12057 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
12061 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
12062 tree v
= TREE_CHAIN (orig
);
12063 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
12064 OMP_CLAUSE_FIRSTPRIVATE
);
12065 /* First add firstprivate clause for the __for_end artificial
12067 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 1);
12068 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
12070 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
12071 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
12072 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
12073 if (TREE_VEC_ELT (v
, 0))
12075 /* And now the same for __for_range artificial decl if it
12077 c
= build_omp_clause (UNKNOWN_LOCATION
,
12078 OMP_CLAUSE_FIRSTPRIVATE
);
12079 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 0);
12080 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
12082 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
12083 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
12084 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
  switch (TREE_CODE (for_stmt))
    {
    case OMP_FOR:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
	{
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			       OMP_CLAUSE_SCHEDULE))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "schedule", "for");
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "ordered", "for");
	}
      break;
    case OMP_DISTRIBUTE:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
	  && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			      OMP_CLAUSE_DIST_SCHEDULE))
	error_at (EXPR_LOCATION (for_stmt),
		  "%qs clause may not appear on non-rectangular %qs",
		  "dist_schedule", "distribute");
      break;
    case OACC_LOOP:
      ort = ORT_ACC;
      break;
    case OMP_TASKLOOP:
      if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
	ort = ORT_UNTIED_TASKLOOP;
      else
	ort = ORT_TASKLOOP;
      break;
    case OMP_SIMD:
      ort = ORT_SIMD;
      break;
    default:
      gcc_unreachable ();
    }
  /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
     clause for the IV.  */
  if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	    && OMP_CLAUSE_DECL (c) == decl)
	  {
	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	    break;
	  }
    }
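  /* Schematic example (not from the original source): for
	 #pragma omp simd linear (i)
	 for (i = 0; i < n; i++) ...
     the IV is (re)initialized by the loop itself, so copying the outer
     value of i into the privatized copy on entry would be dead work;
     the flag set above lets the later OpenMP lowering skip that
     copy-in.  */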
  if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
    gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
			       loop_p && TREE_CODE (for_stmt) != OMP_SIMD
			       ? OMP_LOOP : TREE_CODE (for_stmt));

  if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
    gimplify_omp_ctxp->distribute = true;

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  if ((ort == ORT_SIMD
       || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
      && OMP_FOR_PRE_BODY (for_stmt))
    {
      has_decl_expr = BITMAP_ALLOC (NULL);
      if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
	     == VAR_DECL)
	{
	  t = OMP_FOR_PRE_BODY (for_stmt);
	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	}
      else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
	{
	  tree_stmt_iterator si;
	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
	       tsi_next (&si))
	    {
	      t = tsi_stmt (si);
	      if (TREE_CODE (t) == DECL_EXPR
		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	    }
	}
    }
  if (OMP_FOR_PRE_BODY (for_stmt))
    {
      if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
	gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
      else
	{
	  struct gimplify_omp_ctx ctx;
	  memset (&ctx, 0, sizeof (ctx));
	  ctx.region_type = ORT_NONE;
	  gimplify_omp_ctxp = &ctx;
	  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
	  gimplify_omp_ctxp = NULL;
	}
    }
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    for_stmt = inner_for_stmt;
  /* For taskloop, need to gimplify the start, end and step before the
     taskloop, outside of the taskloop omp context.  */
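  /* Schematically (an illustrative example, not from the original source):
     for
	 #pragma omp taskloop
	 for (i = f1 (); i < f2 (); i += f3 ())
     the calls f1 (), f2 () and f3 () must be evaluated once, before the
     taskloop region is created, and the resulting temporaries made
     available on it; gimplify_omp_taskloop_expr below takes care of
     each of the three expressions.  */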
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	{
	  t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	  gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
				   ? pre_p : &for_pre_body);
	  tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	  if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	    {
	      tree v = TREE_OPERAND (t, 1);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
					  for_pre_p, orig_for_stmt);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
					  for_pre_p, orig_for_stmt);
	    }
	  else
	    gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
					orig_for_stmt);

	  /* Handle OMP_FOR_COND.  */
	  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
	  if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	    {
	      tree v = TREE_OPERAND (t, 1);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
					  for_pre_p, orig_for_stmt);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
					  for_pre_p, orig_for_stmt);
	    }
	  else
	    gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
					orig_for_stmt);

	  /* Handle OMP_FOR_INCR.  */
	  t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	  if (TREE_CODE (t) == MODIFY_EXPR)
	    {
	      decl = TREE_OPERAND (t, 0);
	      t = TREE_OPERAND (t, 1);
	      tree *tp = &TREE_OPERAND (t, 1);
	      if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
		tp = &TREE_OPERAND (t, 0);

	      gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
					  orig_for_stmt);
	    }
	}

      gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
				 OMP_TASKLOOP);
    }
  if (orig_for_stmt != for_stmt)
    gimplify_omp_ctxp->combined_loop = true;

  for_body = NULL;
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
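  /* An ordered clause with an argument, as in
	 #pragma omp for ordered(2)
     requests doacross loop dependences (an illustrative note, not from
     the original source); in that case each iteration variable, paired
     with its original user-visible decl, is remembered below so that
     later depend(sink:...) handling can refer to them.  */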
  tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
  bool is_doacross = false;
  if (c && OMP_CLAUSE_ORDERED_EXPR (c))
    {
      is_doacross = true;
      gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
						 (OMP_FOR_INIT (for_stmt))
					       * 2);
    }
  int collapse = 1, tile = 0;
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
  if (c)
    collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
  if (c)
    tile = list_length (OMP_CLAUSE_TILE_LIST (c));
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
  hash_set<tree> *allocate_uids = NULL;
  if (c)
    {
      allocate_uids = new hash_set<tree>;
      for (; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
	  allocate_uids->add (OMP_CLAUSE_DECL (c));
    }
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));
      if (is_doacross)
	{
	  if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
	    {
	      tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
	      if (TREE_CODE (orig_decl) == TREE_LIST)
		{
		  orig_decl = TREE_PURPOSE (orig_decl);
		  if (!orig_decl)
		    orig_decl = decl;
		}
	      gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
	    }
	  else
	    gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	  gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	}

      if (for_stmt == orig_for_stmt)
	{
	  tree orig_decl = decl;
	  if (OMP_FOR_ORIG_DECLS (for_stmt))
	    {
	      tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
	      if (TREE_CODE (orig_decl) == TREE_LIST)
		{
		  orig_decl = TREE_PURPOSE (orig_decl);
		  if (!orig_decl)
		    orig_decl = decl;
		}
	    }
	  if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
	    error_at (EXPR_LOCATION (for_stmt),
		      "threadprivate iteration variable %qD", orig_decl);
	}
      /* Make sure the iteration variable is private.  */
      tree c = NULL_TREE;
      tree c2 = NULL_TREE;
      if (orig_for_stmt != for_stmt)
	{
	  /* Preserve this information until we gimplify the inner simd.  */
	  if (has_decl_expr
	      && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
	    TREE_PRIVATE (t) = 1;
	}
      else if (ort == ORT_SIMD)
	{
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key) decl);
	  omp_is_private (gimplify_omp_ctxp, decl,
			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
			       != 1));
	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
	    {
	      omp_notice_variable (gimplify_omp_ctxp, decl, true);
	      if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
		for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
						OMP_CLAUSE_LASTPRIVATE);
		     c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
					       OMP_CLAUSE_LASTPRIVATE))
		  if (OMP_CLAUSE_DECL (c3) == decl)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c3), 0,
				  "conditional %<lastprivate%> on loop "
				  "iterator %qD ignored", decl);
		      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
		      n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
		      break;
		    }
	    }
	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
	    {
	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
	      if ((has_decl_expr
		   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
		  || TREE_PRIVATE (t))
		{
		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
		}
	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		{
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
			{
			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
			}
		      else
			{
			  struct gimplify_omp_ctx *octx = outer->outer_context;
			  if (octx
			      && octx->region_type == ORT_COMBINED_PARALLEL
			      && octx->outer_context
			      && (octx->outer_context->region_type
				  == ORT_WORKSHARE)
			      && octx->outer_context->combined_loop)
			    {
			      octx = octx->outer_context;
			      n = splay_tree_lookup (octx->variables,
						     (splay_tree_key)decl);
			      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
				{
				  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
				  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
				}
			    }
			}
		    }
		}

	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		omp_lastprivate_for_combined_outer_constructs (outer, decl,
							       true);
	    }
	  else
	    {
	      bool lastprivate
		= (!has_decl_expr
		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
	      if (TREE_PRIVATE (t))
		lastprivate = false;
	      if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
		{
		  tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		  if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
		    lastprivate = false;
		}

	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && lastprivate)
		omp_lastprivate_for_combined_outer_constructs (outer, decl,
							       true);

	      c = build_omp_clause (input_location,
				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
						: OMP_CLAUSE_PRIVATE);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl,
				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
				| GOVD_EXPLICIT | GOVD_SEEN);
	    }
	}
      else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
	{
	  omp_notice_variable (gimplify_omp_ctxp, decl, true);
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key) decl);
	  if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
	    for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
					    OMP_CLAUSE_LASTPRIVATE);
		 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
					   OMP_CLAUSE_LASTPRIVATE))
	      if (OMP_CLAUSE_DECL (c3) == decl)
		{
		  warning_at (OMP_CLAUSE_LOCATION (c3), 0,
			      "conditional %<lastprivate%> on loop "
			      "iterator %qD ignored", decl);
		  OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
		  n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
		  break;
		}
	}
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  Similarly for any iteration vars
	 in simd with collapse > 1 where the iterator vars must be
	 lastprivate.  And similarly for vars mentioned in allocate clauses.  */
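      /* Schematic sketch (illustrative only, names hypothetical): given an
	 addressable "int i;", the code below rewrites
	     for (i = init; ...)
	 into roughly
	     for (i.tmp = init; ...) { i = i.tmp; ... }
	 where i.tmp is the fresh temporary VAR created here and the
	 synchronizing assignment is the gimple_build_assign emitted into
	 for_body just after.  */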
      if (orig_for_stmt != for_stmt)
	var = decl;
      else if (!is_gimple_reg (decl)
	       || (ort == ORT_SIMD
		   && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
	       || (allocate_uids && allocate_uids->contains (decl)))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Make sure omp_add_variable is not called on it prematurely.
	     We call it ourselves a few lines later.  */
	  gimplify_omp_ctxp = NULL;
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  gimplify_omp_ctxp = ctx;
	  TREE_OPERAND (t, 0) = var;

	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  if (ort == ORT_SIMD
	      && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
	    {
	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
	      OMP_CLAUSE_DECL (c2) = var;
	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c2;
	      omp_add_variable (gimplify_omp_ctxp, var,
				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
	      if (c == NULL_TREE)
		{
		  c = c2;
		  c2 = NULL_TREE;
		}
	    }
	  else
	    omp_add_variable (gimplify_omp_ctxp, var,
			      GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      gimplify_omp_ctxp->in_for_exprs = true;
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  tree lb = TREE_OPERAND (t, 1);
	  tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	}
      else
	tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			      is_gimple_val, fb_rvalue, false);
      gimplify_omp_ctxp->in_for_exprs = false;
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;
      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      gimplify_omp_ctxp->in_for_exprs = true;
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  tree ub = TREE_OPERAND (t, 1);
	  tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	}
      else
	tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			      is_gimple_val, fb_rvalue, false);
      gimplify_omp_ctxp->in_for_exprs = false;
      ret = MIN (ret, tret);
      /* Handle OMP_FOR_INCR.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  {
	    tree decl = TREE_OPERAND (t, 0);
	    /* c_omp_for_incr_canonicalize_ptr() should have been
	       called to massage things appropriately.  */
	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));

	    if (orig_for_stmt != for_stmt)
	      break;
	    t = build_int_cst (TREE_TYPE (decl), 1);
	    if (c)
	      OMP_CLAUSE_LINEAR_STEP (c) = t;
	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	    break;
	  }

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* c_omp_for_incr_canonicalize_ptr() should have been
	     called to massage things appropriately.  */
	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
	  if (orig_for_stmt != for_stmt)
	    break;
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  if (c)
	    OMP_CLAUSE_LINEAR_STEP (c) = t;
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  gimplify_omp_ctxp->in_for_exprs = true;
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  if (c)
	    {
	      tree step = TREE_OPERAND (t, 1);
	      tree stept = TREE_TYPE (decl);
	      if (POINTER_TYPE_P (stept))
		stept = sizetype;
	      step = fold_convert (stept, step);
	      if (TREE_CODE (t) == MINUS_EXPR)
		step = fold_build1 (NEGATE_EXPR, stept, step);
	      OMP_CLAUSE_LINEAR_STEP (c) = step;
	      if (step != TREE_OPERAND (t, 1))
		{
		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
					&for_pre_body, NULL,
					is_gimple_val, fb_rvalue, false);
		  ret = MIN (ret, tret);
		}
	    }
	  gimplify_omp_ctxp->in_for_exprs = false;
	  break;

	default:
	  gcc_unreachable ();
	}
      if (c2)
	{
	  gcc_assert (c);
	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
	}

      if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
	{
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
		&& OMP_CLAUSE_DECL (c) == decl)
	      {
		if (is_doacross && (collapse == 1 || i >= collapse))
		  t = var;
		else
		  {
		    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = TREE_OPERAND (t, 1);
		    gcc_assert (TREE_CODE (t) == PLUS_EXPR
				|| TREE_CODE (t) == MINUS_EXPR
				|| TREE_CODE (t) == POINTER_PLUS_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = build2 (TREE_CODE (t), TREE_TYPE (decl),
				is_doacross ? var : decl,
				TREE_OPERAND (t, 1));
		  }
		gimple_seq *seq;
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
		else
		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
		push_gimplify_context ();
		gimplify_assign (decl, t, seq);
		gimple *bind = NULL;
		if (gimplify_ctxp->temps)
		  {
		    bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
		    *seq = NULL;
		    gimplify_seq_add_stmt (seq, bind);
		  }
		pop_gimplify_context (bind);
	      }
	}
      if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
	for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
	  {
	    t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
	    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	    if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		&& TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
	      TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
	    t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
	    gcc_assert (COMPARISON_CLASS_P (t));
	    if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		&& TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
	      TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
	  }
    }
  BITMAP_FREE (has_decl_expr);
  delete allocate_uids;

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
      || (loop_p && orig_for_stmt == for_stmt))
    {
      push_gimplify_context ();
      if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
	{
	  OMP_FOR_BODY (orig_for_stmt)
	    = build3 (BIND_EXPR, void_type_node, NULL,
		      OMP_FOR_BODY (orig_for_stmt), NULL);
	  TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
	}
    }

  gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
					 &for_body);

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
      || (loop_p && orig_for_stmt == for_stmt))
    {
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
    }
  if (orig_for_stmt != for_stmt)
    for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
      {
	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	decl = TREE_OPERAND (t, 0);
	struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
	  gimplify_omp_ctxp = ctx->outer_context;
	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	gimplify_omp_ctxp = ctx;
	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	TREE_OPERAND (t, 0) = var;
	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
	if (OMP_FOR_NON_RECTANGULAR (for_stmt))
	  for (int j = i + 1;
	       j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
	    {
	      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
	      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		  && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
		{
		  TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
		  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
		}
	      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
	      gcc_assert (COMPARISON_CLASS_P (t));
	      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		  && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
		{
		  TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
		  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
		}
	    }
      }

  gimplify_adjust_omp_clauses (pre_p, for_body,
			       &OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_CODE (orig_for_stmt));
  int kind;
  switch (TREE_CODE (orig_for_stmt))
    {
    case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
    case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
    case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
    case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
    case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
    default:
      gcc_unreachable ();
    }
  if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
    {
      gimplify_seq_add_seq (pre_p, for_pre_body);
      for_pre_body = NULL;
    }
  gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);
  if (orig_for_stmt != for_stmt)
    gimple_omp_for_set_combined_p (gfor, true);
  if (gimplify_omp_ctxp
      && (gimplify_omp_ctxp->combined_loop
	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
	      && gimplify_omp_ctxp->outer_context
	      && gimplify_omp_ctxp->outer_context->combined_loop)))
    {
      gimple_omp_for_set_combined_into_p (gfor, true);
      if (gimplify_omp_ctxp->combined_loop)
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
      else
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
    }

  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }
  /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
     constructs with GIMPLE_OMP_TASK sandwiched in between them.
     The outer taskloop stands for computing the number of iterations,
     counts for collapsed loops and holding taskloop specific clauses.
     The task construct stands for the effect of data sharing on the
     explicit task it creates and the inner taskloop stands for expansion
     of the static loop inside of the explicit task construct.  */
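  /* Schematically (an illustrative example, not from the original source),
     "#pragma omp taskloop grainsize (g) firstprivate (x)" becomes roughly

	 #pragma omp taskloop grainsize (g)	outer GIMPLE_OMP_FOR (gforo)
	   #pragma omp task firstprivate (x)	the GIMPLE_OMP_TASK
	     #pragma omp taskloop		inner GIMPLE_OMP_FOR (gfor)

     with the clause distribution implemented by the switch below.  */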
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
      tree task_clauses = NULL_TREE;
      tree c = *gfor_clauses_ptr;
      tree *gtask_clauses_ptr = &task_clauses;
      tree outer_for_clauses = NULL_TREE;
      tree *gforo_clauses_ptr = &outer_for_clauses;
      bitmap lastprivate_uids = NULL;
      if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
	{
	  c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
	  if (c)
	    {
	      lastprivate_uids = BITMAP_ALLOC (NULL);
	      for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
					     OMP_CLAUSE_LASTPRIVATE))
		bitmap_set_bit (lastprivate_uids,
				DECL_UID (OMP_CLAUSE_DECL (c)));
	    }
	  c = *gfor_clauses_ptr;
	}
      for (; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  /* These clauses are allowed on task, move them there.  */
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_DEFAULT:
	  case OMP_CLAUSE_IF:
	  case OMP_CLAUSE_UNTIED:
	  case OMP_CLAUSE_FINAL:
	  case OMP_CLAUSE_MERGEABLE:
	  case OMP_CLAUSE_PRIORITY:
	  case OMP_CLAUSE_REDUCTION:
	  case OMP_CLAUSE_IN_REDUCTION:
	    *gtask_clauses_ptr = c;
	    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
	      {
		/* We want private on outer for and firstprivate
		   on task.  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
						    openacc);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		*gforo_clauses_ptr = c;
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    else
	      {
		*gtask_clauses_ptr = c;
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    break;
	  /* These clauses go into outer taskloop clauses.  */
	  case OMP_CLAUSE_GRAINSIZE:
	  case OMP_CLAUSE_NUM_TASKS:
	  case OMP_CLAUSE_NOGROUP:
	    *gforo_clauses_ptr = c;
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  /* Collapse clause we duplicate on both taskloops.  */
	  case OMP_CLAUSE_COLLAPSE:
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gforo_clauses_ptr = copy_node (c);
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	    break;
	  /* For lastprivate, keep the clause on inner taskloop, and add
	     a shared clause on task.  If the same decl is also firstprivate,
	     add also firstprivate clause on the inner taskloop.  */
	  case OMP_CLAUSE_LASTPRIVATE:
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
	      {
		/* For taskloop C++ lastprivate IVs, we want:
		   1) private on outer taskloop
		   2) firstprivate and shared on task
		   3) lastprivate on inner taskloop  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
						    openacc);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
		*gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						       OMP_CLAUSE_PRIVATE);
		OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
		OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
		TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	      }
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gtask_clauses_ptr
	      = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
	    OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	      OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
	    gtask_clauses_ptr
	      = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	    break;
	  /* Allocate clause we duplicate on task and inner taskloop
	     if the decl is lastprivate, otherwise just put on task.  */
	  case OMP_CLAUSE_ALLOCATE:
	    if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
		&& DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
	      {
		/* Additionally, put firstprivate clause on task
		   for the allocator if it is not constant.  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr)
		  = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	      }
	    if (lastprivate_uids
		&& bitmap_bit_p (lastprivate_uids,
				 DECL_UID (OMP_CLAUSE_DECL (c))))
	      {
		*gfor_clauses_ptr = c;
		gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
		*gtask_clauses_ptr = copy_node (c);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	      }
	    else
	      {
		*gtask_clauses_ptr = c;
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *gfor_clauses_ptr = NULL_TREE;
      *gtask_clauses_ptr = NULL_TREE;
      *gforo_clauses_ptr = NULL_TREE;
      BITMAP_FREE (lastprivate_uids);
      g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
      g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
				 NULL_TREE, NULL_TREE, NULL_TREE);
      gimple_omp_task_set_taskloop_p (g, true);
      g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
      gomp_for *gforo
	= gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
				gimple_omp_for_collapse (gfor),
				gimple_omp_for_pre_body (gfor));
      gimple_omp_for_set_pre_body (gfor, NULL);
      gimple_omp_for_set_combined_p (gforo, true);
      gimple_omp_for_set_combined_into_p (gfor, true);
      for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
	{
	  tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
	  tree v = create_tmp_var (type);
	  gimple_omp_for_set_index (gforo, i, v);
	  t = unshare_expr (gimple_omp_for_initial (gfor, i));
	  gimple_omp_for_set_initial (gforo, i, t);
	  gimple_omp_for_set_cond (gforo, i,
				   gimple_omp_for_cond (gfor, i));
	  t = unshare_expr (gimple_omp_for_final (gfor, i));
	  gimple_omp_for_set_final (gforo, i, t);
	  t = unshare_expr (gimple_omp_for_incr (gfor, i));
	  gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
	  TREE_OPERAND (t, 0) = v;
	  gimple_omp_for_set_incr (gforo, i, t);
	  t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (t) = v;
	  OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
	  gimple_omp_for_set_clauses (gforo, t);
	  if (OMP_FOR_NON_RECTANGULAR (for_stmt))
	    {
	      tree *p1 = NULL, *p2 = NULL;
	      t = gimple_omp_for_initial (gforo, i);
	      if (TREE_CODE (t) == TREE_VEC)
		p1 = &TREE_VEC_ELT (t, 0);
	      t = gimple_omp_for_final (gforo, i);
	      if (TREE_CODE (t) == TREE_VEC)
		{
		  if (p1)
		    p2 = &TREE_VEC_ELT (t, 0);
		  else
		    p1 = &TREE_VEC_ELT (t, 0);
		}
	      if (p1)
		{
		  int j;
		  for (j = 0; j < i; j++)
		    if (*p1 == gimple_omp_for_index (gfor, j))
		      {
			*p1 = gimple_omp_for_index (gforo, j);
			if (p2)
			  *p2 = *p1;
			break;
		      }
		  gcc_assert (j < i);
		}
	    }
	}
      gimplify_seq_add_stmt (pre_p, gforo);
    }
  else
    gimplify_seq_add_stmt (pre_p, gfor);
  if (TREE_CODE (orig_for_stmt) == OMP_FOR)
    {
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      unsigned lastprivate_conditional = 0;
      while (ctx
	     && (ctx->region_type == ORT_TARGET_DATA
		 || ctx->region_type == ORT_TASKGROUP))
	ctx = ctx->outer_context;
      if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
	for (tree c = gimple_omp_for_clauses (gfor);
	     c; c = OMP_CLAUSE_CHAIN (c))
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    ++lastprivate_conditional;
      if (lastprivate_conditional)
	{
	  struct omp_for_data fd;
	  omp_extract_for_data (gfor, &fd, NULL);
	  tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
					      lastprivate_conditional);
	  tree var = create_tmp_var_raw (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
	  OMP_CLAUSE_DECL (c) = var;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
	  gimple_omp_for_set_clauses (gfor, c);
	  omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
	}
    }
  else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
    {
      unsigned lastprivate_conditional = 0;
      for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	  ++lastprivate_conditional;
      if (lastprivate_conditional)
	{
	  struct omp_for_data fd;
	  omp_extract_for_data (gfor, &fd, NULL);
	  tree type = unsigned_type_for (fd.iter_type);
	  while (lastprivate_conditional--)
	    {
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__CONDTEMP_);
	      OMP_CLAUSE_DECL (c) = create_tmp_var (type);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
	      gimple_omp_for_set_clauses (gfor, c);
	    }
	}
    }

  if (ret != GS_ALL_DONE)
    return GS_ERROR;
  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Helper for gimplify_omp_loop, called through walk_tree.  */

static tree
replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
{
  if (DECL_P (*tp))
    {
      tree *d = (tree *) data;
      if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
	{
	  *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
	  *walk_subtrees = 0;
	}
      else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
	{
	  *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
	  *walk_subtrees = 0;
	}
    }
  return NULL_TREE;
}
/* Gimplify the gross structure of an OMP_LOOP statement.  */
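/* Schematically (an illustrative summary, not from the original source),
   a loop construct is rewritten into OMP_SIMD and then, depending on the
   effective bind clause, wrapped in the constructs it binds to:
	bind(thread)   -> simd
	bind(parallel) -> for simd	(bound to the current parallel team)
	bind(teams)    -> distribute parallel for simd
   after which the result is handed back to gimplify_omp_for.  */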
static enum gimplify_status
gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree clauses = OMP_FOR_CLAUSES (for_stmt);
  struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
  enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
  int i;

  /* If order is not present, the behavior is as if order(concurrent)
     appeared.  */
  tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
  if (order == NULL_TREE)
    {
      order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
      OMP_CLAUSE_CHAIN (order) = clauses;
      OMP_FOR_CLAUSES (for_stmt) = clauses = order;
    }

  tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
  if (bind == NULL_TREE)
    {
      if (!flag_openmp) /* flag_openmp_simd */
	;
      else if (octx && (octx->region_type & ORT_TEAMS) != 0)
	kind = OMP_CLAUSE_BIND_TEAMS;
      else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
	kind = OMP_CLAUSE_BIND_PARALLEL;
      else
	{
	  for (; octx; octx = octx->outer_context)
	    {
	      if ((octx->region_type & ORT_ACC) != 0
		  || octx->region_type == ORT_NONE
		  || octx->region_type == ORT_IMPLICIT_TARGET)
		continue;
	      break;
	    }
	  if (octx == NULL && !in_omp_construct)
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind%> clause not specified on a %<loop%> "
		      "construct not nested inside another OpenMP construct");
	}
      bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
      OMP_CLAUSE_CHAIN (bind) = clauses;
      OMP_CLAUSE_BIND_KIND (bind) = kind;
      OMP_FOR_CLAUSES (for_stmt) = bind;
    }
  else
    switch (OMP_CLAUSE_BIND_KIND (bind))
      {
      case OMP_CLAUSE_BIND_THREAD:
	break;
      case OMP_CLAUSE_BIND_PARALLEL:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	for (; octx; octx = octx->outer_context)
	  if (octx->region_type == ORT_SIMD
	      && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
	    {
	      error_at (EXPR_LOCATION (for_stmt),
			"%<bind(parallel)%> on a %<loop%> construct nested "
			"inside %<simd%> construct");
	      OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	      break;
	    }
	kind = OMP_CLAUSE_BIND_PARALLEL;
	break;
      case OMP_CLAUSE_BIND_TEAMS:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	if ((octx
	     && octx->region_type != ORT_IMPLICIT_TARGET
	     && octx->region_type != ORT_NONE
	     && (octx->region_type & ORT_TEAMS) == 0)
	    || in_omp_construct)
	  {
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind(teams)%> on a %<loop%> region not strictly "
		      "nested inside of a %<teams%> region");
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	kind = OMP_CLAUSE_BIND_TEAMS;
	break;
      default:
	gcc_unreachable ();
      }

  for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
    switch (OMP_CLAUSE_CODE (*pc))
      {
      case OMP_CLAUSE_REDUCTION:
	if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<inscan%> %<reduction%> clause on "
		      "%qs construct", "loop");
	    OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
	  }
	if (OMP_CLAUSE_REDUCTION_TASK (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "invalid %<task%> reduction modifier on construct "
		      "other than %<parallel%>, %qs or %<sections%>",
		      lang_GNU_Fortran () ? "do" : "for");
	    OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      case OMP_CLAUSE_LASTPRIVATE:
	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	  {
	    tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	    if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
	      break;
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					    i)) == TREE_LIST
		&& TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					       i)))
	      {
		tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
		  break;
	      }
	  }
	if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<lastprivate%> clause on a %<loop%> construct refers "
		      "to a variable %qD which is not the loop iterator",
		      OMP_CLAUSE_DECL (*pc));
	    *pc = OMP_CLAUSE_CHAIN (*pc);
	    break;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      default:
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      }

  TREE_SET_CODE (for_stmt, OMP_SIMD);

  int last;
  switch (kind)
    {
    case OMP_CLAUSE_BIND_THREAD: last = 0; break;
    case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
    case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
    }
  for (int pass = 1; pass <= last; pass++)
    {
      if (pass == 2)
	{
	  tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
	  append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
	  *expr_p = make_node (OMP_PARALLEL);
	  TREE_TYPE (*expr_p) = void_type_node;
	  OMP_PARALLEL_BODY (*expr_p) = bind;
	  OMP_PARALLEL_COMBINED (*expr_p) = 1;
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
	  tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
	  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
		    == TREE_LIST))
	      {
		tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
		  {
		    *pc = build_omp_clause (UNKNOWN_LOCATION,
					    OMP_CLAUSE_FIRSTPRIVATE);
		    OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
		    pc = &OMP_CLAUSE_CHAIN (*pc);
		  }
	      }
	}
      tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
      tree *pc = &OMP_FOR_CLAUSES (t);
      TREE_TYPE (t) = void_type_node;
      OMP_FOR_BODY (t) = *expr_p;
      SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_BIND:
	  case OMP_CLAUSE_ORDER:
	  case OMP_CLAUSE_COLLAPSE:
	    *pc = copy_node (c);
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    /* Only needed on innermost.  */
	    break;
	  case OMP_CLAUSE_LASTPRIVATE:
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
	      {
		*pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		pc = &OMP_CLAUSE_CHAIN (*pc);
	      }
	    *pc = copy_node (c);
	    OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
	      {
		if (pass != last)
		  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
		else
		  lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_REDUCTION:
	    *pc = copy_node (c);
	    OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    OMP_CLAUSE_REDUCTION_INIT (*pc)
	      = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
	    OMP_CLAUSE_REDUCTION_MERGE (*pc)
	      = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
	      {
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
		  = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
		    = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
		tree nc = *pc;
		tree data[2] = { c, nc };
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
					      replace_reduction_placeholders,
					      data);
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
					      replace_reduction_placeholders,
					      data);
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *pc = NULL_TREE;
      *expr_p = t;
    }
  return gimplify_omp_for (expr_p, pre_p);
}
/* Helper function of optimize_target_teams, find OMP_TEAMS inside
   of OMP_TARGET's body.  */

static tree
find_omp_teams (tree *tp, int *walk_subtrees, void *)
{
  *walk_subtrees = 0;
  switch (TREE_CODE (*tp))
    {
    case OMP_TEAMS:
      return *tp;
    case BIND_EXPR:
    case STATEMENT_LIST:
      *walk_subtrees = 1;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
/* Try to determine if the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially are,
   integral decls that are firstprivate (explicitly or implicitly)
   or explicitly map(always, to:) or map(always, tofrom:) on the target
   region too, and expressions involving simple arithmetics on those
   too, function calls are not ok, dereferencing something neither etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default
   -1 stands for value that can't be determined easily before entering
   the target construct.
   If teams construct is not present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined.  */
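/* For example (an illustrative sketch, not from the original source), for

       #pragma omp target
       #pragma omp teams num_teams (4) thread_limit (n)

   this adds num_teams(4) to the target construct, and thread_limit(n)
   when the value of n is provably computable on the host before the
   region (thread_limit(-1) otherwise), so the runtime can pick launch
   dimensions without first entering the region.  */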
static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
/* Gimplify the gross structure of several OMP constructs.  */
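/* That is (a descriptive note, not from the original source): OMP_SECTIONS,
   OMP_SINGLE, OMP_SCOPE, OMP_TARGET and its data variants, OMP_TEAMS, and
   the OpenACC data and compute constructs; see the switches below for the
   full construct -> GIMPLE statement mapping.  */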
static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_SCOPE:
      ort = ORT_TASKGROUP;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_SERIAL:
      ort = ORT_ACC_SERIAL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      if (gimplify_omp_ctxp == NULL
	  || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
	ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  bool save_in_omp_construct = in_omp_construct;
  if ((ort & ORT_ACC) == 0)
    in_omp_construct = false;
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
      || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
    {
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));
  in_omp_construct = save_in_omp_construct;

  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
	{
	  for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
	}

      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OACC_SERIAL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_SCOPE:
      stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
	 to be evaluated before the use_device_{ptr,addr} clauses if they
	 refer to the same variables.  */
      {
	tree use_device_clauses;
	tree *pc, *uc = &use_device_clauses;
	for (pc = &OMP_CLAUSES (expr); *pc; )
	  if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
	      || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
	    {
	      *uc = *pc;
	      *pc = OMP_CLAUSE_CHAIN (*pc);
	      uc = &OMP_CLAUSE_CHAIN (*uc);
	    }
	  else
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	*uc = NULL_TREE;
	*pc = use_device_clauses;
	stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
					OMP_CLAUSES (expr));
      }
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
	gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
   target update constructs.  */

static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  int kind;
  gomp_target *stmt;
  enum omp_region_type ort = ORT_WORKSHARE;

  switch (TREE_CODE (expr))
    {
    case OACC_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
      ort = ORT_ACC;
      break;
    case OACC_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
      ort = ORT_ACC;
      break;
    case OACC_UPDATE:
      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
      ort = ORT_ACC;
      break;
    case OMP_TARGET_UPDATE:
      kind = GF_OMP_TARGET_KIND_UPDATE;
      break;
    case OMP_TARGET_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_ENTER_DATA;
      break;
    case OMP_TARGET_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_EXIT_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
			     ort, TREE_CODE (expr));
  gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
			       TREE_CODE (expr));
  if (TREE_CODE (expr) == OACC_UPDATE
      && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			  OMP_CLAUSE_IF_PRESENT))
    {
      /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
	 option.  */
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FORCE_TO:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	      break;
	    case GOMP_MAP_FORCE_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
	      break;
	    default:
	      break;
	    }
    }
  else if (TREE_CODE (expr) == OACC_EXIT_DATA
	   && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			       OMP_CLAUSE_FINALIZE))
    {
      /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
	 semantics.  */
      bool have_clause = false;
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
	      have_clause = true;
	      break;
	    case GOMP_MAP_RELEASE:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
	      have_clause = true;
	      break;
	    case GOMP_MAP_TO_PSET:
	      /* Fortran arrays with descriptors must map that descriptor when
		 doing standalone "attach" operations (in OpenACC).  In that
		 case GOMP_MAP_TO_PSET appears by itself with no preceding
		 clause (see trans-openmp.c:gfc_trans_omp_clauses).  */
	      break;
	    case GOMP_MAP_POINTER:
	      /* TODO PR92929: we may see these here, but they'll always follow
		 one of the clauses above, and will be handled by libgomp as
		 one group, so no handling required here.  */
	      gcc_assert (have_clause);
	      break;
	    case GOMP_MAP_DETACH:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
	      have_clause = false;
	      break;
	    case GOMP_MAP_STRUCT:
	      have_clause = false;
	      break;
	    default:
	      gcc_unreachable ();
	    }
    }
  stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */
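/* For example (an illustrative sketch, not from the original source), for
   "#pragma omp atomic" applied to "x += 1" the front end presents the lhs
   as *&x; given ADDR = &x this recognizes *&x, possibly wrapped in useless
   type conversions, as denoting that same location.  */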
static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (TREE_CODE (expr) == INDIRECT_REF)
    {
      expr = TREE_OPERAND (expr, 0);
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */
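/* For example (an illustrative sketch, not from the original source), for
   "#pragma omp atomic" applied to "x = x + foo ()", the call foo () does
   not involve the lhs, so it is evaluated into a temporary before the
   atomic sequence, while the occurrence of "x" itself is replaced by
   LHS_VAR.  */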
static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var, tree &target_expr, bool rhs, int depth)
{
  tree expr = *expr_p;
  int saw_lhs = 0;

  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      if (pre_p)
	*expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  /* Maximum depth of lhs in expression is for the
     __builtin_clear_padding (...), __builtin_clear_padding (...),
     __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs;  */
  if (++depth > 7)
    goto finish;

  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case MODIFY_EXPR:
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    {
	      /* Special-case __builtin_clear_padding call before
		 __builtin_memcmp.  */
	      if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
		{
		  tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
		  if (fndecl
		      && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
		      && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
		      && (!pre_p
			  || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
						 lhs_addr, lhs_var,
						 target_expr, true, depth)))
		    {
		      saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
						    pre_p, lhs_addr, lhs_var,
						    target_expr, true, depth);
		      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
						     pre_p, lhs_addr, lhs_var,
						     target_expr, rhs, depth);
		      return saw_lhs;
		    }
		}

	      if (pre_p)
		gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	    }
	  if (!pre_p)
	    return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
				       target_expr, rhs, depth);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
				     target_expr, rhs, depth);
	case COND_EXPR:
	  if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
				   lhs_var, target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case TARGET_EXPR:
	  if (TARGET_EXPR_INITIAL (expr))
	    {
	      if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
						lhs_var, target_expr, true,
						depth))
		break;
	      if (expr == target_expr)
		saw_lhs = 1;
	      else
		{
		  saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
						pre_p, lhs_addr, lhs_var,
						target_expr, true, depth);
		  if (saw_lhs && target_expr == NULL_TREE && pre_p)
		    target_expr = expr;
		}
	    }
	  break;
	default:
	  break;
	}
      break;
    case tcc_reference:
      if (TREE_CODE (expr) == BIT_FIELD_REF
	  || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var, target_expr, true,
				       depth);
      break;
    case tcc_vl_exp:
      if (TREE_CODE (expr) == CALL_EXPR)
	{
	  if (tree fndecl = get_callee_fndecl (expr))
	    if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
		|| fndecl_built_in_p (fndecl, BUILT_IN_MEMCMP))
	      {
		int nargs = call_expr_nargs (expr);
		for (int i = 0; i < nargs; i++)
		  saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
						 pre_p, lhs_addr, lhs_var,
						 target_expr, true, depth);
	      }
	}
      break;
    default:
      break;
    }

 finish:
14093 if (saw_lhs
== 0 && pre_p
)
14095 enum gimplify_status gs
;
14096 if (TREE_CODE (expr
) == CALL_EXPR
&& VOID_TYPE_P (TREE_TYPE (expr
)))
14098 gimplify_stmt (&expr
, pre_p
);
14102 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
14104 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
);
14105 if (gs
!= GS_ALL_DONE
)
/* Gimplify an OMP_ATOMIC statement.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;
  tree target_expr = NULL_TREE;

  tmp_load = create_tmp_reg (type);
  if (rhs
      && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
			     true, 0) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
					   OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      tree rhsarg = rhs;
      if (TREE_CODE (rhs) == COND_EXPR)
	rhsarg = TREE_OPERAND (rhs, 1);
      if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhsarg, 2);
	  tree op1 = TREE_OPERAND (rhsarg, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
	  tree t = build2_loc (EXPR_LOCATION (rhsarg),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhsarg),
					   BIT_FIELD_REF, TREE_TYPE (op1),
					   tmp_store, bitsize, bitpos), op1);
	  if (TREE_CODE (rhs) == COND_EXPR)
	    t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
			    TREE_OPERAND (rhs, 0), t, void_node);
	  gimplify_and_add (t, pre_p);
	  *expr_p = tmp_store;
	  return GS_ALL_DONE;
	}
      bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
      if (TREE_CODE (rhs) == COND_EXPR)
	gimplify_ctxp->allow_rhs_cond_expr = true;
      enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
					       is_gimple_val, fb_rvalue);
      gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
      if (gs != GS_ALL_DONE)
	return GS_ERROR;
    }

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt
    = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
    {
      gimple_omp_atomic_set_weak (loadstmt);
      gimple_omp_atomic_set_weak (storestmt);
    }
  gimplify_seq_add_stmt (pre_p, storestmt);
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
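
/* Worked example (illustrative, not from the original source): for

       #pragma omp atomic capture
       v = x += 1;

   which reaches here as OMP_ATOMIC_CAPTURE_NEW, the function emits
   approximately

       D.1 = GIMPLE_OMP_ATOMIC_LOAD <&x>
       GIMPLE_OMP_ATOMIC_STORE <D.1 + 1>   (need_value set)

   and replaces *EXPR_P with the stored rhs, so `v' receives the new value;
   OMP_ATOMIC_CAPTURE_OLD would instead hand back tmp_load.  */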
/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
   body, and adding some EH bits.  */

static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  gimple *body_stmt;
  gtransaction *trans_stmt;
  gimple_seq body = NULL;
  int subcode = 0;

  /* Wrap the transaction body in a BIND_EXPR so we have a context
     where to put decls for OMP.  */
  if (TREE_CODE (tbody) != BIND_EXPR)
    {
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
      TRANSACTION_EXPR_BODY (expr) = bind;
    }

  push_gimplify_context ();
  temp = voidify_wrapper_expr (*expr_p, NULL);

  body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  pop_gimplify_context (body_stmt);

  trans_stmt = gimple_build_transaction (body);
  if (TRANSACTION_EXPR_OUTER (expr))
    subcode = GTMA_IS_OUTER;
  else if (TRANSACTION_EXPR_RELAXED (expr))
    subcode = GTMA_IS_RELAXED;
  gimple_transaction_set_subcode (trans_stmt, subcode);

  gimplify_seq_add_stmt (pre_p, trans_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
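
/* Illustrative sketch (not from the original source): a TM statement

       __transaction_atomic { x++; }

   becomes a GTRANSACTION tuple whose body holds the gimplified `x++' and
   whose subcode is 0; __transaction_relaxed would instead set
   GTMA_IS_RELAXED, and an outer atomic transaction GTMA_IS_OUTER.  */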
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Return the gimplified GIMPLE_OMP_ORDERED tuple.  */

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %<depend%> clause must be "
		      "closely nested inside a loop with %<ordered%> clause "
		      "with a parameter");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	  {
	    bool fail = false;
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %<depend%> clause with "
			  "%<sink%> modifier does not match number of "
			  "iteration variables");
		failures++;
	      }
	    sink_c = c;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	  {
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %<depend%> clause with %<source%> "
			  "modifier on an %<ordered%> construct");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%<depend%> clause with %<source%> modifier specified "
		"together with %<depend%> clauses with %<sink%> modifier "
		"on the same construct");
      failures++;
    }

  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
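
/* Illustrative example (not from the original source): inside

       #pragma omp for ordered(1)
       for (i = 1; i < n; i++)
	 {
	   #pragma omp ordered depend(sink: i - 1)
	   ...
	   #pragma omp ordered depend(source)
	 }

   both nested constructs pass the checks above: the sink iteration vector
   names the loop's own iteration variable, its length matches the
   ordered(1) parameter, and only one source clause appears.  */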
/* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
   expression produces a value to be used as an operand inside a GIMPLE
   statement, the value will be stored back in *EXPR_P.  This value will
   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
   emitted in PRE_P and POST_P.

   Additionally, this process may overwrite parts of the input
   expression during gimplification.  Ideally, it should be
   possible to do non-destructive gimplification.

   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
      the expression needs to evaluate to a value to be used as
      an operand in a GIMPLE statement, this value will be stored in
      *EXPR_P on exit.  This happens when the caller specifies one
      of fb_lvalue or fb_rvalue fallback flags.

   PRE_P will contain the sequence of GIMPLE statements corresponding
       to the evaluation of EXPR and all the side-effects that must
       be executed before the main expression.  On exit, the last
       statement of PRE_P is the core statement being gimplified.  For
       instance, when gimplifying 'if (++a)' the last statement in
       PRE_P will be 'if (t.1)' where t.1 is the result of
       pre-incrementing 'a'.

   POST_P will contain the sequence of GIMPLE statements corresponding
       to the evaluation of all the side-effects that must be executed
       after the main expression.  If this is NULL, the post
       side-effects are stored at the end of PRE_P.

       The reason why the output is split in two is to handle post
       side-effects explicitly.  In some cases, an expression may have
       inner and outer post side-effects which need to be emitted in
       an order different from the one given by the recursive
       traversal.  For instance, for the expression (*p--)++ the post
       side-effects of '--' must actually occur *after* the post
       side-effects of '++'.  However, gimplification will first visit
       the inner expression, so if a separate POST sequence was not
       used, the resulting sequence would be:

	    1	t.1 = *p
	    2	p = p - 1
	    3	t.2 = t.1 + 1
	    4	*p = t.2

       However, the post-decrement operation in line #2 must not be
       evaluated until after the store to *p at line #4, so the
       correct sequence should be:

	    1	t.1 = *p
	    2	t.2 = t.1 + 1
	    3	*p = t.2
	    4	p = p - 1

       So, by specifying a separate post queue, it is possible
       to emit the post side-effects in the correct order.
       If POST_P is NULL, an internal queue will be used.  Before
       returning to the caller, the sequence POST_P is appended to
       the main output sequence PRE_P.

   GIMPLE_TEST_F points to a function that takes a tree T and
       returns nonzero if T is in the GIMPLE form requested by the
       caller.  The GIMPLE predicates are in gimple.c.

   FALLBACK tells the function what sort of a temporary we want if
       gimplification cannot produce an expression that complies with
       GIMPLE_TEST_F.

       fb_none means that no temporary should be generated
       fb_rvalue means that an rvalue is OK to generate
       fb_lvalue means that an lvalue is OK to generate
       fb_either means that either is OK, but an lvalue is preferable.
       fb_mayfail means that gimplification may fail (in which case
       GS_ERROR will be returned)

   The return value is either GS_ERROR or GS_ALL_DONE, since this
   function iterates until EXPR is completely gimplified or an error
   occurs.  */
enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback)
{
  tree tmp;
  gimple_seq internal_pre = NULL;
  gimple_seq internal_post = NULL;
  tree save_expr;
  bool is_statement;
  location_t saved_location;
  enum gimplify_status ret;
  gimple_stmt_iterator pre_last_gsi, post_last_gsi;
  tree label;

  save_expr = *expr_p;
  if (save_expr == NULL_TREE)
    return GS_ALL_DONE;

  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
  is_statement = gimple_test_f == is_gimple_stmt;
  if (is_statement)
    gcc_assert (pre_p);

  /* Consistency checks.  */
  if (gimple_test_f == is_gimple_reg)
    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
  else if (gimple_test_f == is_gimple_val
	   || gimple_test_f == is_gimple_call_addr
	   || gimple_test_f == is_gimple_condexpr
	   || gimple_test_f == is_gimple_condexpr_for_cond
	   || gimple_test_f == is_gimple_mem_rhs
	   || gimple_test_f == is_gimple_mem_rhs_or_call
	   || gimple_test_f == is_gimple_reg_rhs
	   || gimple_test_f == is_gimple_reg_rhs_or_call
	   || gimple_test_f == is_gimple_asm_val
	   || gimple_test_f == is_gimple_mem_ref_addr)
    gcc_assert (fallback & fb_rvalue);
  else if (gimple_test_f == is_gimple_min_lval
	   || gimple_test_f == is_gimple_lvalue)
    gcc_assert (fallback & fb_lvalue);
  else if (gimple_test_f == is_gimple_addressable)
    gcc_assert (fallback & fb_either);
  else if (gimple_test_f == is_gimple_stmt)
    gcc_assert (fallback == fb_none);
  else
    {
      /* We should have recognized the GIMPLE_TEST_F predicate to
	 know what kind of fallback to use in case a temporary is
	 needed to hold the value or address of *EXPR_P.  */
      gcc_unreachable ();
    }

  /* We used to check the predicate here and return immediately if it
     succeeds.  This is wrong; the design is for gimplification to be
     idempotent, and for the predicates to only test for valid forms, not
     whether they are fully simplified.  */
  if (pre_p == NULL)
    pre_p = &internal_pre;

  if (post_p == NULL)
    post_p = &internal_post;

  /* Remember the last statements added to PRE_P and POST_P.  Every
     new statement added by the gimplification helpers needs to be
     annotated with location information.  To centralize the
     responsibility, we remember the last statement that had been
     added to both queues before gimplifying *EXPR_P.  If
     gimplification produces new statements in PRE_P and POST_P, those
     statements will be annotated with the same location information
     as *EXPR_P.  */
  pre_last_gsi = gsi_last (*pre_p);
  post_last_gsi = gsi_last (*post_p);

  saved_location = input_location;
  if (save_expr != error_mark_node
      && EXPR_HAS_LOCATION (*expr_p))
    input_location = EXPR_LOCATION (*expr_p);

  /* Loop over the specific gimplifiers until the toplevel node
     remains the same.  */
  do
    {
      /* Strip away as many useless type conversions as possible
	 at the toplevel.  */
      STRIP_USELESS_TYPE_CONVERSION (*expr_p);

      /* Remember the expr.  */
      save_expr = *expr_p;

      /* Die, die, die, my darling.  */
      if (error_operand_p (save_expr))
	{
	  ret = GS_ERROR;
	  break;
	}

      /* Do any language-specific gimplification.  */
      ret = ((enum gimplify_status)
	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
      if (ret == GS_OK)
	{
	  if (*expr_p == NULL_TREE)
	    break;
	  if (*expr_p != save_expr)
	    continue;
	}
      else if (ret != GS_UNHANDLED)
	break;

      /* Make sure that all the cases set 'ret' appropriately.  */
      ret = GS_UNHANDLED;
      switch (TREE_CODE (*expr_p))
	{
	  /* First deal with the special cases.  */

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
					fallback != fb_none,
					TREE_TYPE (*expr_p));
	  break;

	case VIEW_CONVERT_EXPR:
	  if ((fallback & fb_rvalue)
	      && is_gimple_reg_type (TREE_TYPE (*expr_p))
	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      recalculate_side_effects (*expr_p);
	      break;
	    }
	  /* Fallthru.  */

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case COMPONENT_REF:
	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
					fallback ? fallback : fb_rvalue);
	  break;

	case COND_EXPR:
	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);

	  /* C99 code may assign to an array in a structure value of a
	     conditional expression, and this has undefined behavior
	     only on execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case CALL_EXPR:
	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);

	  /* C99 code may assign to an array in a structure returned
	     from a function, and this has undefined behavior only on
	     execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case TREE_LIST:
	  gcc_unreachable ();

	case COMPOUND_EXPR:
	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
	  break;

	case COMPOUND_LITERAL_EXPR:
	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
						gimple_test_f, fallback);
	  break;

	case MODIFY_EXPR:
	case INIT_EXPR:
	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
				      fallback != fb_none);
	  break;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  {
	    /* Preserve the original type of the expression and the
	       source location of the outer expression.  */
	    tree org_type = TREE_TYPE (*expr_p);
	    *expr_p = gimple_boolify (*expr_p);
	    *expr_p = build3_loc (input_location, COND_EXPR,
				  org_type, *expr_p,
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_true_node),
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_false_node));
	    ret = GS_OK;
	    break;
	  }

	case TRUTH_NOT_EXPR:
	  {
	    tree type = TREE_TYPE (*expr_p);
	    /* The parsers are careful to generate TRUTH_NOT_EXPR
	       only with operands that are always zero or one.
	       We do not fold here but handle the only interesting case
	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
	    *expr_p = gimple_boolify (*expr_p);
	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0));
	    else
	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0),
				    build_int_cst (TREE_TYPE (*expr_p), 1));
	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
	    ret = GS_OK;
	    break;
	  }

	case ADDR_EXPR:
	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
	  break;

	case ANNOTATE_EXPR:
	  {
	    tree cond = TREE_OPERAND (*expr_p, 0);
	    tree kind = TREE_OPERAND (*expr_p, 1);
	    tree data = TREE_OPERAND (*expr_p, 2);
	    tree type = TREE_TYPE (cond);
	    if (!INTEGRAL_TYPE_P (type))
	      {
		*expr_p = cond;
		ret = GS_OK;
		break;
	      }
	    tree tmp = create_tmp_var (type);
	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
	    gcall *call
	      = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
	    gimple_call_set_lhs (call, tmp);
	    gimplify_seq_add_stmt (pre_p, call);
	    *expr_p = tmp;
	    ret = GS_ALL_DONE;
	    break;
	  }

	case VA_ARG_EXPR:
	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
	  break;

	CASE_CONVERT:
	  if (IS_EMPTY_STMT (*expr_p))
	    {
	      ret = GS_ALL_DONE;
	      break;
	    }

	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
	      || fallback == fb_none)
	    {
	      /* Just strip a conversion to void (or in void context) and
		 try again.  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	      ret = GS_OK;
	      break;
	    }

	  ret = gimplify_conversion (expr_p);
	  if (ret == GS_ERROR)
	    break;
	  if (*expr_p != save_expr)
	    break;
	  /* FALLTHRU */

	case FIX_TRUNC_EXPR:
	  /* unary_expr: ... | '(' cast ')' val | ...  */
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_val, fb_rvalue);
	  recalculate_side_effects (*expr_p);
	  break;

	case INDIRECT_REF:
	  {
	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));

	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
	    if (*expr_p != save_expr)
	      {
		ret = GS_OK;
		break;
	      }

	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				 is_gimple_reg, fb_rvalue);
	    if (ret == GS_ERROR)
	      break;

	    recalculate_side_effects (*expr_p);
	    *expr_p = fold_build2_loc (input_location, MEM_REF,
				       TREE_TYPE (*expr_p),
				       TREE_OPERAND (*expr_p, 0),
				       build_int_cst (saved_ptr_type, 0));
	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
	    TREE_THIS_NOTRAP (*expr_p) = notrap;
	    ret = GS_OK;
	    break;
	  }

	/* We arrive here through the various re-gimplifcation paths.  */
	case MEM_REF:
	  /* First try re-folding the whole thing.  */
	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
			     TREE_OPERAND (*expr_p, 0),
			     TREE_OPERAND (*expr_p, 1));
	  if (tmp)
	    {
	      REF_REVERSE_STORAGE_ORDER (tmp)
		= REF_REVERSE_STORAGE_ORDER (*expr_p);
	      *expr_p = tmp;
	      recalculate_side_effects (*expr_p);
	      ret = GS_OK;
	      break;
	    }
	  /* Avoid re-gimplifying the address operand if it is already
	     in suitable form.  Re-gimplifying would mark the address
	     operand addressable.  Always gimplify when not in SSA form
	     as we still may have to gimplify decls with value-exprs.  */
	  if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				   is_gimple_mem_ref_addr, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  recalculate_side_effects (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	/* Constants need not be gimplified.  */
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case STRING_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	  /* Drop the overflow flag on constants, we do not want
	     that in the GIMPLE IL.  */
	  if (TREE_OVERFLOW_P (*expr_p))
	    *expr_p = drop_tree_overflow (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	case CONST_DECL:
	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
	     CONST_DECL node.  Otherwise the decl is replaceable by its
	     DECL_INITIAL.  */
	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
	  if (fallback & fb_lvalue)
	    ret = GS_ALL_DONE;
	  else
	    {
	      *expr_p = DECL_INITIAL (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case DECL_EXPR:
	  ret = gimplify_decl_expr (expr_p, pre_p);
	  break;

	case BIND_EXPR:
	  ret = gimplify_bind_expr (expr_p, pre_p);
	  break;

	case LOOP_EXPR:
	  ret = gimplify_loop_expr (expr_p, pre_p);
	  break;

	case SWITCH_EXPR:
	  ret = gimplify_switch_expr (expr_p, pre_p);
	  break;

	case EXIT_EXPR:
	  ret = gimplify_exit_expr (expr_p);
	  break;

	case GOTO_EXPR:
	  /* If the target is not LABEL, then it is a computed jump
	     and the target needs to be gimplified.  */
	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
	    {
	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
				   NULL, is_gimple_val, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case PREDICT_EXPR:
	  gimplify_seq_add_stmt (pre_p,
			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
					      PREDICT_EXPR_OUTCOME (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case LABEL_EXPR:
	  ret = gimplify_label_expr (expr_p, pre_p);
	  label = LABEL_EXPR_LABEL (*expr_p);
	  gcc_assert (decl_function_context (label) == current_function_decl);

	  /* If the label is used in a goto statement, or address of the label
	     is taken, we need to unpoison all variables that were seen so far.
	     Doing so would prevent us from reporting a false positives.  */
	  if (asan_poisoned_variables
	      && asan_used_labels != NULL
	      && asan_used_labels->contains (label)
	      && !gimplify_omp_ctxp)
	    asan_poison_variables (asan_poisoned_variables, false, pre_p);
	  break;

	case CASE_LABEL_EXPR:
	  ret = gimplify_case_label_expr (expr_p, pre_p);

	  if (gimplify_ctxp->live_switch_vars)
	    asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
				   pre_p);
	  break;

	case RETURN_EXPR:
	  ret = gimplify_return_expr (*expr_p, pre_p);
	  break;

	case CONSTRUCTOR:
	  /* Don't reduce this in place; let gimplify_init_constructor work its
	     magic.  Buf if we're just elaborating this for side effects, just
	     gimplify any element that has side-effects.  */
	  if (fallback == fb_none)
	    {
	      unsigned HOST_WIDE_INT ix;
	      tree val;
	      tree temp = NULL_TREE;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
		if (TREE_SIDE_EFFECTS (val))
		  append_to_statement_list (val, &temp);

	      *expr_p = temp;
	      ret = temp ? GS_OK : GS_ALL_DONE;
	    }
	  /* C99 code may assign to an array in a constructed
	     structure or union, and this has undefined behavior only
	     on execution, so create a temporary if an lvalue is
	     required.  */
	  else if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  else
	    ret = GS_ALL_DONE;
	  break;

	  /* The following are special cases that are not handled by the
	     original GIMPLE grammar.  */

	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
	     eliminated.  */
	case SAVE_EXPR:
	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
	  break;

	case BIT_FIELD_REF:
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			       post_p, is_gimple_lvalue, fb_either);
	  recalculate_side_effects (*expr_p);
	  break;

	case TARGET_MEM_REF:
	  {
	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;

	    if (TMR_BASE (*expr_p))
	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
				  post_p, is_gimple_mem_ref_addr, fb_either);
	    if (TMR_INDEX (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    if (TMR_INDEX2 (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
	    ret = MIN (r0, r1);
	    break;
	  }

	case NON_LVALUE_EXPR:
	  /* This should have been stripped above.  */
	  gcc_unreachable ();

	case ASM_EXPR:
	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
	  break;

	case TRY_FINALLY_EXPR:
	case TRY_CATCH_EXPR:
	  {
	    gimple_seq eval, cleanup;
	    gtry *try_;

	    /* Calls to destructors are generated automatically in FINALLY/CATCH
	       block. They should have location as UNKNOWN_LOCATION. However,
	       gimplify_call_expr will reset these call stmts to input_location
	       if it finds stmt's location is unknown. To prevent resetting for
	       destructors, we set the input_location to unknown.
	       Note that this only affects the destructor calls in FINALLY/CATCH
	       block, and will automatically reset to its original value by the
	       end of gimplify_expr.  */
	    input_location = UNKNOWN_LOCATION;
	    eval = cleanup = NULL;
	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
	    if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
		&& TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
	      {
		gimple_seq n = NULL, e = NULL;
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						0), &n);
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						1), &e);
		if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
		  {
		    geh_else *stmt = gimple_build_eh_else (n, e);
		    gimple_seq_add_stmt (&cleanup, stmt);
		  }
	      }
	    else
	      gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
	    if (gimple_seq_empty_p (cleanup))
	      {
		gimple_seq_add_seq (pre_p, eval);
		ret = GS_ALL_DONE;
		break;
	      }
	    try_ = gimple_build_try (eval, cleanup,
				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
				     ? GIMPLE_TRY_FINALLY
				     : GIMPLE_TRY_CATCH);
	    if (EXPR_HAS_LOCATION (save_expr))
	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
	      gimple_set_location (try_, saved_location);
	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
	      gimple_try_set_catch_is_cleanup (try_,
					       TRY_CATCH_IS_CLEANUP (*expr_p));
	    gimplify_seq_add_stmt (pre_p, try_);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case CLEANUP_POINT_EXPR:
	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
	  break;

	case TARGET_EXPR:
	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
	  break;

	case CATCH_EXPR:
	  {
	    gimple *c;
	    gimple_seq handler = NULL;
	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
	    gimplify_seq_add_stmt (pre_p, c);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case EH_FILTER_EXPR:
	  {
	    gimple *ehf;
	    gimple_seq failure = NULL;

	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
	    copy_warning (ehf, *expr_p);
	    gimplify_seq_add_stmt (pre_p, ehf);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OBJ_TYPE_REF:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    TREE_SIDE_EFFECTS (*expr_p) = 0;
	    ret = MIN (r0, r1);
	    break;
	  }

	case LABEL_DECL:
	  /* We get here when taking the address of a label.  We mark
	     the label as "forced"; meaning it can never be removed and
	     it is a potential target for any computed goto.  */
	  FORCED_LABEL (*expr_p) = 1;
	  ret = GS_ALL_DONE;
	  break;

	case STATEMENT_LIST:
	  ret = gimplify_statement_list (expr_p, pre_p);
	  break;

	case WITH_SIZE_EXPR:
	  {
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			   post_p == &internal_post ? NULL : post_p,
			   gimple_test_f, fallback);
	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			   is_gimple_val, fb_rvalue);
	    ret = GS_ALL_DONE;
	  }
	  break;

	case VAR_DECL:
	case PARM_DECL:
	  ret = gimplify_var_or_parm_decl (expr_p);
	  break;

	case RESULT_DECL:
	  /* When within an OMP context, notice uses of variables.  */
	  if (gimplify_omp_ctxp)
	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
	  ret = GS_ALL_DONE;
	  break;

	case DEBUG_EXPR_DECL:
	  gcc_unreachable ();

	case DEBUG_BEGIN_STMT:
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_debug_begin_stmt
				 (TREE_BLOCK (*expr_p),
				  EXPR_LOCATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  *expr_p = NULL;
	  break;

	case SSA_NAME:
	  /* Allow callbacks into the gimplifier during optimization.  */
	  ret = GS_ALL_DONE;
	  break;

	case OMP_PARALLEL:
	  gimplify_omp_parallel (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_TASK:
	  gimplify_omp_task (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_FOR:
	case OMP_SIMD:
	case OMP_DISTRIBUTE:
	case OMP_TASKLOOP:
	case OACC_LOOP:
	  ret = gimplify_omp_for (expr_p, pre_p);
	  break;

	case OMP_LOOP:
	  ret = gimplify_omp_loop (expr_p, pre_p);
	  break;

	case OACC_CACHE:
	  gimplify_oacc_cache (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_DECLARE:
	  gimplify_oacc_declare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_DATA:
	case OACC_HOST_DATA:
	case OACC_KERNELS:
	case OACC_PARALLEL:
	case OACC_SERIAL:
	case OMP_SCOPE:
	case OMP_SECTIONS:
	case OMP_SINGLE:
	case OMP_TARGET:
	case OMP_TARGET_DATA:
	case OMP_TEAMS:
	  gimplify_omp_workshare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_ENTER_DATA:
	case OACC_EXIT_DATA:
	case OACC_UPDATE:
	case OMP_TARGET_UPDATE:
	case OMP_TARGET_ENTER_DATA:
	case OMP_TARGET_EXIT_DATA:
	  gimplify_omp_target_update (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SECTION:
	case OMP_MASTER:
	case OMP_MASKED:
	case OMP_ORDERED:
	case OMP_CRITICAL:
	case OMP_SCAN:
	  {
	    gimple_seq body = NULL;
	    gimple *g;
	    bool saved_in_omp_construct = in_omp_construct;

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    switch (TREE_CODE (*expr_p))
	      {
	      case OMP_SECTION:
		g = gimple_build_omp_section (body);
		break;
	      case OMP_MASTER:
		g = gimple_build_omp_master (body);
		break;
	      case OMP_ORDERED:
		g = gimplify_omp_ordered (*expr_p, body);
		break;
	      case OMP_MASKED:
		gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_MASKED);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_MASKED_CLAUSES (*expr_p),
					     OMP_MASKED);
		g = gimple_build_omp_masked (body,
					     OMP_MASKED_CLAUSES (*expr_p));
		break;
	      case OMP_CRITICAL:
		gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_CRITICAL);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_CRITICAL_CLAUSES (*expr_p),
					     OMP_CRITICAL);
		g = gimple_build_omp_critical (body,
					       OMP_CRITICAL_NAME (*expr_p),
					       OMP_CRITICAL_CLAUSES (*expr_p));
		break;
	      case OMP_SCAN:
		gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_SCAN);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_SCAN_CLAUSES (*expr_p),
					     OMP_SCAN);
		g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
		break;
	      default:
		gcc_unreachable ();
	      }
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OMP_TASKGROUP:
	  {
	    gimple_seq body = NULL;

	    tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
	    bool saved_in_omp_construct = in_omp_construct;
	    gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
				       OMP_TASKGROUP);
	    gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    gimple_seq cleanup = NULL;
	    tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
	    gimple *g = gimple_build_call (fn, 0);
	    gimple_seq_add_stmt (&cleanup, g);
	    g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	    body = NULL;
	    gimple_seq_add_stmt (&body, g);
	    g = gimple_build_omp_taskgroup (body, *pclauses);
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OMP_ATOMIC:
	case OMP_ATOMIC_READ:
	case OMP_ATOMIC_CAPTURE_OLD:
	case OMP_ATOMIC_CAPTURE_NEW:
	  ret = gimplify_omp_atomic (expr_p, pre_p);
	  break;

	case TRANSACTION_EXPR:
	  ret = gimplify_transaction (expr_p, pre_p);
	  break;

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  {
	    tree orig_type = TREE_TYPE (*expr_p);
	    tree new_type, xop0, xop1;
	    *expr_p = gimple_boolify (*expr_p);
	    new_type = TREE_TYPE (*expr_p);
	    if (!useless_type_conversion_p (orig_type, new_type))
	      {
		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
		ret = GS_OK;
		break;
	      }

	    /* Boolified binary truth expressions are semantically equivalent
	       to bitwise binary expressions.  Canonicalize them to the
	       bitwise variant.  */
	    switch (TREE_CODE (*expr_p))
	      {
	      case TRUTH_AND_EXPR:
		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
		break;
	      case TRUTH_OR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
		break;
	      case TRUTH_XOR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
		break;
	      default:
		break;
	      }
	    /* Now make sure that operands have compatible type to
	       expression's new_type.  */
	    xop0 = TREE_OPERAND (*expr_p, 0);
	    xop1 = TREE_OPERAND (*expr_p, 1);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
							    new_type,
							    xop0);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
							    new_type,
							    xop1);
	    /* Continue classified as tcc_binary.  */
	    goto expr_2;
	  }

	case VEC_COND_EXPR:
	  goto expr_3;

	case VEC_PERM_EXPR:
	  /* Classified as tcc_expression.  */
	  goto expr_3;

	case BIT_INSERT_EXPR:
	  /* Argument 3 is a constant.  */
	  goto expr_2;

	case POINTER_PLUS_EXPR:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    recalculate_side_effects (*expr_p);
	    ret = MIN (r0, r1);
	    break;
	  }

	default:
	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
	    {
	    case tcc_comparison:
	      /* Handle comparison of objects of non scalar mode aggregates
		 with a call to memcmp.  It would be nice to only have to do
		 this for variable-sized objects, but then we'd have to allow
		 the same nest of reference nodes we allow for MODIFY_EXPR and
		 that's too complex.

		 Compare scalar mode aggregates as scalar mode values.  Using
		 memcmp for them would be very inefficient at best, and is
		 plain wrong if bitfields are involved.  */
		{
		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));

		  /* Vector comparisons need no boolification.  */
		  if (TREE_CODE (type) == VECTOR_TYPE)
		    goto expr_2;
		  else if (!AGGREGATE_TYPE_P (type))
		    {
		      tree org_type = TREE_TYPE (*expr_p);
		      *expr_p = gimple_boolify (*expr_p);
		      if (!useless_type_conversion_p (org_type,
						      TREE_TYPE (*expr_p)))
			{
			  *expr_p = fold_convert_loc (input_location,
						      org_type, *expr_p);
			  ret = GS_OK;
			}
		      else
			goto expr_2;
		    }
		  else if (TYPE_MODE (type) != BLKmode)
		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
		  else
		    ret = gimplify_variable_sized_compare (expr_p);

		  break;
		}

	    /* If *EXPR_P does not need to be special-cased, handle it
	       according to its class.  */
	    case tcc_unary:
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      break;

	    case tcc_binary:
	    expr_2:
	      {
		enum gimplify_status r0, r1;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (r0, r1);
		break;
	      }

	    expr_3:
	      {
		enum gimplify_status r0, r1, r2;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (MIN (r0, r1), r2);
		break;
	      }

	    case tcc_declaration:
	    case tcc_constant:
	      ret = GS_ALL_DONE;
	      goto dont_recalculate;

	    default:
	      gcc_unreachable ();
	    }

	  recalculate_side_effects (*expr_p);

	dont_recalculate:
	  break;
	}

      gcc_assert (*expr_p || ret != GS_OK);
    }
  while (ret == GS_OK);

  /* If we encountered an error_mark somewhere nested inside, either
     stub out the statement or propagate the error back out.  */
  if (ret == GS_ERROR)
    {
      if (is_statement)
	*expr_p = NULL;
      goto out;
    }

  /* This was only valid as a return value from the langhook, which
     we handled.  Make sure it doesn't escape from any other context.  */
  gcc_assert (ret != GS_UNHANDLED);

  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
    {
      /* We aren't looking for a value, and we don't have a valid
	 statement.  If it doesn't have side-effects, throw it away.
	 We can also get here with code such as "*&&L;", where L is
	 a LABEL_DECL that is marked as FORCED_LABEL.  */
      if (TREE_CODE (*expr_p) == LABEL_DECL
	  || !TREE_SIDE_EFFECTS (*expr_p))
	*expr_p = NULL;
      else if (!TREE_THIS_VOLATILE (*expr_p))
	{
	  /* This is probably a _REF that contains something nested that
	     has side effects.  Recurse through the operands to find it.  */
	  enum tree_code code = TREE_CODE (*expr_p);

	  switch (code)
	    {
	    case COMPONENT_REF:
	    case REALPART_EXPR:
	    case IMAGPART_EXPR:
	    case VIEW_CONVERT_EXPR:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    default:
	      /* Anything else with side-effects must be converted to
		 a valid statement before we get here.  */
	      gcc_unreachable ();
	    }

	  *expr_p = NULL;
	}
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
	       && !is_empty_type (TREE_TYPE (*expr_p)))
	{
	  /* Historically, the compiler has treated a bare reference
	     to a non-BLKmode volatile lvalue as forcing a load.  */
	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));

	  /* Normally, we do not want to create a temporary for a
	     TREE_ADDRESSABLE type because such a type should not be
	     copied by bitwise-assignment.  However, we make an
	     exception here, as all we are doing here is ensuring that
	     we read the bytes that make up the type.  We use
	     create_tmp_var_raw because create_tmp_var will abort when
	     given a TREE_ADDRESSABLE type.  */
	  tree tmp = create_tmp_var_raw (type, "vol");
	  gimple_add_tmp_var (tmp);
	  gimplify_assign (tmp, *expr_p, pre_p);
	  *expr_p = NULL;
	}
      else
	/* We can't do anything useful with a volatile reference to
	   an incomplete type, so just throw it away.  Likewise for
	   a BLKmode type, since any implicit inner load should
	   already have been turned into an explicit one by the
	   gimplification process.  */
	*expr_p = NULL;
    }

  /* If we are gimplifying at the statement level, we're done.  Tack
     everything together and return.  */
  if (fallback == fb_none || is_statement)
    {
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
	 it out for GC to reclaim it.  */
      *expr_p = NULL_TREE;

      if (!gimple_seq_empty_p (internal_pre)
	  || !gimple_seq_empty_p (internal_post))
	{
	  gimplify_seq_add_seq (&internal_pre, internal_post);
	  gimplify_seq_add_seq (pre_p, internal_pre);
	}

      /* The result of gimplifying *EXPR_P is going to be the last few
	 statements in *PRE_P and *POST_P.  Add location information
	 to all the statements that were added by the gimplification
	 helpers.  */
      if (!gimple_seq_empty_p (*pre_p))
	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);

      if (!gimple_seq_empty_p (*post_p))
	annotate_all_with_location_after (*post_p, post_last_gsi,
					  input_location);

      goto out;
    }

#ifdef ENABLE_GIMPLE_CHECKING
  if (*expr_p)
    {
      enum tree_code code = TREE_CODE (*expr_p);
      /* These expressions should already be in gimple IR form.  */
      gcc_assert (code != MODIFY_EXPR
		  && code != ASM_EXPR
		  && code != BIND_EXPR
		  && code != CATCH_EXPR
		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
		  && code != EH_FILTER_EXPR
		  && code != GOTO_EXPR
		  && code != LABEL_EXPR
		  && code != LOOP_EXPR
		  && code != SWITCH_EXPR
		  && code != TRY_FINALLY_EXPR
		  && code != EH_ELSE_EXPR
		  && code != OACC_PARALLEL
		  && code != OACC_KERNELS
		  && code != OACC_SERIAL
		  && code != OACC_DATA
		  && code != OACC_HOST_DATA
		  && code != OACC_DECLARE
		  && code != OACC_UPDATE
		  && code != OACC_ENTER_DATA
		  && code != OACC_EXIT_DATA
		  && code != OACC_CACHE
		  && code != OMP_CRITICAL
		  && code != OMP_FOR
		  && code != OACC_LOOP
		  && code != OMP_MASTER
		  && code != OMP_MASKED
		  && code != OMP_TASKGROUP
		  && code != OMP_ORDERED
		  && code != OMP_PARALLEL
		  && code != OMP_SCAN
		  && code != OMP_SECTIONS
		  && code != OMP_SECTION
		  && code != OMP_SINGLE
		  && code != OMP_SCOPE);
    }
#endif

  /* Otherwise we're gimplifying a subexpression, so the resulting
     value is interesting.  If it's a valid operand that matches
     GIMPLE_TEST_F, we're done. Unless we are handling some
     post-effects internally; if that's the case, we need to copy into
     a temporary before adding the post-effects to POST_P.  */
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
    goto out;

  /* Otherwise, we need to create a new temporary for the gimplified
     expression.  */

  /* We can't return an lvalue if we have an internal postqueue.  The
     object the lvalue refers to would (probably) be modified by the
     postqueue; we need to copy the value out first, which means an
     rvalue.  */
  if ((fallback & fb_lvalue)
      && gimple_seq_empty_p (internal_post)
      && is_gimple_addressable (*expr_p))
    {
      /* An lvalue will do.  Take the address of the expression, store it
	 in a temporary, and replace the expression with an INDIRECT_REF of
	 that temporary.  */
      tree ref_alias_type = reference_alias_ptr_type (*expr_p);
      unsigned int ref_align = get_object_alignment (*expr_p);
      tree ref_type = TREE_TYPE (*expr_p);
      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
      if (TYPE_ALIGN (ref_type) != ref_align)
	ref_type = build_aligned_type (ref_type, ref_align);
      *expr_p = build2 (MEM_REF, ref_type,
			tmp, build_zero_cst (ref_alias_type));
    }
  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
    {
      /* An rvalue will do.  Assign the gimplified expression into a
	 new temporary TMP and replace the original expression with
	 TMP.  First, make sure that the expression has a type so that
	 it can be assigned into a temporary.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
      *expr_p = get_formal_tmp_var (*expr_p, pre_p);
    }
  else
    {
#ifdef ENABLE_GIMPLE_CHECKING
      if (!(fallback & fb_mayfail))
	{
	  fprintf (stderr, "gimplification failed:\n");
	  print_generic_expr (stderr, *expr_p);
	  debug_tree (*expr_p);
	  internal_error ("gimplification failed");
	}
#endif
      gcc_assert (fallback & fb_mayfail);

      /* If this is an asm statement, and the user asked for the
	 impossible, don't die.  Fail and let gimplify_asm_expr
	 issue an error.  */
      ret = GS_ERROR;
      goto out;
    }

  /* Make sure the temporary matches our predicate.  */
  gcc_assert ((*gimple_test_f) (*expr_p));

  if (!gimple_seq_empty_p (internal_post))
    {
      annotate_all_with_location (internal_post, input_location);
      gimplify_seq_add_seq (pre_p, internal_post);
    }

 out:
  input_location = saved_location;
  return ret;
}
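
/* Typical usage sketch (illustrative, not from the original source):
   forcing an operand into an rvalue suitable as a GIMPLE operand, with
   any required statements spilled into PRE_P:

       if (gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue)
	   == GS_ERROR)
	 return GS_ERROR;

   after which `op' satisfies is_gimple_val, e.g. a temporary, SSA name
   or constant.  */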
/* Like gimplify_expr but make sure the gimplified result is not itself
   a SSA name (but a decl if it were).  Temporaries required by
   evaluating *EXPR_P may be still SSA names.  */

static enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback,
	       bool allow_ssa)
{
  enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
					    gimple_test_f, fallback);
  if (! allow_ssa
      && TREE_CODE (*expr_p) == SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);

  return ret;
}
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  if (type == NULL || type == error_mark_node)
    return;

  const bool ignored_p
    = TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_IGNORED_P (TYPE_NAME (type));
  tree t;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!ignored_p
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    /* Likewise, ensure variable offsets aren't removed.  */
	    if (!ignored_p
		&& (t = DECL_FIELD_OFFSET (field))
		&& VAR_P (t)
		&& DECL_ARTIFICIAL (t))
	      DECL_IGNORED_P (t) = 0;
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
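
/* Illustrative example (not from the original source): for a VLA

       char a[n + 1];

   TYPE_SIZE of the array type is an expression along the lines of
   (n + 1) * 8 bits, which the calls above gimplify into something like

       D.1 = n + 1;
       D.2 = D.1 * 8;

   replacing TYPE_SIZE with the temporary D.2, so that every later use of
   the size refers to a single pre-evaluated value.  */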
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (expr == NULL_TREE
      || is_gimple_constant (expr)
      || TREE_CODE (expr) == VAR_DECL
      || CONTAINS_PLACEHOLDER_P (expr))
    return;

  *expr_p = unshare_expr (expr);

  /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
     if the def vanishes.  */
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);

  /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
     FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
     as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs.  */
  if (is_gimple_constant (*expr_p))
    *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
}
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_nondebug_stmt (seq);
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && (gimple_seq_first_nondebug_stmt (seq)
	  == gimple_seq_last_nondebug_stmt (seq)))
    {
      outer_bind = as_a <gbind *> (outer_stmt);
      if (gimple_seq_first_stmt (seq) != outer_stmt
	  || gimple_seq_last_stmt (seq) != outer_stmt)
	{
	  /* If there are debug stmts before or after outer_stmt, move them
	     inside of outer_bind body.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
	  gimple_seq second_seq = NULL;
	  if (gimple_seq_first_stmt (seq) != outer_stmt
	      && gimple_seq_last_stmt (seq) != outer_stmt)
	    {
	      second_seq = gsi_split_seq_after (gsi);
	      gsi_remove (&gsi, false);
	    }
	  else if (gimple_seq_first_stmt (seq) != outer_stmt)
	    gsi_remove (&gsi, false);
	  else
	    {
	      gsi_remove (&gsi, false);
	      second_seq = seq;
	      seq = NULL;
	    }
	  gimple_seq_add_seq_without_update (&seq,
					     gimple_bind_body (outer_bind));
	  gimple_seq_add_seq_without_update (&seq, second_seq);
	  gimple_bind_set_body (outer_bind, seq);
	}
    }
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
typedef char *char_p; /* For DEF_VEC_P.  */

/* Return whether we should exclude FNDECL from instrumentation.  */

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  vec<char_p> *v;

  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 1);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  return false;
}
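
/* Usage sketch (illustrative, not from the original source): with

       gcc -finstrument-functions \
	   -finstrument-functions-exclude-function-list=main \
	   -finstrument-functions-exclude-file-list=include/bits

   any FNDECL whose printable name contains "main", or whose source file
   name contains "include/bits", makes this predicate return true.  Note
   that the match is a plain strstr substring test, not a glob or regex.  */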
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);
  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  if (asan_sanitize_use_after_scope ())
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);
  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var, this_fn_addr;
      gcall *call;
      /* The instrumentation hooks aren't going to call the instrumented
	 function and the address they receive is expected to be matchable
	 against symbol addresses.  Make sure we don't create a trampoline,
	 in case the current function is nested.  */
      this_fn_addr = build_fold_addr_expr (current_function_decl);
      TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }
  if (sanitize_flags_p (SANITIZE_THREAD)
      && param_tsan_instrument_func_entry_exit)
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
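
/* For illustration (a rough sketch with hypothetical temporary names, not
   literal dump output), under -finstrument-functions the code above
   reshapes the body of a function foo into approximately:

     {
       return_addr.1 = __builtin_return_address (0);
       __cyg_profile_func_enter (&foo, return_addr.1);
       try
	 {
	   <original GIMPLE_BIND of foo>
	 }
       finally
	 {
	   return_addr.2 = __builtin_return_address (0);
	   __cyg_profile_func_exit (&foo, return_addr.2);
	 }
     }

   so the exit hook runs on every path out of the function, including
   exceptional ones.  */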
/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}
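
/* For instance, dummy_object (integer_type_node) builds the equivalent of
   *(int *) 0: a MEM_REF with the right type and mode, never meant to be
   executed, so gimplification can continue past the error.  */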
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);
  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
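
/* For example, 'va_arg (ap, char)' lands in the branch above: char is
   promoted to int when passed through '...', so the warning suggests
   'va_arg (ap, int)' and the whole expression is replaced by a trap
   followed by a dummy object of type char.  */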
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
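
/* Sketch of the result (exact operands depend on the target's va_list
   representation): a use such as 'x = va_arg (ap, double)' is rewritten
   into an internal-function call along the lines of

     x = VA_ARG (&ap, 0B, 0B);

   which a later pass (expand_ifn_va_arg in tree-stdarg.c) lowers for the
   target, triggered by the cleared PROP_gimple_lva property.  */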
/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
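
/* Typical usage (an illustrative sketch; DST and SRC are trees owned by
   the caller):

     gimple_seq stmts = NULL;
     gimple *g = gimplify_assign (dst, src, &stmts);

   appends 'dst = src' to STMTS, gimplifying either operand first if
   needed, and hands back the resulting GIMPLE_ASSIGN for further
   adjustment.  */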
inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}
inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}