1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2024 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "diagnostic.h" /* For errorcount. */
41 #include "fold-const.h"
46 #include "gimple-iterator.h"
47 #include "gimple-fold.h"
50 #include "stor-layout.h"
51 #include "print-tree.h"
52 #include "tree-iterator.h"
53 #include "tree-inline.h"
54 #include "langhooks.h"
57 #include "tree-hash-traits.h"
58 #include "omp-general.h"
60 #include "gimple-low.h"
61 #include "gomp-constants.h"
62 #include "splay-tree.h"
63 #include "gimple-walk.h"
64 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
66 #include "stringpool.h"
70 #include "omp-offload.h"
72 #include "tree-nested.h"
74 /* Hash set of poisoned variables in a bind expr. */
75 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
77 enum gimplify_omp_var_data
80 GOVD_EXPLICIT
= 0x000002,
81 GOVD_SHARED
= 0x000004,
82 GOVD_PRIVATE
= 0x000008,
83 GOVD_FIRSTPRIVATE
= 0x000010,
84 GOVD_LASTPRIVATE
= 0x000020,
85 GOVD_REDUCTION
= 0x000040,
88 GOVD_DEBUG_PRIVATE
= 0x000200,
89 GOVD_PRIVATE_OUTER_REF
= 0x000400,
90 GOVD_LINEAR
= 0x000800,
91 GOVD_ALIGNED
= 0x001000,
93 /* Flag for GOVD_MAP: don't copy back. */
94 GOVD_MAP_TO_ONLY
= 0x002000,
96 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
97 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 0x004000,
99 GOVD_MAP_0LEN_ARRAY
= 0x008000,
101 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
102 GOVD_MAP_ALWAYS_TO
= 0x010000,
104 /* Flag for shared vars that are or might be stored to in the region. */
105 GOVD_WRITTEN
= 0x020000,
107 /* Flag for GOVD_MAP, if it is a forced mapping. */
108 GOVD_MAP_FORCE
= 0x040000,
110 /* Flag for GOVD_MAP: must be present already. */
111 GOVD_MAP_FORCE_PRESENT
= 0x080000,
113 /* Flag for GOVD_MAP: only allocate. */
114 GOVD_MAP_ALLOC_ONLY
= 0x100000,
116 /* Flag for GOVD_MAP: only copy back. */
117 GOVD_MAP_FROM_ONLY
= 0x200000,
119 GOVD_NONTEMPORAL
= 0x400000,
121 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
122 GOVD_LASTPRIVATE_CONDITIONAL
= 0x800000,
124 GOVD_CONDTEMP
= 0x1000000,
126 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
127 GOVD_REDUCTION_INSCAN
= 0x2000000,
129 /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT. */
130 GOVD_FIRSTPRIVATE_IMPLICIT
= 0x4000000,
132 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
133 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
140 ORT_WORKSHARE
= 0x00,
141 ORT_TASKGROUP
= 0x01,
145 ORT_COMBINED_PARALLEL
= ORT_PARALLEL
| 1,
148 ORT_UNTIED_TASK
= ORT_TASK
| 1,
149 ORT_TASKLOOP
= ORT_TASK
| 2,
150 ORT_UNTIED_TASKLOOP
= ORT_UNTIED_TASK
| 2,
153 ORT_COMBINED_TEAMS
= ORT_TEAMS
| 1,
154 ORT_HOST_TEAMS
= ORT_TEAMS
| 2,
155 ORT_COMBINED_HOST_TEAMS
= ORT_COMBINED_TEAMS
| 2,
158 ORT_TARGET_DATA
= 0x40,
160 /* Data region with offloading. */
162 ORT_COMBINED_TARGET
= ORT_TARGET
| 1,
163 ORT_IMPLICIT_TARGET
= ORT_TARGET
| 2,
165 /* OpenACC variants. */
166 ORT_ACC
= 0x100, /* A generic OpenACC region. */
167 ORT_ACC_DATA
= ORT_ACC
| ORT_TARGET_DATA
, /* Data construct. */
168 ORT_ACC_PARALLEL
= ORT_ACC
| ORT_TARGET
, /* Parallel construct */
169 ORT_ACC_KERNELS
= ORT_ACC
| ORT_TARGET
| 2, /* Kernels construct. */
170 ORT_ACC_SERIAL
= ORT_ACC
| ORT_TARGET
| 4, /* Serial construct. */
171 ORT_ACC_HOST_DATA
= ORT_ACC
| ORT_TARGET_DATA
| 2, /* Host data. */
173 /* Dummy OpenMP region, used to disable expansion of
174 DECL_VALUE_EXPRs in taskloop pre body. */
178 /* Gimplify hashtable helper. */
180 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
182 static inline hashval_t
hash (const elt_t
*);
183 static inline bool equal (const elt_t
*, const elt_t
*);
188 struct gimplify_ctx
*prev_context
;
190 vec
<gbind
*> bind_expr_stack
;
192 gimple_seq conditional_cleanups
;
196 vec
<tree
> case_labels
;
197 hash_set
<tree
> *live_switch_vars
;
198 /* The formal temporary table. Should this be persistent? */
199 hash_table
<gimplify_hasher
> *temp_htab
;
202 unsigned into_ssa
: 1;
203 unsigned allow_rhs_cond_expr
: 1;
204 unsigned in_cleanup_point_expr
: 1;
205 unsigned keep_stack
: 1;
206 unsigned save_stack
: 1;
207 unsigned in_switch_expr
: 1;
210 enum gimplify_defaultmap_kind
213 GDMK_SCALAR_TARGET
, /* w/ Fortran's target attr, implicit mapping, only. */
219 struct gimplify_omp_ctx
221 struct gimplify_omp_ctx
*outer_context
;
222 splay_tree variables
;
223 hash_set
<tree
> *privatized_types
;
225 /* Iteration variables in an OMP_FOR. */
226 vec
<tree
> loop_iter_var
;
228 enum omp_clause_default_kind default_kind
;
229 enum omp_region_type region_type
;
233 bool target_firstprivatize_array_bases
;
235 bool order_concurrent
;
241 static struct gimplify_ctx
*gimplify_ctxp
;
242 static struct gimplify_omp_ctx
*gimplify_omp_ctxp
;
243 static bool in_omp_construct
;
245 /* Forward declaration. */
246 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
247 static hash_map
<tree
, tree
> *oacc_declare_returns
;
248 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
249 bool (*) (tree
), fallback_t
, bool);
250 static void prepare_gimple_addressable (tree
*, gimple_seq
*);
252 /* Shorter alias name for the above function for use in gimplify.cc
/* NOTE(review): forwards to gimple_seq_add_stmt_without_update, so GS is
   appended to *SEQ_P without any operand scanning or def/use updating.  */
256 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
258 gimple_seq_add_stmt_without_update (seq_p
, gs
);
261 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
262 NULL, a new sequence is allocated. This function is
263 similar to gimple_seq_add_seq, but does not scan the operands.
264 During gimplification, we need to manipulate statement sequences
265 before the def/use vectors have been constructed. */
268 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
270 gimple_stmt_iterator si
;
/* Position an iterator on the current last statement of *DST_P and splice
   SRC in after it, again without def/use updating.  */
275 si
= gsi_last (*dst_p
);
276 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
280 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
281 and popping gimplify contexts. */
283 static struct gimplify_ctx
*ctx_pool
= NULL
;
285 /* Return a gimplify context struct from the pool. */
/* NOTE(review): the pool is a singly linked free list threaded through
   prev_context.  The visible lines suggest the head entry is reused when
   the pool is non-empty and a fresh struct is XNEWed otherwise, but the
   if/else lines are elided from this view -- confirm against the full
   source.  */
287 static inline struct gimplify_ctx
*
290 struct gimplify_ctx
* c
= ctx_pool
;
293 ctx_pool
= c
->prev_context
;
295 c
= XNEW (struct gimplify_ctx
);
/* The struct handed back is zeroed before use.  */
297 memset (c
, '\0', sizeof (*c
));
301 /* Put gimplify context C back into the pool. */
/* NOTE(review): C is chained onto the pool free list; the line making C
   the new list head (assignment to ctx_pool) is elided from this view --
   confirm against the full source.  */
304 ctx_free (struct gimplify_ctx
*c
)
306 c
->prev_context
= ctx_pool
;
310 /* Free allocated ctx stack memory. */
313 free_gimplify_stack (void)
315 struct gimplify_ctx
*c
;
/* Pop every entry off the pool free list until it is empty.
   NOTE(review): the call releasing each popped entry is elided from this
   view -- confirm against the full source.  */
317 while ((c
= ctx_pool
))
319 ctx_pool
= c
->prev_context
;
325 /* Set up a context for the gimplifier. */
328 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
330 struct gimplify_ctx
*c
= ctx_alloc ();
332 c
->prev_context
= gimplify_ctxp
;
334 gimplify_ctxp
->into_ssa
= in_ssa
;
335 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
338 /* Tear down a context for the gimplifier. If BODY is non-null, then
339 put the temporaries into the outer BIND_EXPR. Otherwise, put them
342 BODY is not a sequence, but the first tuple in a sequence. */
345 pop_gimplify_context (gimple
*body
)
347 struct gimplify_ctx
*c
= gimplify_ctxp
;
350 && (!c
->bind_expr_stack
.exists ()
351 || c
->bind_expr_stack
.is_empty ()));
352 c
->bind_expr_stack
.release ();
353 gimplify_ctxp
= c
->prev_context
;
356 declare_vars (c
->temps
, body
, false);
358 record_vars (c
->temps
);
365 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
368 gimple_push_bind_expr (gbind
*bind_stmt
)
370 gimplify_ctxp
->bind_expr_stack
.reserve (8);
371 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
374 /* Pop the first element off the stack of bindings. */
377 gimple_pop_bind_expr (void)
379 gimplify_ctxp
->bind_expr_stack
.pop ();
382 /* Return the first element of the stack of bindings. */
385 gimple_current_bind_expr (void)
387 return gimplify_ctxp
->bind_expr_stack
.last ();
390 /* Return the stack of bindings created during gimplification. */
393 gimple_bind_expr_stack (void)
395 return gimplify_ctxp
->bind_expr_stack
;
398 /* Return true iff there is a COND_EXPR between us and the innermost
399 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
402 gimple_conditional_context (void)
404 return gimplify_ctxp
->conditions
> 0;
407 /* Note that we've entered a COND_EXPR. */
410 gimple_push_condition (void)
412 #ifdef ENABLE_GIMPLE_CHECKING
413 if (gimplify_ctxp
->conditions
== 0)
414 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
416 ++(gimplify_ctxp
->conditions
);
419 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
420 now, add any conditional cleanups we've seen to the prequeue. */
423 gimple_pop_condition (gimple_seq
*pre_p
)
425 int conds
= --(gimplify_ctxp
->conditions
);
427 gcc_assert (conds
>= 0);
430 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
431 gimplify_ctxp
->conditional_cleanups
= NULL
;
435 /* A stable comparison routine for use with splay trees and DECLs. */
/* NOTE(review): stability comes from ordering by DECL_UID rather than by
   pointer value.  The lines binding the splay_tree_keys XA/XB to the
   decls A/B used below are elided from this view.  */
438 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
443 return DECL_UID (a
) - DECL_UID (b
);
446 /* Create a new omp construct that deals with variable remapping. */
448 static struct gimplify_omp_ctx
*
449 new_omp_context (enum omp_region_type region_type
)
451 struct gimplify_omp_ctx
*c
;
/* XCNEW zero-initializes, so any member not assigned below starts as 0.  */
453 c
= XCNEW (struct gimplify_omp_ctx
);
/* Link the new context onto the stack of open OMP contexts.  */
454 c
->outer_context
= gimplify_omp_ctxp
;
/* The per-context variable table is keyed by DECL_UID (see
   splay_tree_compare_decl_uid) so traversal order is deterministic.  */
455 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
456 c
->privatized_types
= new hash_set
<tree
>;
457 c
->location
= input_location
;
458 c
->region_type
= region_type
;
/* Task-like regions get "unspecified" default data sharing; every other
   region type defaults to "shared".  */
459 if ((region_type
& ORT_TASK
) == 0)
460 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
462 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
/* Every defaultmap category starts out as GOVD_MAP.  */
463 c
->defaultmap
[GDMK_SCALAR
] = GOVD_MAP
;
464 c
->defaultmap
[GDMK_SCALAR_TARGET
] = GOVD_MAP
;
465 c
->defaultmap
[GDMK_AGGREGATE
] = GOVD_MAP
;
466 c
->defaultmap
[GDMK_ALLOCATABLE
] = GOVD_MAP
;
467 c
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
;
472 /* Destroy an omp construct that deals with variable remapping. */
/* Releases the three owned members (splay tree, hash set, vec).
   NOTE(review): the deallocation of C itself is not visible in this
   view -- confirm against the full source.  */
475 delete_omp_context (struct gimplify_omp_ctx
*c
)
477 splay_tree_delete (c
->variables
);
478 delete c
->privatized_types
;
479 c
->loop_iter_var
.release ();
483 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
484 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
486 /* Both gimplify the statement T and append it to *SEQ_P. This function
487 behaves exactly as gimplify_stmt, but you don't have to pass T as a
491 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
493 gimplify_stmt (&t
, seq_p
);
496 /* Gimplify statement T into sequence *SEQ_P, and return the first
497 tuple in the sequence of generated tuples for this statement.
498 Return NULL if gimplifying T produced no tuples. */
501 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
503 gimple_stmt_iterator last
= gsi_last (*seq_p
);
505 gimplify_and_add (t
, seq_p
);
507 if (!gsi_end_p (last
))
510 return gsi_stmt (last
);
513 return gimple_seq_first_stmt (*seq_p
);
516 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
517 LHS, or for a call argument. */
520 is_gimple_mem_rhs (tree t
)
522 /* If we're dealing with a renamable type, either source or dest must be
523 a renamed variable. */
524 if (is_gimple_reg_type (TREE_TYPE (t
)))
525 return is_gimple_val (t
);
527 return is_gimple_val (t
) || is_gimple_lvalue (t
);
530 /* Return true if T is a CALL_EXPR or an expression that can be
531 assigned to a temporary. Note that this predicate should only be
532 used during gimplification. See the rationale for this in
533 gimplify_modify_expr. */
536 is_gimple_reg_rhs_or_call (tree t
)
538 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
539 || TREE_CODE (t
) == CALL_EXPR
);
542 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
543 this predicate should only be used during gimplification. See the
544 rationale for this in gimplify_modify_expr. */
547 is_gimple_mem_rhs_or_call (tree t
)
549 /* If we're dealing with a renamable type, either source or dest must be
550 a renamed variable. */
551 if (is_gimple_reg_type (TREE_TYPE (t
)))
552 return is_gimple_val (t
);
554 return (is_gimple_val (t
)
555 || is_gimple_lvalue (t
)
556 || TREE_CLOBBER_P (t
)
557 || TREE_CODE (t
) == CALL_EXPR
);
560 /* Create a temporary with a name derived from VAL. Subroutine of
561 lookup_tmp_var; nobody else should call this function. */
564 create_tmp_from_val (tree val
)
566 /* Drop all qualifiers and address-space information from the value type. */
567 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
568 tree var
= create_tmp_var (type
, get_name (val
));
572 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
573 an existing expression temporary. If NOT_GIMPLE_REG, mark it as such. */
576 lookup_tmp_var (tree val
, bool is_formal
, bool not_gimple_reg
)
580 /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P. */
581 gcc_assert (!is_formal
|| !not_gimple_reg
);
583 /* If not optimizing, never really reuse a temporary. local-alloc
584 won't allocate any variable that is used in more than one basic
585 block, which means it will go into memory, causing much extra
586 work in reload and final and poorer code generation, outweighing
587 the extra memory allocation here. */
588 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
590 ret
= create_tmp_from_val (val
);
591 DECL_NOT_GIMPLE_REG_P (ret
) = not_gimple_reg
;
599 if (!gimplify_ctxp
->temp_htab
)
600 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
601 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
604 elt_p
= XNEW (elt_t
);
606 elt_p
->temp
= ret
= create_tmp_from_val (val
);
619 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
622 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
623 bool is_formal
, bool allow_ssa
, bool not_gimple_reg
)
627 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
628 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
629 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
633 && gimplify_ctxp
->into_ssa
634 && is_gimple_reg_type (TREE_TYPE (val
)))
636 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
637 if (! gimple_in_ssa_p (cfun
))
639 const char *name
= get_name (val
);
641 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
645 t
= lookup_tmp_var (val
, is_formal
, not_gimple_reg
);
647 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
649 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
651 /* gimplify_modify_expr might want to reduce this further. */
652 gimplify_and_add (mod
, pre_p
);
658 /* Return a formal temporary variable initialized with VAL. PRE_P is as
659 in gimplify_expr. Only use this function if:
661 1) The value of the unfactored expression represented by VAL will not
662 change between the initialization and use of the temporary, and
663 2) The temporary will not be otherwise modified.
665 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
666 and #2 means it is inappropriate for && temps.
668 For other cases, use get_initialized_tmp_var instead. */
671 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
673 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true, false);
676 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
677 are as in gimplify_expr. */
680 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
,
681 gimple_seq
*post_p
/* = NULL */,
682 bool allow_ssa
/* = true */)
684 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
, false);
687 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
688 generate debug info for them; otherwise don't. */
691 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
698 gbind
*scope
= as_a
<gbind
*> (gs
);
700 temps
= nreverse (last
);
702 block
= gimple_bind_block (scope
);
703 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
704 if (!block
|| !debug_info
)
706 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
707 gimple_bind_set_vars (scope
, temps
);
711 /* We need to attach the nodes both to the BIND_EXPR and to its
712 associated BLOCK for debugging purposes. The key point here
713 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
714 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
715 if (BLOCK_VARS (block
))
716 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
719 gimple_bind_set_vars (scope
,
720 chainon (gimple_bind_vars (scope
), temps
));
721 BLOCK_VARS (block
) = temps
;
727 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
728 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
729 no such upper bound can be obtained. */
732 force_constant_size (tree var
)
734 /* The only attempt we make is by querying the maximum size of objects
735 of the variable's type. */
737 HOST_WIDE_INT max_size
;
739 gcc_assert (VAR_P (var
));
741 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
743 gcc_assert (max_size
>= 0);
746 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
748 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
751 /* Push the temporary variable TMP into the current binding. */
754 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
756 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
758 /* Later processing assumes that the object size is constant, which might
759 not be true at this point. Force the use of a constant upper bound in
761 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
762 force_constant_size (tmp
);
764 DECL_CONTEXT (tmp
) = fn
->decl
;
765 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
767 record_vars_into (tmp
, fn
->decl
);
770 /* Push the temporary variable TMP into the current binding. */
773 gimple_add_tmp_var (tree tmp
)
775 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
777 /* Later processing assumes that the object size is constant, which might
778 not be true at this point. Force the use of a constant upper bound in
780 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
781 force_constant_size (tmp
);
783 DECL_CONTEXT (tmp
) = current_function_decl
;
784 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
788 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
789 gimplify_ctxp
->temps
= tmp
;
791 /* Mark temporaries local within the nearest enclosing parallel. */
792 if (gimplify_omp_ctxp
)
794 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
795 int flag
= GOVD_LOCAL
| GOVD_SEEN
;
797 && (ctx
->region_type
== ORT_WORKSHARE
798 || ctx
->region_type
== ORT_TASKGROUP
799 || ctx
->region_type
== ORT_SIMD
800 || ctx
->region_type
== ORT_ACC
))
802 if (ctx
->region_type
== ORT_SIMD
803 && TREE_ADDRESSABLE (tmp
)
804 && !TREE_STATIC (tmp
))
806 if (TREE_CODE (DECL_SIZE_UNIT (tmp
)) != INTEGER_CST
)
807 ctx
->add_safelen1
= true;
808 else if (ctx
->in_for_exprs
)
811 flag
= GOVD_PRIVATE
| GOVD_SEEN
;
814 ctx
= ctx
->outer_context
;
817 omp_add_variable (ctx
, tmp
, flag
);
826 /* This case is for nested functions. We need to expose the locals
828 body_seq
= gimple_body (current_function_decl
);
829 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
835 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
836 nodes that are referenced more than once in GENERIC functions. This is
837 necessary because gimplification (translation into GIMPLE) is performed
838 by modifying tree nodes in-place, so gimplication of a shared node in a
839 first context could generate an invalid GIMPLE form in a second context.
841 This is achieved with a simple mark/copy/unmark algorithm that walks the
842 GENERIC representation top-down, marks nodes with TREE_VISITED the first
843 time it encounters them, duplicates them if they already have TREE_VISITED
844 set, and finally removes the TREE_VISITED marks it has set.
846 The algorithm works only at the function level, i.e. it generates a GENERIC
847 representation of a function with no nodes shared within the function when
848 passed a GENERIC function (except for nodes that are allowed to be shared).
850 At the global level, it is also necessary to unshare tree nodes that are
851 referenced in more than one function, for the same aforementioned reason.
852 This requires some cooperation from the front-end. There are 2 strategies:
854 1. Manual unsharing. The front-end needs to call unshare_expr on every
855 expression that might end up being shared across functions.
857 2. Deep unsharing. This is an extension of regular unsharing. Instead
858 of calling unshare_expr on expressions that might be shared across
859 functions, the front-end pre-marks them with TREE_VISITED. This will
860 ensure that they are unshared on the first reference within functions
861 when the regular unsharing algorithm runs. The counterpart is that
862 this algorithm must look deeper than for manual unsharing, which is
863 specified by LANG_HOOKS_DEEP_UNSHARING.
865 If there are only few specific cases of node sharing across functions, it is
866 probably easier for a front-end to unshare the expressions manually. On the
867 contrary, if the expressions generated at the global level are as widespread
868 as expressions generated within functions, deep unsharing is very likely the
871 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
872 These nodes model computations that must be done once. If we were to
873 unshare something like SAVE_EXPR(i++), the gimplification process would
874 create wrong code. However, if DATA is non-null, it must hold a pointer
875 set that is used to unshare the subtrees of these nodes. */
878 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
881 enum tree_code code
= TREE_CODE (t
);
883 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
884 copy their subtrees if we can make sure to do it only once. */
885 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
887 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
893 /* Stop at types, decls, constants like copy_tree_r. */
894 else if (TREE_CODE_CLASS (code
) == tcc_type
895 || TREE_CODE_CLASS (code
) == tcc_declaration
896 || TREE_CODE_CLASS (code
) == tcc_constant
)
899 /* Cope with the statement expression extension. */
900 else if (code
== STATEMENT_LIST
)
903 /* Leave the bulk of the work to copy_tree_r itself. */
905 copy_tree_r (tp
, walk_subtrees
, NULL
);
910 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
911 If *TP has been visited already, then *TP is deeply copied by calling
912 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
915 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
918 enum tree_code code
= TREE_CODE (t
);
920 /* Skip types, decls, and constants. But we do want to look at their
921 types and the bounds of types. Mark them as visited so we properly
922 unmark their subtrees on the unmark pass. If we've already seen them,
923 don't look down further. */
924 if (TREE_CODE_CLASS (code
) == tcc_type
925 || TREE_CODE_CLASS (code
) == tcc_declaration
926 || TREE_CODE_CLASS (code
) == tcc_constant
)
928 if (TREE_VISITED (t
))
931 TREE_VISITED (t
) = 1;
934 /* If this node has been visited already, unshare it and don't look
936 else if (TREE_VISITED (t
))
938 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
942 /* Otherwise, mark the node as visited and keep looking. */
944 TREE_VISITED (t
) = 1;
949 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
950 copy_if_shared_r callback unmodified. */
953 copy_if_shared (tree
*tp
, void *data
)
955 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
958 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
959 any nested functions. */
962 unshare_body (tree fndecl
)
964 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
965 /* If the language requires deep unsharing, we need a pointer set to make
966 sure we don't repeatedly unshare subtrees of unshareable nodes. */
967 hash_set
<tree
> *visited
968 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
970 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
971 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
972 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
977 for (cgn
= first_nested_function (cgn
); cgn
;
978 cgn
= next_nested_function (cgn
))
979 unshare_body (cgn
->decl
);
982 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
983 Subtrees are walked until the first unvisited node is encountered. */
986 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
990 /* If this node has been visited, unmark it and keep looking. */
991 if (TREE_VISITED (t
))
992 TREE_VISITED (t
) = 0;
994 /* Otherwise, don't look any deeper. */
1001 /* Unmark the visited trees rooted at *TP. */
1004 unmark_visited (tree
*tp
)
1006 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
1009 /* Likewise, but mark all trees as not visited. */
1012 unvisit_body (tree fndecl
)
1014 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
1016 unmark_visited (&DECL_SAVED_TREE (fndecl
));
1017 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
1018 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
1021 for (cgn
= first_nested_function (cgn
);
1022 cgn
; cgn
= next_nested_function (cgn
))
1023 unvisit_body (cgn
->decl
);
1026 /* Unconditionally make an unshared copy of EXPR. This is used when using
1027 stored expressions which span multiple functions, such as BINFO_VTABLE,
1028 as the normal unsharing process can't tell that they're shared. */
1031 unshare_expr (tree expr
)
1033 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1037 /* Worker for unshare_expr_without_location. */
1040 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
1043 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
1049 /* Similar to unshare_expr but also prune all expression locations
1053 unshare_expr_without_location (tree expr
)
1055 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1057 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
1061 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1062 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1063 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1064 EXPR is the location of the EXPR. */
1067 rexpr_location (tree expr
, location_t or_else
= UNKNOWN_LOCATION
)
1072 if (EXPR_HAS_LOCATION (expr
))
1073 return EXPR_LOCATION (expr
);
1075 if (TREE_CODE (expr
) != STATEMENT_LIST
)
1078 tree_stmt_iterator i
= tsi_start (expr
);
1081 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
1087 if (!found
|| !tsi_one_before_end_p (i
))
1090 return rexpr_location (tsi_stmt (i
), or_else
);
1093 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1094 rexpr_location for the potential recursion. */
1097 rexpr_has_location (tree expr
)
1099 return rexpr_location (expr
) != UNKNOWN_LOCATION
;
1103 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1104 contain statements and have a value. Assign its value to a temporary
1105 and give it void_type_node. Return the temporary, or NULL_TREE if
1106 WRAPPER was already void. */
1109 voidify_wrapper_expr (tree wrapper
, tree temp
)
1111 tree type
= TREE_TYPE (wrapper
);
1112 if (type
&& !VOID_TYPE_P (type
))
1116 /* Set p to point to the body of the wrapper. Loop until we find
1117 something that isn't a wrapper. */
1118 for (p
= &wrapper
; p
&& *p
; )
1120 switch (TREE_CODE (*p
))
1123 TREE_SIDE_EFFECTS (*p
) = 1;
1124 TREE_TYPE (*p
) = void_type_node
;
1125 /* For a BIND_EXPR, the body is operand 1. */
1126 p
= &BIND_EXPR_BODY (*p
);
1129 case CLEANUP_POINT_EXPR
:
1130 case TRY_FINALLY_EXPR
:
1131 case TRY_CATCH_EXPR
:
1132 TREE_SIDE_EFFECTS (*p
) = 1;
1133 TREE_TYPE (*p
) = void_type_node
;
1134 p
= &TREE_OPERAND (*p
, 0);
1137 case STATEMENT_LIST
:
1139 tree_stmt_iterator i
= tsi_last (*p
);
1140 TREE_SIDE_EFFECTS (*p
) = 1;
1141 TREE_TYPE (*p
) = void_type_node
;
1142 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1147 /* Advance to the last statement. Set all container types to
1149 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1151 TREE_SIDE_EFFECTS (*p
) = 1;
1152 TREE_TYPE (*p
) = void_type_node
;
1156 case TRANSACTION_EXPR
:
1157 TREE_SIDE_EFFECTS (*p
) = 1;
1158 TREE_TYPE (*p
) = void_type_node
;
1159 p
= &TRANSACTION_EXPR_BODY (*p
);
1163 /* Assume that any tree upon which voidify_wrapper_expr is
1164 directly called is a wrapper, and that its body is op0. */
1167 TREE_SIDE_EFFECTS (*p
) = 1;
1168 TREE_TYPE (*p
) = void_type_node
;
1169 p
= &TREE_OPERAND (*p
, 0);
1177 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1181 /* The wrapper is on the RHS of an assignment that we're pushing
1183 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1184 || TREE_CODE (temp
) == MODIFY_EXPR
);
1185 TREE_OPERAND (temp
, 1) = *p
;
1190 temp
= create_tmp_var (type
, "retval");
1191 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1200 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1201 a temporary through which they communicate. */
1204 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1208 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1209 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1210 gimple_call_set_lhs (*save
, tmp_var
);
1213 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1217 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1220 build_asan_poison_call_expr (tree decl
)
1222 /* Do not poison variables that have size equal to zero. */
1223 tree unit_size
= DECL_SIZE_UNIT (decl
);
1224 if (zerop (unit_size
))
1227 tree base
= build_fold_addr_expr (decl
);
1229 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1231 build_int_cst (integer_type_node
,
1236 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1237 on POISON flag, shadow memory of a DECL variable. The call will be
1238 put on location identified by IT iterator, where BEFORE flag drives
1239 position where the stmt will be put. */
1242 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1245 tree unit_size
= DECL_SIZE_UNIT (decl
);
1246 tree base
= build_fold_addr_expr (decl
);
1248 /* Do not poison variables that have size equal to zero. */
1249 if (zerop (unit_size
))
1252 /* It's necessary to have all stack variables aligned to ASAN granularity
1254 gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
1255 unsigned shadow_granularity
1256 = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE
: ASAN_SHADOW_GRANULARITY
;
1257 if (DECL_ALIGN_UNIT (decl
) <= shadow_granularity
)
1258 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* shadow_granularity
);
1260 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1263 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1264 build_int_cst (integer_type_node
, flags
),
1268 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1270 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1273 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1274 either poisons or unpoisons a DECL. Created statement is appended
1275 to SEQ_P gimple sequence. */
1278 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1280 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1281 bool before
= false;
1286 asan_poison_variable (decl
, poison
, &it
, before
);
1289 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1292 sort_by_decl_uid (const void *a
, const void *b
)
1294 const tree
*t1
= (const tree
*)a
;
1295 const tree
*t2
= (const tree
*)b
;
1297 int uid1
= DECL_UID (*t1
);
1298 int uid2
= DECL_UID (*t2
);
1302 else if (uid1
> uid2
)
1308 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1309 depending on POISON flag. Created statement is appended
1310 to SEQ_P gimple sequence. */
1313 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1315 unsigned c
= variables
->elements ();
1319 auto_vec
<tree
> sorted_variables (c
);
1321 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1322 it
!= variables
->end (); ++it
)
1323 sorted_variables
.safe_push (*it
);
1325 sorted_variables
.qsort (sort_by_decl_uid
);
1329 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1331 asan_poison_variable (var
, poison
, seq_p
);
1333 /* Add use_after_scope_memory attribute for the variable in order
1334 to prevent re-written into SSA. */
1335 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1336 DECL_ATTRIBUTES (var
)))
1337 DECL_ATTRIBUTES (var
)
1338 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1340 DECL_ATTRIBUTES (var
));
1344 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1346 static enum gimplify_status
1347 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1349 tree bind_expr
= *expr_p
;
1350 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1351 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1354 gimple_seq body
, cleanup
;
1356 location_t start_locus
= 0, end_locus
= 0;
1357 tree ret_clauses
= NULL
;
1359 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1361 /* Mark variables seen in this bind expr. */
1362 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1366 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1370 && !is_global_var (t
)
1371 && DECL_CONTEXT (t
) == current_function_decl
1373 && (attr
= lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t
)))
1376 gcc_assert (!DECL_HAS_VALUE_EXPR_P (t
));
1377 tree alloc
= TREE_PURPOSE (TREE_VALUE (attr
));
1378 tree align
= TREE_VALUE (TREE_VALUE (attr
));
1379 /* Allocate directives that appear in a target region must specify
1380 an allocator clause unless a requires directive with the
1381 dynamic_allocators clause is present in the same compilation
1383 bool missing_dyn_alloc
= false;
1384 if (alloc
== NULL_TREE
1385 && ((omp_requires_mask
& OMP_REQUIRES_DYNAMIC_ALLOCATORS
)
1388 /* This comes too early for omp_discover_declare_target...,
1389 but should at least catch the most common cases. */
1391 = cgraph_node::get (current_function_decl
)->offloadable
;
1392 for (struct gimplify_omp_ctx
*ctx2
= ctx
;
1393 ctx2
&& !missing_dyn_alloc
; ctx2
= ctx2
->outer_context
)
1394 if (ctx2
->code
== OMP_TARGET
)
1395 missing_dyn_alloc
= true;
1397 if (missing_dyn_alloc
)
1398 error_at (DECL_SOURCE_LOCATION (t
),
1399 "%<allocate%> directive for %qD inside a target "
1400 "region must specify an %<allocator%> clause", t
);
1401 /* Skip for omp_default_mem_alloc (= 1),
1402 unless align is present. */
1403 else if (!errorcount
1404 && (align
!= NULL_TREE
1405 || alloc
== NULL_TREE
1406 || !integer_onep (alloc
)))
1408 /* Fortran might already use a pointer type internally;
1409 use that pointer except for type(C_ptr) and type(C_funptr);
1410 note that normal proc pointers are rejected. */
1411 tree type
= TREE_TYPE (t
);
1413 if (lang_GNU_Fortran ()
1414 && POINTER_TYPE_P (type
)
1415 && TREE_TYPE (type
) != void_type_node
1416 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
1418 type
= TREE_TYPE (type
);
1423 tmp
= build_pointer_type (type
);
1424 v
= create_tmp_var (tmp
, get_name (t
));
1425 DECL_IGNORED_P (v
) = 0;
1427 = tree_cons (get_identifier ("omp allocate var"),
1428 build_tree_list (NULL_TREE
, t
),
1429 remove_attribute ("omp allocate",
1430 DECL_ATTRIBUTES (t
)));
1431 tmp
= build_fold_indirect_ref (v
);
1432 TREE_THIS_NOTRAP (tmp
) = 1;
1433 SET_DECL_VALUE_EXPR (t
, tmp
);
1434 DECL_HAS_VALUE_EXPR_P (t
) = 1;
1436 tree sz
= TYPE_SIZE_UNIT (type
);
1437 /* The size to use in Fortran might not match TYPE_SIZE_UNIT;
1438 hence, for some decls, a size variable is saved in the
1439 attributes; use it, if available. */
1440 if (TREE_CHAIN (TREE_VALUE (attr
))
1441 && TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
)))
1443 TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
)))))
1445 sz
= TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
)));
1446 sz
= TREE_PURPOSE (sz
);
1448 if (alloc
== NULL_TREE
)
1449 alloc
= build_zero_cst (ptr_type_node
);
1450 if (align
== NULL_TREE
)
1451 align
= build_int_cst (size_type_node
, DECL_ALIGN_UNIT (t
));
1453 align
= build_int_cst (size_type_node
,
1454 MAX (tree_to_uhwi (align
),
1455 DECL_ALIGN_UNIT (t
)));
1456 location_t loc
= DECL_SOURCE_LOCATION (t
);
1457 tmp
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
1458 tmp
= build_call_expr_loc (loc
, tmp
, 3, align
, sz
, alloc
);
1459 tmp
= fold_build2_loc (loc
, MODIFY_EXPR
, TREE_TYPE (v
), v
,
1460 fold_convert (TREE_TYPE (v
), tmp
));
1461 gcc_assert (BIND_EXPR_BODY (bind_expr
) != NULL_TREE
);
1462 /* Ensure that either TREE_CHAIN (TREE_VALUE (attr) is set
1463 and GOMP_FREE added here or that DECL_HAS_VALUE_EXPR_P (t)
1464 is set, using in a condition much further below. */
1465 gcc_assert (DECL_HAS_VALUE_EXPR_P (t
)
1466 || TREE_CHAIN (TREE_VALUE (attr
)));
1467 if (TREE_CHAIN (TREE_VALUE (attr
)))
1469 /* Fortran is special as it does not have properly nest
1470 declarations in blocks. And as there is no
1471 initializer, there is also no expression to look for.
1472 Hence, the FE makes the statement list of the
1473 try-finally block available. We can put the GOMP_alloc
1474 at the top, unless an allocator or size expression
1475 requires to put it afterward; note that the size is
1476 always later in generated code; for strings, no
1477 size expr but still an expr might be available.
1478 As LTO does not handle a statement list, 'sl' has
1479 to be removed; done so by removing the attribute. */
1481 = remove_attribute ("omp allocate",
1482 DECL_ATTRIBUTES (t
));
1483 tree sl
= TREE_PURPOSE (TREE_CHAIN (TREE_VALUE (attr
)));
1484 tree_stmt_iterator e
= tsi_start (sl
);
1485 tree needle
= NULL_TREE
;
1486 if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
))))
1488 needle
= TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
)));
1489 needle
= (TREE_VALUE (needle
) ? TREE_VALUE (needle
)
1492 else if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
))))
1494 else if (DECL_P (alloc
) && DECL_ARTIFICIAL (alloc
))
1497 if (needle
!= NULL_TREE
)
1499 while (!tsi_end_p (e
))
1502 || (TREE_CODE (*e
) == MODIFY_EXPR
1503 && TREE_OPERAND (*e
, 0) == needle
))
1507 gcc_assert (!tsi_end_p (e
));
1509 tsi_link_after (&e
, tmp
, TSI_SAME_STMT
);
1511 /* As the cleanup is in BIND_EXPR_BODY, GOMP_free is added
1512 here; for C/C++ it will be added in the 'cleanup'
1513 section after gimplification. But Fortran already has
1514 a try-finally block. */
1515 sl
= TREE_VALUE (TREE_CHAIN (TREE_VALUE (attr
)));
1517 tmp
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
1518 tmp
= build_call_expr_loc (EXPR_LOCATION (*e
), tmp
, 2, v
,
1519 build_zero_cst (ptr_type_node
));
1520 tsi_link_after (&e
, tmp
, TSI_SAME_STMT
);
1521 tmp
= build_clobber (TREE_TYPE (v
), CLOBBER_STORAGE_END
);
1522 tmp
= fold_build2_loc (loc
, MODIFY_EXPR
, TREE_TYPE (v
), v
,
1523 fold_convert (TREE_TYPE (v
), tmp
));
1525 tsi_link_after (&e
, tmp
, TSI_SAME_STMT
);
1529 gcc_assert (TREE_CODE (BIND_EXPR_BODY (bind_expr
))
1531 tree_stmt_iterator e
;
1532 e
= tsi_start (BIND_EXPR_BODY (bind_expr
));
1533 while (!tsi_end_p (e
))
1535 if ((TREE_CODE (*e
) == DECL_EXPR
1536 && TREE_OPERAND (*e
, 0) == t
)
1537 || (TREE_CODE (*e
) == CLEANUP_POINT_EXPR
1538 && (TREE_CODE (TREE_OPERAND (*e
, 0))
1540 && (TREE_OPERAND (TREE_OPERAND (*e
, 0), 0)
1545 gcc_assert (!tsi_end_p (e
));
1546 tsi_link_before (&e
, tmp
, TSI_SAME_STMT
);
1551 /* Mark variable as local. */
1552 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
))
1554 if (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1555 || splay_tree_lookup (ctx
->variables
,
1556 (splay_tree_key
) t
) == NULL
)
1558 int flag
= GOVD_LOCAL
;
1559 if (ctx
->region_type
== ORT_SIMD
1560 && TREE_ADDRESSABLE (t
)
1561 && !TREE_STATIC (t
))
1563 if (TREE_CODE (DECL_SIZE_UNIT (t
)) != INTEGER_CST
)
1564 ctx
->add_safelen1
= true;
1566 flag
= GOVD_PRIVATE
;
1568 omp_add_variable (ctx
, t
, flag
| GOVD_SEEN
);
1570 /* Static locals inside of target construct or offloaded
1571 routines need to be "omp declare target". */
1572 if (TREE_STATIC (t
))
1573 for (; ctx
; ctx
= ctx
->outer_context
)
1574 if ((ctx
->region_type
& ORT_TARGET
) != 0)
1576 if (!lookup_attribute ("omp declare target",
1577 DECL_ATTRIBUTES (t
)))
1579 tree id
= get_identifier ("omp declare target");
1581 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (t
));
1582 varpool_node
*node
= varpool_node::get (t
);
1585 node
->offloadable
= 1;
1586 if (ENABLE_OFFLOADING
&& !DECL_EXTERNAL (t
))
1588 g
->have_offload
= true;
1590 vec_safe_push (offload_vars
, t
);
1598 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1600 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1601 cfun
->has_local_explicit_reg_vars
= true;
1605 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1606 BIND_EXPR_BLOCK (bind_expr
));
1607 gimple_push_bind_expr (bind_stmt
);
1609 gimplify_ctxp
->keep_stack
= false;
1610 gimplify_ctxp
->save_stack
= false;
1612 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1614 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1615 gimple_bind_set_body (bind_stmt
, body
);
1617 /* Source location wise, the cleanup code (stack_restore and clobbers)
1618 belongs to the end of the block, so propagate what we have. The
1619 stack_save operation belongs to the beginning of block, which we can
1620 infer from the bind_expr directly if the block has no explicit
1622 if (BIND_EXPR_BLOCK (bind_expr
))
1624 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1625 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1627 if (start_locus
== 0)
1628 start_locus
= EXPR_LOCATION (bind_expr
);
1633 /* Add clobbers for all variables that go out of scope. */
1634 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1637 && !is_global_var (t
)
1638 && DECL_CONTEXT (t
) == current_function_decl
)
1641 && DECL_HAS_VALUE_EXPR_P (t
)
1643 && lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t
)))
1645 /* For Fortran, TREE_CHAIN (TREE_VALUE (attr)) is set, which
1646 causes that the GOMP_free call is already added above;
1647 and "omp allocate" is removed from DECL_ATTRIBUTES. */
1648 tree v
= TREE_OPERAND (DECL_VALUE_EXPR (t
), 0);
1649 tree tmp
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
1650 tmp
= build_call_expr_loc (end_locus
, tmp
, 2, v
,
1651 build_zero_cst (ptr_type_node
));
1652 gimplify_and_add (tmp
, &cleanup
);
1653 gimple
*clobber_stmt
;
1654 tmp
= build_clobber (TREE_TYPE (v
), CLOBBER_STORAGE_END
);
1655 clobber_stmt
= gimple_build_assign (v
, tmp
);
1656 gimple_set_location (clobber_stmt
, end_locus
);
1657 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1659 if (!DECL_HARD_REGISTER (t
)
1660 && !TREE_THIS_VOLATILE (t
)
1661 && !DECL_HAS_VALUE_EXPR_P (t
)
1662 /* Only care for variables that have to be in memory. Others
1663 will be rewritten into SSA names, hence moved to the
1665 && !is_gimple_reg (t
)
1666 && flag_stack_reuse
!= SR_NONE
)
1668 tree clobber
= build_clobber (TREE_TYPE (t
), CLOBBER_STORAGE_END
);
1669 gimple
*clobber_stmt
;
1670 clobber_stmt
= gimple_build_assign (t
, clobber
);
1671 gimple_set_location (clobber_stmt
, end_locus
);
1672 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1675 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1678 if (DECL_HAS_VALUE_EXPR_P (key
))
1680 key
= DECL_VALUE_EXPR (key
);
1681 if (INDIRECT_REF_P (key
))
1682 key
= TREE_OPERAND (key
, 0);
1684 tree
*c
= oacc_declare_returns
->get (key
);
1688 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1690 ret_clauses
= unshare_expr (*c
);
1692 oacc_declare_returns
->remove (key
);
1694 if (oacc_declare_returns
->is_empty ())
1696 delete oacc_declare_returns
;
1697 oacc_declare_returns
= NULL
;
1703 if (asan_poisoned_variables
!= NULL
1704 && asan_poisoned_variables
->contains (t
))
1706 asan_poisoned_variables
->remove (t
);
1707 asan_poison_variable (t
, true, &cleanup
);
1710 if (gimplify_ctxp
->live_switch_vars
!= NULL
1711 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1712 gimplify_ctxp
->live_switch_vars
->remove (t
);
1715 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1716 the stack space allocated to the VLAs. */
1717 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1719 gcall
*stack_restore
;
1721 /* Save stack on entry and restore it on exit. Add a try_finally
1722 block to achieve this. */
1723 build_stack_save_restore (&stack_save
, &stack_restore
);
1725 gimple_set_location (stack_save
, start_locus
);
1726 gimple_set_location (stack_restore
, end_locus
);
1728 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1734 gimple_stmt_iterator si
= gsi_start (cleanup
);
1736 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1738 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1744 gimple_seq new_body
;
1747 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1748 GIMPLE_TRY_FINALLY
);
1751 gimplify_seq_add_stmt (&new_body
, stack_save
);
1752 gimplify_seq_add_stmt (&new_body
, gs
);
1753 gimple_bind_set_body (bind_stmt
, new_body
);
1756 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1757 if (!gimplify_ctxp
->keep_stack
)
1758 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1759 gimplify_ctxp
->save_stack
= old_save_stack
;
1761 gimple_pop_bind_expr ();
1763 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1771 *expr_p
= NULL_TREE
;
1775 /* Maybe add early return predict statement to PRE_P sequence. */
1778 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1780 /* If we are not in a conditional context, add PREDICT statement. */
1781 if (gimple_conditional_context ())
1783 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1785 gimplify_seq_add_stmt (pre_p
, predict
);
1789 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1790 GIMPLE value, it is assigned to a new temporary and the statement is
1791 re-written to return the temporary.
1793 PRE_P points to the sequence where side effects that must happen before
1794 STMT should be stored. */
1796 static enum gimplify_status
1797 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1800 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1801 tree result_decl
, result
;
1803 if (ret_expr
== error_mark_node
)
1807 || TREE_CODE (ret_expr
) == RESULT_DECL
)
1809 maybe_add_early_return_predict_stmt (pre_p
);
1810 greturn
*ret
= gimple_build_return (ret_expr
);
1811 copy_warning (ret
, stmt
);
1812 gimplify_seq_add_stmt (pre_p
, ret
);
1816 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1817 result_decl
= NULL_TREE
;
1818 else if (TREE_CODE (ret_expr
) == COMPOUND_EXPR
)
1820 /* Used in C++ for handling EH cleanup of the return value if a local
1821 cleanup throws. Assume the front-end knows what it's doing. */
1822 result_decl
= DECL_RESULT (current_function_decl
);
1823 /* But crash if we end up trying to modify ret_expr below. */
1824 ret_expr
= NULL_TREE
;
1828 result_decl
= TREE_OPERAND (ret_expr
, 0);
1830 /* See through a return by reference. */
1831 if (INDIRECT_REF_P (result_decl
))
1832 result_decl
= TREE_OPERAND (result_decl
, 0);
1834 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1835 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1836 && TREE_CODE (result_decl
) == RESULT_DECL
);
1839 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1840 Recall that aggregate_value_p is FALSE for any aggregate type that is
1841 returned in registers. If we're returning values in registers, then
1842 we don't want to extend the lifetime of the RESULT_DECL, particularly
1843 across another call. In addition, for those aggregates for which
1844 hard_function_value generates a PARALLEL, we'll die during normal
1845 expansion of structure assignments; there's special code in expand_return
1846 to handle this case that does not exist in expand_expr. */
1849 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1851 if (!poly_int_tree_p (DECL_SIZE (result_decl
)))
1853 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1854 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1855 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1856 should be effectively allocated by the caller, i.e. all calls to
1857 this function must be subject to the Return Slot Optimization. */
1858 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1859 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1861 result
= result_decl
;
1863 else if (gimplify_ctxp
->return_temp
)
1864 result
= gimplify_ctxp
->return_temp
;
1867 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1869 /* ??? With complex control flow (usually involving abnormal edges),
1870 we can wind up warning about an uninitialized value for this. Due
1871 to how this variable is constructed and initialized, this is never
1872 true. Give up and never warn. */
1873 suppress_warning (result
, OPT_Wuninitialized
);
1875 gimplify_ctxp
->return_temp
= result
;
1878 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1879 Then gimplify the whole thing. */
1880 if (result
!= result_decl
)
1881 TREE_OPERAND (ret_expr
, 0) = result
;
1883 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1885 maybe_add_early_return_predict_stmt (pre_p
);
1886 ret
= gimple_build_return (result
);
1887 copy_warning (ret
, stmt
);
1888 gimplify_seq_add_stmt (pre_p
, ret
);
1893 /* Gimplify a variable-length array DECL. */
1896 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1898 /* This is a variable-sized decl. Simplify its size and mark it
1899 for deferred expansion. */
1900 tree t
, addr
, ptr_type
;
1902 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1903 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1905 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1906 if (DECL_HAS_VALUE_EXPR_P (decl
))
1909 /* All occurrences of this decl in final gimplified code will be
1910 replaced by indirection. Setting DECL_VALUE_EXPR does two
1911 things: First, it lets the rest of the gimplifier know what
1912 replacement to use. Second, it lets the debug info know
1913 where to find the value. */
1914 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1915 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1916 DECL_IGNORED_P (addr
) = 0;
1917 t
= build_fold_indirect_ref (addr
);
1918 TREE_THIS_NOTRAP (t
) = 1;
1919 SET_DECL_VALUE_EXPR (decl
, t
);
1920 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1922 t
= build_alloca_call_expr (DECL_SIZE_UNIT (decl
), DECL_ALIGN (decl
),
1923 max_int_size_in_bytes (TREE_TYPE (decl
)));
1924 /* The call has been built for a variable-sized object. */
1925 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1926 t
= fold_convert (ptr_type
, t
);
1927 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1929 gimplify_and_add (t
, seq_p
);
1931 /* Record the dynamic allocation associated with DECL if requested. */
1932 if (flag_callgraph_info
& CALLGRAPH_INFO_DYNAMIC_ALLOC
)
1933 record_dynamic_alloc (decl
);
1936 /* A helper function to be called via walk_tree. Mark all labels under *TP
1937 as being forced. To be called for DECL_INITIAL of static variables. */
1940 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1944 if (TREE_CODE (*tp
) == LABEL_DECL
)
1946 FORCED_LABEL (*tp
) = 1;
1947 cfun
->has_forced_label_in_static
= 1;
1953 /* Generate an initialization to automatic variable DECL based on INIT_TYPE.
1954 Build a call to internal const function DEFERRED_INIT:
1955 1st argument: SIZE of the DECL;
1956 2nd argument: INIT_TYPE;
1957 3rd argument: NAME of the DECL;
1959 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
1962 gimple_add_init_for_auto_var (tree decl
,
1963 enum auto_init_type init_type
,
1966 gcc_assert (auto_var_p (decl
));
1967 gcc_assert (init_type
> AUTO_INIT_UNINITIALIZED
);
1968 location_t loc
= EXPR_LOCATION (decl
);
1969 tree decl_size
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1972 = build_int_cst (integer_type_node
, (int) init_type
);
1974 tree decl_name
= NULL_TREE
;
1975 if (DECL_NAME (decl
))
1977 decl_name
= build_string_literal (DECL_NAME (decl
));
1981 char decl_name_anonymous
[3 + (HOST_BITS_PER_INT
+ 2) / 3];
1982 sprintf (decl_name_anonymous
, "D.%u", DECL_UID (decl
));
1983 decl_name
= build_string_literal (decl_name_anonymous
);
1986 tree call
= build_call_expr_internal_loc (loc
, IFN_DEFERRED_INIT
,
1987 TREE_TYPE (decl
), 3,
1988 decl_size
, init_type_node
,
1991 gimplify_assign (decl
, call
, seq_p
);
1994 /* Generate padding initialization for automatic vairable DECL.
1995 C guarantees that brace-init with fewer initializers than members
1996 aggregate will initialize the rest of the aggregate as-if it were
1997 static initialization. In turn static initialization guarantees
1998 that padding is initialized to zero. So, we always initialize paddings
1999 to zeroes regardless INIT_TYPE.
2000 To do the padding initialization, we insert a call to
2001 __builtin_clear_padding (&decl, 0, for_auto_init = true).
2002 Note, we add an additional dummy argument for __builtin_clear_padding,
2003 'for_auto_init' to distinguish whether this call is for automatic
2004 variable initialization or not.
2007 gimple_add_padding_init_for_auto_var (tree decl
, bool is_vla
,
2010 tree addr_of_decl
= NULL_TREE
;
2011 tree fn
= builtin_decl_explicit (BUILT_IN_CLEAR_PADDING
);
2015 /* The temporary address variable for this vla should be
2016 created in gimplify_vla_decl. */
2017 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
2018 gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl
)));
2019 addr_of_decl
= TREE_OPERAND (DECL_VALUE_EXPR (decl
), 0);
2023 mark_addressable (decl
);
2024 addr_of_decl
= build_fold_addr_expr (decl
);
2027 gimple
*call
= gimple_build_call (fn
, 2, addr_of_decl
,
2028 build_one_cst (TREE_TYPE (addr_of_decl
)));
2029 gimplify_seq_add_stmt (seq_p
, call
);
2032 /* Return true if the DECL need to be automaticly initialized by the
2035 is_var_need_auto_init (tree decl
)
2037 if (auto_var_p (decl
)
2038 && (TREE_CODE (decl
) != VAR_DECL
2039 || !DECL_HARD_REGISTER (decl
))
2040 && (flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
)
2041 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl
)))
2042 && !OPAQUE_TYPE_P (TREE_TYPE (decl
))
2043 && !is_empty_type (TREE_TYPE (decl
)))
2048 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
2049 and initialization explicit. */
2051 static enum gimplify_status
2052 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
2054 tree stmt
= *stmt_p
;
2055 tree decl
= DECL_EXPR_DECL (stmt
);
2057 *stmt_p
= NULL_TREE
;
2059 if (TREE_TYPE (decl
) == error_mark_node
)
2062 if ((TREE_CODE (decl
) == TYPE_DECL
2064 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
2066 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
2067 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
2068 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
2071 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
2072 in case its size expressions contain problematic nodes like CALL_EXPR. */
2073 if (TREE_CODE (decl
) == TYPE_DECL
2074 && DECL_ORIGINAL_TYPE (decl
)
2075 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
2077 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
2078 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
2079 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
2082 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
2084 tree init
= DECL_INITIAL (decl
);
2085 bool is_vla
= false;
2086 /* Check whether a decl has FE created VALUE_EXPR here BEFORE
2087 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
2088 If the decl has VALUE_EXPR that was created by FE (usually
2089 C++FE), it's a proxy varaible, and FE already initialized
2090 the VALUE_EXPR of it, we should not initialize it anymore. */
2091 bool decl_had_value_expr_p
= DECL_HAS_VALUE_EXPR_P (decl
);
2094 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl
), &size
)
2095 || (!TREE_STATIC (decl
)
2096 && flag_stack_check
== GENERIC_STACK_CHECK
2098 (unsigned HOST_WIDE_INT
) STACK_CHECK_MAX_VAR_SIZE
)))
2100 gimplify_vla_decl (decl
, seq_p
);
2104 if (asan_poisoned_variables
2106 && TREE_ADDRESSABLE (decl
)
2107 && !TREE_STATIC (decl
)
2108 && !DECL_HAS_VALUE_EXPR_P (decl
)
2109 && DECL_ALIGN (decl
) <= MAX_SUPPORTED_STACK_ALIGNMENT
2110 && dbg_cnt (asan_use_after_scope
)
2111 && !gimplify_omp_ctxp
2112 /* GNAT introduces temporaries to hold return values of calls in
2113 initializers of variables defined in other units, so the
2114 declaration of the variable is discarded completely. We do not
2115 want to issue poison calls for such dropped variables. */
2116 && (DECL_SEEN_IN_BIND_EXPR_P (decl
)
2117 || (DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)))
2119 asan_poisoned_variables
->add (decl
);
2120 asan_poison_variable (decl
, false, seq_p
);
2121 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
2122 gimplify_ctxp
->live_switch_vars
->add (decl
);
2125 /* Some front ends do not explicitly declare all anonymous
2126 artificial variables. We compensate here by declaring the
2127 variables, though it would be better if the front ends would
2128 explicitly declare them. */
2129 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
2130 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
2131 gimple_add_tmp_var (decl
);
2133 if (init
&& init
!= error_mark_node
)
2135 if (!TREE_STATIC (decl
))
2137 DECL_INITIAL (decl
) = NULL_TREE
;
2138 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
2139 gimplify_and_add (init
, seq_p
);
2141 /* Clear TREE_READONLY if we really have an initialization. */
2142 if (!DECL_INITIAL (decl
)
2143 && !omp_privatize_by_reference (decl
))
2144 TREE_READONLY (decl
) = 0;
2147 /* We must still examine initializers for static variables
2148 as they may contain a label address. */
2149 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
2151 /* When there is no explicit initializer, if the user requested,
2152 We should insert an artifical initializer for this automatic
2154 else if (is_var_need_auto_init (decl
)
2155 && !decl_had_value_expr_p
)
2157 gimple_add_init_for_auto_var (decl
,
2160 /* The expanding of a call to the above .DEFERRED_INIT will apply
2161 block initialization to the whole space covered by this variable.
2162 As a result, all the paddings will be initialized to zeroes
2163 for zero initialization and 0xFE byte-repeatable patterns for
2164 pattern initialization.
2165 In order to make the paddings as zeroes for pattern init, We
2166 should add a call to __builtin_clear_padding to clear the
2167 paddings to zero in compatiple with CLANG.
2168 We cannot insert this call if the variable is a gimple register
2169 since __builtin_clear_padding will take the address of the
2170 variable. As a result, if a long double/_Complex long double
2171 variable will spilled into stack later, its padding is 0XFE. */
2172 if (flag_auto_var_init
== AUTO_INIT_PATTERN
2173 && !is_gimple_reg (decl
)
2174 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl
)))
2175 gimple_add_padding_init_for_auto_var (decl
, is_vla
, seq_p
);
2182 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
2183 and replacing the LOOP_EXPR with goto, but if the loop contains an
2184 EXIT_EXPR, we need to append a label for it to jump to. */
2186 static enum gimplify_status
2187 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2189 tree saved_label
= gimplify_ctxp
->exit_label
;
2190 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
2192 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
2194 gimplify_ctxp
->exit_label
= NULL_TREE
;
2196 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
2198 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
2200 if (gimplify_ctxp
->exit_label
)
2201 gimplify_seq_add_stmt (pre_p
,
2202 gimple_build_label (gimplify_ctxp
->exit_label
));
2204 gimplify_ctxp
->exit_label
= saved_label
;
2210 /* Gimplify a statement list onto a sequence. These may be created either
2211 by an enlightened front-end, or by shortcut_cond_expr. */
2213 static enum gimplify_status
2214 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
2216 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
2218 tree_stmt_iterator i
= tsi_start (*expr_p
);
2220 while (!tsi_end_p (i
))
2222 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
2236 /* Emit warning for the unreachable statment STMT if needed.
2237 Return the gimple itself when the warning is emitted, otherwise
2240 emit_warn_switch_unreachable (gimple
*stmt
)
2242 if (gimple_code (stmt
) == GIMPLE_GOTO
2243 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
2244 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
2245 /* Don't warn for compiler-generated gotos. These occur
2246 in Duff's devices, for example. */
2248 else if ((flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
)
2249 && ((gimple_call_internal_p (stmt
, IFN_DEFERRED_INIT
))
2250 || (gimple_call_builtin_p (stmt
, BUILT_IN_CLEAR_PADDING
)
2251 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt
, 1)))
2252 || (is_gimple_assign (stmt
)
2253 && gimple_assign_single_p (stmt
)
2254 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
2255 && gimple_call_internal_p (
2256 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
)),
2257 IFN_DEFERRED_INIT
))))
2258 /* Don't warn for compiler-generated initializations for
2259 -ftrivial-auto-var-init.
2261 case 1: a call to .DEFERRED_INIT;
2262 case 2: a call to __builtin_clear_padding with the 2nd argument is
2263 present and non-zero;
2264 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2265 that has the LHS of .DEFERRED_INIT as the RHS as following:
2266 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2270 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
2271 "statement will never be executed");
2275 /* Callback for walk_gimple_seq. */
/* NOTE(review): this chunk is a mangled extraction -- logical lines are split
   and some original lines are missing (gaps in the embedded line numbers);
   code text below is preserved byte-for-byte.  */
/* Walk callback used on a switch body: warns (-Wswitch-unreachable) about the
   first real statement before any case label, and (-Wtrivial-auto-var-init)
   about .DEFERRED_INIT calls that cannot initialize their variable there.
   WI->info carries whether the unreachable warning was already issued.  */
2278 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator
*gsi_p
,
2279 bool *handled_ops_p
,
2280 struct walk_stmt_info
*wi
)
2282 gimple
*stmt
= gsi_stmt (*gsi_p
);
/* Non-NULL WI->info means emit_warn_switch_unreachable already fired.  */
2283 bool unreachable_issued
= wi
->info
!= NULL
;
2285 *handled_ops_p
= true;
2286 switch (gimple_code (stmt
))
2289 /* A compiler-generated cleanup or a user-written try block.
2290 If it's empty, don't dive into it--that would result in
2291 worse location info. */
2292 if (gimple_try_eval (stmt
) == NULL
)
2294 if (warn_switch_unreachable
&& !unreachable_issued
)
2295 wi
->info
= emit_warn_switch_unreachable (stmt
);
2297 /* Stop when auto var init warning is not on. */
2298 if (!warn_trivial_auto_var_init
)
2299 return integer_zero_node
;
2304 case GIMPLE_EH_FILTER
:
2305 case GIMPLE_TRANSACTION
:
2306 /* Walk the sub-statements. */
2307 *handled_ops_p
= false;
2311 /* Ignore these. We may generate them before declarations that
2312 are never executed. If there's something to warn about,
2313 there will be non-debug stmts too, and we'll catch those. */
2317 /* Stop till the first Label. */
2318 return integer_zero_node
;
2320 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2322 *handled_ops_p
= false;
2325 if (warn_trivial_auto_var_init
2326 && flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
2327 && gimple_call_internal_p (stmt
, IFN_DEFERRED_INIT
))
2329 /* Get the variable name from the 3rd argument of call. */
2330 tree var_name
= gimple_call_arg (stmt
, 2);
2331 var_name
= TREE_OPERAND (TREE_OPERAND (var_name
, 0), 0);
2332 const char *var_name_str
= TREE_STRING_POINTER (var_name
);
2334 warning_at (gimple_location (stmt
), OPT_Wtrivial_auto_var_init
,
/* NOTE(review): the concatenated diagnostic appears to lack a space between
   "with" and "%<", and spells the option "-ftrivial-auto-var_init" with an
   underscore -- verify against upstream before relying on this text.  */
2335 "%qs cannot be initialized with"
2336 "%<-ftrivial-auto-var_init%>",
2343 /* check the first "real" statement (not a decl/lexical scope/...), issue
2344 warning if needed. */
2345 if (warn_switch_unreachable
&& !unreachable_issued
)
2346 wi
->info
= emit_warn_switch_unreachable (stmt
);
2347 /* Stop when auto var init warning is not on. */
2348 if (!warn_trivial_auto_var_init
)
2349 return integer_zero_node
;
2356 /* Possibly warn about unreachable statements between switch's controlling
2357 expression and the first case. Also warn about -ftrivial-auto-var-init
2358 cannot initialize the auto variable under such situation.
2359 SEQ is the body of a switch expression. */
/* Driver: bails out when both warnings are off (or for Fortran, which
   interacts badly with this analysis), then walks SEQ with
   warn_switch_unreachable_and_auto_init_r.  Text preserved byte-for-byte
   from a mangled extraction; some original lines are missing.  */
2362 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq
)
2364 if ((!warn_switch_unreachable
&& !warn_trivial_auto_var_init
)
2365 /* This warning doesn't play well with Fortran when optimizations
2367 || lang_GNU_Fortran ()
2371 struct walk_stmt_info wi
;
2373 memset (&wi
, 0, sizeof (wi
));
2374 walk_gimple_seq (seq
, warn_switch_unreachable_and_auto_init_r
, NULL
, &wi
);
2378 /* A label entry that pairs label and a location. */
2385 /* Find LABEL in vector of label entries VEC. */
/* Linear scan over VEC; returns the matching label_entry, presumably NULL
   when absent (the return-NULL line is missing from this extraction --
   TODO confirm against upstream).  */
2387 static struct label_entry
*
2388 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
2391 struct label_entry
*l
;
2393 FOR_EACH_VEC_ELT (*vec
, i
, l
)
2394 if (l
->label
== label
)
2399 /* Return true if LABEL, a LABEL_DECL, represents a case label
2400 in a vector of labels CASES. */
/* Linear membership test comparing CASE_LABEL of each element with LABEL.
   The `return true/false` lines were dropped by the extraction.  */
2403 case_label_p (const vec
<tree
> *cases
, tree label
)
2408 FOR_EACH_VEC_ELT (*cases
, i
, l
)
2409 if (CASE_LABEL (l
) == label
)
2414 /* Find the last nondebug statement in a scope STMT. */
/* Recurses into GIMPLE_BIND bodies and GIMPLE_TRY eval/cleanup sequences to
   locate the innermost trailing statement.  Text preserved byte-for-byte
   from a mangled extraction; surrounding braces/cases are missing.  */
2417 last_stmt_in_scope (gimple
*stmt
)
2422 switch (gimple_code (stmt
))
2426 gbind
*bind
= as_a
<gbind
*> (stmt
);
2427 stmt
= gimple_seq_last_nondebug_stmt (gimple_bind_body (bind
));
2428 return last_stmt_in_scope (stmt
);
2433 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
2434 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt
));
2435 gimple
*last_eval
= last_stmt_in_scope (stmt
);
/* NOTE(review): last_eval is passed to gimple_stmt_may_fallthru before the
   `== NULL` test below -- presumably that helper tolerates NULL; confirm.  */
2436 if (gimple_stmt_may_fallthru (last_eval
)
2437 && (last_eval
== NULL
2438 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
2439 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
/* For a TRY_FINALLY whose body may fall through, the scope's last statement
   is in the cleanup sequence instead.  */
2441 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt
));
2442 return last_stmt_in_scope (stmt
);
2456 /* Collect labels that may fall through into LABELS and return the statement
2457 preceding another case label, or a user-defined label. Store a location
2458 useful to give warnings at *PREVLOC (usually the location of the returned
2459 statement or of its surrounding scope). */
/* Helper of warn_implicit_fallthrough_r.  Advances *GSI_P through the
   sequence, recording in LABELS the artificial labels that can be reached by
   falling through.  Text preserved byte-for-byte from a mangled extraction;
   the enclosing do/while structure and several lines are missing.  */
2462 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
2463 auto_vec
<struct label_entry
> *labels
,
2464 location_t
*prevloc
)
2466 gimple
*prev
= NULL
;
2468 *prevloc
= UNKNOWN_LOCATION
;
2471 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
)
2473 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2474 which starts on a GIMPLE_SWITCH and ends with a break label.
2475 Handle that as a single statement that can fall through. */
2476 gbind
*bind
= as_a
<gbind
*> (gsi_stmt (*gsi_p
));
2477 gimple
*first
= gimple_seq_first_stmt (gimple_bind_body (bind
));
2478 gimple
*last
= gimple_seq_last_stmt (gimple_bind_body (bind
));
2480 && gimple_code (first
) == GIMPLE_SWITCH
2481 && gimple_code (last
) == GIMPLE_LABEL
)
2483 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2484 if (SWITCH_BREAK_LABEL_P (label
))
2492 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
2493 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
2495 /* Nested scope. Only look at the last statement of
2496 the innermost scope. */
2497 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
2498 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
2502 /* It might be a label without a location. Use the
2503 location of the scope then. */
2504 if (!gimple_has_location (prev
))
2505 *prevloc
= bind_loc
;
2511 /* Ifs are tricky. */
2512 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
2514 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
2515 tree false_lab
= gimple_cond_false_label (cond_stmt
);
2516 location_t if_loc
= gimple_location (cond_stmt
);
2519 if (i > 1) goto <D.2259>; else goto D;
2520 we can't do much with the else-branch. */
2521 if (!DECL_ARTIFICIAL (false_lab
))
2524 /* Go on until the false label, then one step back. */
2525 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
2527 gimple
*stmt
= gsi_stmt (*gsi_p
);
2528 if (gimple_code (stmt
) == GIMPLE_LABEL
2529 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
2533 /* Not found? Oops. */
2534 if (gsi_end_p (*gsi_p
))
2537 /* A dead label can't fall through. */
2538 if (!UNUSED_LABEL_P (false_lab
))
2540 struct label_entry l
= { false_lab
, if_loc
};
2541 labels
->safe_push (l
);
2544 /* Go to the last statement of the then branch. */
2547 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2553 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
2554 && !gimple_has_location (gsi_stmt (*gsi_p
)))
2556 /* Look at the statement before, it might be
2557 attribute fallthrough, in which case don't warn. */
2559 bool fallthru_before_dest
2560 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
2562 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
2563 if (!fallthru_before_dest
)
2565 struct label_entry l
= { goto_dest
, if_loc
};
2566 labels
->safe_push (l
);
2569 /* This case is about
2570 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2575 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2576 through to #3. So set PREV to #1. */
2577 else if (UNUSED_LABEL_P (false_lab
))
2578 prev
= gsi_stmt (*gsi_p
);
2580 /* And move back. */
2584 /* Remember the last statement. Skip labels that are of no interest
2586 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2588 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
2589 if (find_label_entry (labels
, label
))
2590 prev
= gsi_stmt (*gsi_p
);
2592 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
2594 else if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_PREDICT
)
2596 else if (!is_gimple_debug (gsi_stmt (*gsi_p
)))
2597 prev
= gsi_stmt (*gsi_p
);
2600 while (!gsi_end_p (*gsi_p
)
2601 /* Stop if we find a case or a user-defined label. */
2602 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2603 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2605 if (prev
&& gimple_has_location (prev
))
2606 *prevloc
= gimple_location (prev
);
2610 /* Return true if the switch fallthough warning should occur. LABEL is
2611 the label statement that we're falling through to. */
/* Suppresses -Wimplicit-fallthrough for: labels marked "falls through",
   non-case labels followed by a statement, and case labels that are
   immediately terminated (break/goto/return).  Text preserved byte-for-byte
   from a mangled extraction; some lines (returns, declarations) missing.  */
2614 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2616 gimple_stmt_iterator gsi
= *gsi_p
;
2618 /* Don't warn if the label is marked with a "falls through" comment. */
2619 if (FALLTHROUGH_LABEL_P (label
))
2622 /* Don't warn for non-case labels followed by a statement:
2627 as these are likely intentional. */
2628 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
2631 while (!gsi_end_p (gsi
)
2632 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2633 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2634 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2635 gsi_next_nondebug (&gsi
);
2636 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2640 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2641 immediately breaks. */
2644 /* Skip all immediately following labels. */
2645 while (!gsi_end_p (gsi
)
2646 && (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2647 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_PREDICT
))
2648 gsi_next_nondebug (&gsi
);
2650 /* { ... something; default:; } */
2652 /* { ... something; default: break; } or
2653 { ... something; default: goto L; } */
2654 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2655 /* { ... something; default: return; } */
2656 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2662 /* Callback for walk_gimple_seq. */
/* Detects label->statements->label sequences where control may fall from one
   case into the next, and emits -Wimplicit-fallthrough.  Text preserved
   byte-for-byte from a mangled extraction; case labels, braces, and several
   lines are missing.  */
2665 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2666 struct walk_stmt_info
*)
2668 gimple
*stmt
= gsi_stmt (*gsi_p
);
2670 *handled_ops_p
= true;
2671 switch (gimple_code (stmt
))
2676 case GIMPLE_EH_FILTER
:
2677 case GIMPLE_TRANSACTION
:
2678 /* Walk the sub-statements. */
2679 *handled_ops_p
= false;
2682 /* Find a sequence of form:
2689 and possibly warn. */
2692 /* Found a label. Skip all immediately following labels. */
2693 while (!gsi_end_p (*gsi_p
)
2694 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2695 gsi_next_nondebug (gsi_p
);
2697 /* There might be no more statements. */
2698 if (gsi_end_p (*gsi_p
))
2699 return integer_zero_node
;
2701 /* Vector of labels that fall through. */
2702 auto_vec
<struct label_entry
> labels
;
2704 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
, &prevloc
);
2706 /* There might be no more statements. */
2707 if (gsi_end_p (*gsi_p
))
2708 return integer_zero_node
;
2710 gimple
*next
= gsi_stmt (*gsi_p
);
2712 /* If what follows is a label, then we may have a fallthrough. */
2713 if (gimple_code (next
) == GIMPLE_LABEL
2714 && gimple_has_location (next
)
2715 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2718 struct label_entry
*l
;
2719 bool warned_p
= false;
2720 auto_diagnostic_group d
;
2721 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2723 else if (gimple_code (prev
) == GIMPLE_LABEL
2724 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2725 && (l
= find_label_entry (&labels
, label
)))
2726 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2727 "this statement may fall through");
2728 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2729 /* Try to be clever and don't warn when the statement
2730 can't actually fall through. */
2731 && gimple_stmt_may_fallthru (prev
)
2732 && prevloc
!= UNKNOWN_LOCATION
)
2733 warned_p
= warning_at (prevloc
,
2734 OPT_Wimplicit_fallthrough_
,
2735 "this statement may fall through");
2737 inform (gimple_location (next
), "here");
2739 /* Mark this label as processed so as to prevent multiple
2740 warnings in nested switches. */
2741 FALLTHROUGH_LABEL_P (label
) = true;
2743 /* So that next warn_implicit_fallthrough_r will start looking for
2744 a new sequence starting with this label. */
2755 /* Warn when a switch case falls through. */
/* Driver for -Wimplicit-fallthrough: bail out when the warning is off or the
   language isn't C-family (the language-check condition is partly missing
   from this extraction), then walk SEQ with warn_implicit_fallthrough_r.  */
2758 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2760 if (!warn_implicit_fallthrough
)
2763 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2766 || lang_GNU_OBJC ()))
2769 struct walk_stmt_info wi
;
2770 memset (&wi
, 0, sizeof (wi
));
2771 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2774 /* Callback for walk_gimple_seq. */
/* Removes each IFN_FALLTHROUGH internal call and verifies a case/default
   label follows; otherwise pedwarns.  WI->info points at a two-element
   location_t array used by expand_FALLTHROUGH to report a trailing
   fallthrough at the end of a switch.  Text preserved byte-for-byte from a
   mangled extraction; braces and several lines are missing.  */
2777 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2778 struct walk_stmt_info
*wi
)
2780 gimple
*stmt
= gsi_stmt (*gsi_p
);
2782 *handled_ops_p
= true;
2783 switch (gimple_code (stmt
))
2788 case GIMPLE_EH_FILTER
:
2789 case GIMPLE_TRANSACTION
:
2790 /* Walk the sub-statements. */
2791 *handled_ops_p
= false;
2794 static_cast<location_t
*>(wi
->info
)[0] = UNKNOWN_LOCATION
;
2795 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2797 location_t loc
= gimple_location (stmt
);
2798 gsi_remove (gsi_p
, true);
2799 wi
->removed_stmt
= true;
2801 /* nothrow flag is added by genericize_c_loop to mark fallthrough
2802 statement at the end of some loop's body. Those should be
2803 always diagnosed, either because they indeed don't precede
2804 a case label or default label, or because the next statement
2805 is not within the same iteration statement. */
2806 if ((stmt
->subcode
& GF_CALL_NOTHROW
) != 0)
2808 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2809 "a case label or default label");
2813 if (gsi_end_p (*gsi_p
))
2815 static_cast<location_t
*>(wi
->info
)[0] = BUILTINS_LOCATION
;
2816 static_cast<location_t
*>(wi
->info
)[1] = loc
;
2822 gimple_stmt_iterator gsi2
= *gsi_p
;
2823 stmt
= gsi_stmt (gsi2
);
2824 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2826 /* Go on until the artificial label. */
2827 tree goto_dest
= gimple_goto_dest (stmt
);
2828 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2830 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2831 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2836 /* Not found? Stop. */
2837 if (gsi_end_p (gsi2
))
2840 /* Look one past it. */
2844 /* We're looking for a case label or default label here. */
2845 while (!gsi_end_p (gsi2
))
2847 stmt
= gsi_stmt (gsi2
);
2848 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2850 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2851 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2857 else if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2859 else if (!is_gimple_debug (stmt
))
2860 /* Anything else is not expected. */
2865 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2866 "a case label or default label");
2870 static_cast<location_t
*>(wi
->info
)[0] = UNKNOWN_LOCATION
;
2876 /* Expand all FALLTHROUGH () calls in SEQ. */
/* Sets up a two-element location buffer in WI->info (the `location_t loc[2]`
   declaration appears to have been dropped by the extraction) and walks the
   sequence with expand_FALLTHROUGH_r; a non-UNKNOWN loc[0] afterwards means
   a [[fallthrough]] ended the switch, which is diagnosed.  */
2879 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2881 struct walk_stmt_info wi
;
2883 memset (&wi
, 0, sizeof (wi
));
2884 loc
[0] = UNKNOWN_LOCATION
;
2885 loc
[1] = UNKNOWN_LOCATION
;
2886 wi
.info
= (void *) &loc
[0];
2887 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2888 if (loc
[0] != UNKNOWN_LOCATION
)
2889 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2890 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2891 pedwarn (loc
[1], 0, "attribute %<fallthrough%> not preceding "
2892 "a case label or default label");
2896 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
/* Lowers a SWITCH_EXPR: gimplifies the condition, gimplifies the body while
   collecting case labels in the gimplify context, runs the unreachable /
   fallthrough warnings, synthesizes a default case when absent, and emits
   the GIMPLE_SWITCH (optionally wrapped in a GIMPLE_BIND for
   -Wimplicit-fallthrough).  Text preserved byte-for-byte from a mangled
   extraction; braces and many lines are missing.  */
2899 static enum gimplify_status
2900 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2902 tree switch_expr
= *expr_p
;
2903 gimple_seq switch_body_seq
= NULL
;
2904 enum gimplify_status ret
;
2905 tree index_type
= TREE_TYPE (switch_expr
);
2906 if (index_type
== NULL_TREE
)
2907 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2909 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2911 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2914 if (SWITCH_BODY (switch_expr
))
2917 vec
<tree
> saved_labels
;
2918 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2919 tree default_case
= NULL_TREE
;
2920 gswitch
*switch_stmt
;
2922 /* Save old labels, get new ones from body, then restore the old
2923 labels. Save all the things from the switch body to append after. */
2924 saved_labels
= gimplify_ctxp
->case_labels
;
2925 gimplify_ctxp
->case_labels
.create (8);
2927 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2928 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2929 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2930 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2931 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2933 gimplify_ctxp
->live_switch_vars
= NULL
;
2935 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2936 gimplify_ctxp
->in_switch_expr
= true;
2938 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2940 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2941 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq
);
2942 maybe_warn_implicit_fallthrough (switch_body_seq
);
2943 /* Only do this for the outermost GIMPLE_SWITCH. */
2944 if (!gimplify_ctxp
->in_switch_expr
)
2945 expand_FALLTHROUGH (&switch_body_seq
);
2947 labels
= gimplify_ctxp
->case_labels
;
2948 gimplify_ctxp
->case_labels
= saved_labels
;
2950 if (gimplify_ctxp
->live_switch_vars
)
2952 gcc_assert (gimplify_ctxp
->live_switch_vars
->is_empty ());
2953 delete gimplify_ctxp
->live_switch_vars
;
2955 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2957 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2960 bool add_bind
= false;
2963 glabel
*new_default
;
/* No user default label: synthesize an artificial one so the GIMPLE_SWITCH
   always has a default edge.  */
2966 = build_case_label (NULL_TREE
, NULL_TREE
,
2967 create_artificial_label (UNKNOWN_LOCATION
));
2968 if (old_in_switch_expr
)
2970 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case
)) = 1;
2973 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2974 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2976 else if (old_in_switch_expr
)
2978 gimple
*last
= gimple_seq_last_stmt (switch_body_seq
);
2979 if (last
&& gimple_code (last
) == GIMPLE_LABEL
)
2981 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2982 if (SWITCH_BREAK_LABEL_P (label
))
2987 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2988 default_case
, labels
);
2989 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2990 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2991 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2992 so that we can easily find the start and end of the switch
2996 gimple_seq bind_body
= NULL
;
2997 gimplify_seq_add_stmt (&bind_body
, switch_stmt
);
2998 gimple_seq_add_seq (&bind_body
, switch_body_seq
);
2999 gbind
*bind
= gimple_build_bind (NULL_TREE
, bind_body
, NULL_TREE
);
3000 gimple_set_location (bind
, EXPR_LOCATION (switch_expr
));
3001 gimplify_seq_add_stmt (pre_p
, bind
);
3005 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
3006 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
3016 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
/* Emits a GIMPLE_LABEL for the LABEL_EXPR into PRE_P, asserting the label
   belongs to the current function, and adds a branch-prediction hint for
   labels carrying the "cold"/"hot" attribute.  Text preserved byte-for-byte
   from a mangled extraction.  */
3018 static enum gimplify_status
3019 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
3021 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
3022 == current_function_decl
);
3024 tree label
= LABEL_EXPR_LABEL (*expr_p
);
3025 glabel
*label_stmt
= gimple_build_label (label
);
3026 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
3027 gimplify_seq_add_stmt (pre_p
, label_stmt
);
3029 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
3030 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
3032 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
3033 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
3039 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
/* Emits a GIMPLE_LABEL for a case label and registers the CASE_LABEL_EXPR in
   the innermost gimplify context that has a case_labels vector (walking
   outward past contexts created for e.g. OpenMP regions).  Adds hot/cold
   prediction hints like gimplify_label_expr.  Text preserved byte-for-byte
   from a mangled extraction.  */
3041 static enum gimplify_status
3042 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
3044 struct gimplify_ctx
*ctxp
;
3047 /* Invalid programs can play Duff's Device type games with, for example,
3048 #pragma omp parallel. At least in the C front end, we don't
3049 detect such invalid branches until after gimplification, in the
3050 diagnose_omp_blocks pass. */
3051 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
3052 if (ctxp
->case_labels
.exists ())
3055 tree label
= CASE_LABEL (*expr_p
);
3056 label_stmt
= gimple_build_label (label
);
3057 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
3058 ctxp
->case_labels
.safe_push (*expr_p
);
3059 gimplify_seq_add_stmt (pre_p
, label_stmt
);
3061 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
3062 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
3064 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
3065 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
3071 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
/* Creates *LABEL_P on demand (an artificial label) and returns a GOTO_EXPR
   to it; returns early when LABEL_P is NULL (nowhere to jump).  Text
   preserved byte-for-byte from a mangled extraction.  */
3075 build_and_jump (tree
*label_p
)
3077 if (label_p
== NULL
)
3078 /* If there's nowhere to jump, just fall through. */
3081 if (*label_p
== NULL_TREE
)
3083 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
3087 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
3090 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
3091 This also involves building a label to jump to and communicating it to
3092 gimplify_loop_expr through gimplify_ctxp->exit_label. */
/* Replaces EXIT_EXPR(cond) with `if (cond) goto exit_label`, creating the
   loop's exit label on demand.  Text preserved byte-for-byte from a mangled
   extraction; the `tree expr` declaration and return are missing.  */
3094 static enum gimplify_status
3095 gimplify_exit_expr (tree
*expr_p
)
3097 tree cond
= TREE_OPERAND (*expr_p
, 0);
3100 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
3101 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
3107 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
3108 different from its canonical type, wrap the whole thing inside a
3109 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
3112 The canonical type of a COMPONENT_REF is the type of the field being
3113 referenced--unless the field is a bit-field which can be read directly
3114 in a smaller mode, in which case the canonical type is the
3115 sign-appropriate type corresponding to that mode. */
/* Text preserved byte-for-byte from a mangled extraction; the `tree type`
   and `int type_quals` declarations and several lines are missing.  */
3118 canonicalize_component_ref (tree
*expr_p
)
3120 tree expr
= *expr_p
;
3123 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
3125 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
3126 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
3128 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
3130 /* One could argue that all the stuff below is not necessary for
3131 the non-bitfield case and declare it a FE error if type
3132 adjustment would be needed. */
3133 if (TREE_TYPE (expr
) != type
)
3135 #ifdef ENABLE_TYPES_CHECKING
3136 tree old_type
= TREE_TYPE (expr
);
3140 /* We need to preserve qualifiers and propagate them from
3142 type_quals
= TYPE_QUALS (type
)
3143 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
3144 if (TYPE_QUALS (type
) != type_quals
)
3145 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
3147 /* Set the type of the COMPONENT_REF to the underlying type. */
3148 TREE_TYPE (expr
) = type
;
3150 #ifdef ENABLE_TYPES_CHECKING
3151 /* It is now a FE error, if the conversion from the canonical
3152 type to the original expression type is not useless. */
3153 gcc_assert (useless_type_conversion_p (old_type
, type
));
3158 /* If a NOP conversion is changing a pointer to array of foo to a pointer
3159 to foo, embed that change in the ADDR_EXPR by converting
3164 where L is the lower bound. For simplicity, only do this for constant
3166 The constraint is that the type of &array[L] is trivially convertible
/* Rewrites (T *)&array into &array[L] when the element pointer type is
   trivially convertible to the cast's target type and the array's lower
   bound/element size are constants.  Text preserved byte-for-byte from a
   mangled extraction; early `return`s and some lines are missing.  */
3170 canonicalize_addr_expr (tree
*expr_p
)
3172 tree expr
= *expr_p
;
3173 tree addr_expr
= TREE_OPERAND (expr
, 0);
3174 tree datype
, ddatype
, pddatype
;
3176 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
3177 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
3178 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
3181 /* The addr_expr type should be a pointer to an array. */
3182 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
3183 if (TREE_CODE (datype
) != ARRAY_TYPE
)
3186 /* The pointer to element type shall be trivially convertible to
3187 the expression pointer type. */
3188 ddatype
= TREE_TYPE (datype
);
3189 pddatype
= build_pointer_type (ddatype
);
3190 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
3194 /* The lower bound and element sizes must be constant. */
3195 if (!TYPE_SIZE_UNIT (ddatype
)
3196 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
3197 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
3198 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
3201 /* All checks succeeded. Build a new node to merge the cast. */
3202 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
3203 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
3204 NULL_TREE
, NULL_TREE
);
3205 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
3207 /* We can have stripped a required restrict qualifier above. */
3208 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
3209 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
3212 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
3213 underneath as appropriate. */
/* Strips redundant casts, canonicalizes COMPONENT_REF / ADDR_EXPR operands,
   forces VIEW_CONVERT_EXPR for conversions to non-register types, and
   normalizes CONVERT_EXPR to NOP_EXPR.  Text preserved byte-for-byte from a
   mangled extraction; the return statement is missing.  */
3215 static enum gimplify_status
3216 gimplify_conversion (tree
*expr_p
)
3218 location_t loc
= EXPR_LOCATION (*expr_p
);
3219 gcc_assert (CONVERT_EXPR_P (*expr_p
));
3221 /* Then strip away all but the outermost conversion. */
3222 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
3224 /* And remove the outermost conversion if it's useless. */
3225 if (tree_ssa_useless_type_conversion (*expr_p
))
3226 *expr_p
= TREE_OPERAND (*expr_p
, 0);
3228 /* If we still have a conversion at the toplevel,
3229 then canonicalize some constructs. */
3230 if (CONVERT_EXPR_P (*expr_p
))
3232 tree sub
= TREE_OPERAND (*expr_p
, 0);
3234 /* If a NOP conversion is changing the type of a COMPONENT_REF
3235 expression, then canonicalize its type now in order to expose more
3236 redundant conversions. */
3237 if (TREE_CODE (sub
) == COMPONENT_REF
)
3238 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
3240 /* If a NOP conversion is changing a pointer to array of foo
3241 to a pointer to foo, embed that change in the ADDR_EXPR. */
3242 else if (TREE_CODE (sub
) == ADDR_EXPR
)
3243 canonicalize_addr_expr (expr_p
);
3246 /* If we have a conversion to a non-register type force the
3247 use of a VIEW_CONVERT_EXPR instead. */
3248 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
3249 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
3250 TREE_OPERAND (*expr_p
, 0));
3252 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3253 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
3254 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
3259 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3260 DECL_VALUE_EXPR, and it's worth re-examining things. */
/* Also notices variable uses inside OpenMP contexts.  Text preserved
   byte-for-byte from a mangled extraction; the leading condition of the
   leaked-decl check and the return statements are missing.  */
3262 static enum gimplify_status
3263 gimplify_var_or_parm_decl (tree
*expr_p
)
3265 tree decl
= *expr_p
;
3267 /* ??? If this is a local variable, and it has not been seen in any
3268 outer BIND_EXPR, then it's probably the result of a duplicate
3269 declaration, for which we've already issued an error. It would
3270 be really nice if the front end wouldn't leak these at all.
3271 Currently the only known culprit is C++ destructors, as seen
3272 in g++.old-deja/g++.jason/binding.C.
3273 Another possible culpit are size expressions for variably modified
3274 types which are lost in the FE or not gimplified correctly. */
3276 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
3277 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
3278 && decl_function_context (decl
) == current_function_decl
)
3280 gcc_assert (seen_error ());
3284 /* When within an OMP context, notice uses of variables. */
3285 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
3288 /* If the decl is an alias for another expression, substitute it now. */
3289 if (DECL_HAS_VALUE_EXPR_P (decl
))
3291 *expr_p
= unshare_expr (DECL_VALUE_EXPR (decl
));
3298 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
/* Recomputes TREE_SIDE_EFFECTS bottom-up: pre/post inc/dec always have side
   effects; for arithmetic/reference/call classes the flag is volatile-ness
   OR'd with any operand's flag.  Text preserved byte-for-byte from a mangled
   extraction; the `int i` declaration, braces, and default cases are
   missing.  */
3301 recalculate_side_effects (tree t
)
3303 enum tree_code code
= TREE_CODE (t
);
3304 int len
= TREE_OPERAND_LENGTH (t
);
3307 switch (TREE_CODE_CLASS (code
))
3309 case tcc_expression
:
3315 case PREDECREMENT_EXPR
:
3316 case PREINCREMENT_EXPR
:
3317 case POSTDECREMENT_EXPR
:
3318 case POSTINCREMENT_EXPR
:
3319 /* All of these have side-effects, no matter what their
3328 case tcc_comparison
: /* a comparison expression */
3329 case tcc_unary
: /* a unary arithmetic expression */
3330 case tcc_binary
: /* a binary arithmetic expression */
3331 case tcc_reference
: /* a reference */
3332 case tcc_vl_exp
: /* a function call */
3333 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
3334 for (i
= 0; i
< len
; ++i
)
3336 tree op
= TREE_OPERAND (t
, i
);
3337 if (op
&& TREE_SIDE_EFFECTS (op
))
3338 TREE_SIDE_EFFECTS (t
) = 1;
3343 /* No side-effects. */
3347 if (code
== SSA_NAME
)
3348 /* No side-effects. */
3354 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3358 : min_lval '[' val ']'
3360 | compound_lval '[' val ']'
3361 | compound_lval '.' ID
3363 This is not part of the original SIMPLE definition, which separates
3364 array and member references, but it seems reasonable to handle them
3365 together. Also, this way we don't run into problems with union
3366 aliasing; gcc requires that for accesses through a union to alias, the
3367 union reference must be explicit, which was not always the case when we
3368 were splitting up array and member refs.
3370 PRE_P points to the sequence where side effects that must happen before
3371 *EXPR_P should be stored.
3373 POST_P points to the sequence where side effects that must happen after
3374 *EXPR_P should be stored. */
3376 static enum gimplify_status
3377 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3378 fallback_t fallback
)
3381 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
3383 location_t loc
= EXPR_LOCATION (*expr_p
);
3384 tree expr
= *expr_p
;
3386 /* Create a stack of the subexpressions so later we can walk them in
3387 order from inner to outer. */
3388 auto_vec
<tree
, 10> expr_stack
;
3390 /* We can handle anything that get_inner_reference can deal with. */
3391 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
3394 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3395 if (TREE_CODE (*p
) == INDIRECT_REF
)
3396 *p
= fold_indirect_ref_loc (loc
, *p
);
3398 if (handled_component_p (*p
))
3400 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3401 additional COMPONENT_REFs. */
3402 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
3403 && gimplify_var_or_parm_decl (p
) == GS_OK
)
3408 expr_stack
.safe_push (*p
);
3411 gcc_assert (expr_stack
.length ());
3413 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3414 walked through and P points to the innermost expression.
3416 Java requires that we elaborated nodes in source order. That
3417 means we must gimplify the inner expression followed by each of
3418 the indices, in order. But we can't gimplify the inner
3419 expression until we deal with any variable bounds, sizes, or
3420 positions in order to deal with PLACEHOLDER_EXPRs.
3422 The base expression may contain a statement expression that
3423 has declarations used in size expressions, so has to be
3424 gimplified before gimplifying the size expressions.
3426 So we do this in three steps. First we deal with variable
3427 bounds, sizes, and positions, then we gimplify the base and
3428 ensure it is memory if needed, then we deal with the annotations
3429 for any variables in the components and any indices, from left
3432 bool need_non_reg
= false;
3433 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
3435 tree t
= expr_stack
[i
];
3437 if (error_operand_p (TREE_OPERAND (t
, 0)))
3440 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3442 /* Deal with the low bound and element type size and put them into
3443 the ARRAY_REF. If these values are set, they have already been
3445 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3447 tree low
= unshare_expr (array_ref_low_bound (t
));
3448 if (!is_gimple_min_invariant (low
))
3450 TREE_OPERAND (t
, 2) = low
;
3454 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
3456 tree elmt_size
= array_ref_element_size (t
);
3457 if (!is_gimple_min_invariant (elmt_size
))
3459 elmt_size
= unshare_expr (elmt_size
);
3460 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
3461 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
3463 /* Divide the element size by the alignment of the element
3465 elmt_size
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3468 TREE_OPERAND (t
, 3) = elmt_size
;
3471 need_non_reg
= true;
3473 else if (TREE_CODE (t
) == COMPONENT_REF
)
3475 /* Set the field offset into T and gimplify it. */
3476 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3478 tree offset
= component_ref_field_offset (t
);
3479 if (!is_gimple_min_invariant (offset
))
3481 offset
= unshare_expr (offset
);
3482 tree field
= TREE_OPERAND (t
, 1);
3484 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
3486 /* Divide the offset by its alignment. */
3487 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3490 TREE_OPERAND (t
, 2) = offset
;
3493 need_non_reg
= true;
3495 else if (!is_gimple_reg_type (TREE_TYPE (t
)))
3496 /* When the result of an operation, in particular a VIEW_CONVERT_EXPR
3497 is a non-register type then require the base object to be a
3498 non-register as well. */
3499 need_non_reg
= true;
3502 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3503 so as to match the min_lval predicate. Failure to do so may result
3504 in the creation of large aggregate temporaries. */
3505 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
3506 fallback
| fb_lvalue
);
3507 ret
= MIN (ret
, tret
);
3508 if (ret
== GS_ERROR
)
3511 /* Step 2a: if we have component references we do not support on
3512 registers then make sure the base isn't a register. Of course
3513 we can only do so if an rvalue is OK. */
3514 if (need_non_reg
&& (fallback
& fb_rvalue
))
3515 prepare_gimple_addressable (p
, pre_p
);
3518 /* Step 3: gimplify size expressions and the indices and operands of
3519 ARRAY_REF. During this loop we also remove any useless conversions.
3520 If we operate on a register also make sure to properly gimplify
3521 to individual operations. */
3523 bool reg_operations
= is_gimple_reg (*p
);
3524 for (; expr_stack
.length () > 0; )
3526 tree t
= expr_stack
.pop ();
3528 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3530 gcc_assert (!reg_operations
);
3532 /* Gimplify the low bound and element type size. */
3533 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3534 is_gimple_reg
, fb_rvalue
);
3535 ret
= MIN (ret
, tret
);
3537 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
3538 is_gimple_reg
, fb_rvalue
);
3539 ret
= MIN (ret
, tret
);
3541 /* Gimplify the dimension. */
3542 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
3543 is_gimple_val
, fb_rvalue
);
3544 ret
= MIN (ret
, tret
);
3546 else if (TREE_CODE (t
) == COMPONENT_REF
)
3548 gcc_assert (!reg_operations
);
3550 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3551 is_gimple_reg
, fb_rvalue
);
3552 ret
= MIN (ret
, tret
);
3554 else if (reg_operations
)
3556 tret
= gimplify_expr (&TREE_OPERAND (t
, 0), pre_p
, post_p
,
3557 is_gimple_val
, fb_rvalue
);
3558 ret
= MIN (ret
, tret
);
3561 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
3563 /* The innermost expression P may have originally had
3564 TREE_SIDE_EFFECTS set which would have caused all the outer
3565 expressions in *EXPR_P leading to P to also have had
3566 TREE_SIDE_EFFECTS set. */
3567 recalculate_side_effects (t
);
3570 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3571 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
3573 canonicalize_component_ref (expr_p
);
3576 expr_stack
.release ();
3578 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
/* NOTE(review): the text below is a lossy, line-split extraction of this
   function -- several original lines (braces, returns, the declaration of
   'postfix' and the postfix/prefix branch bodies) are absent.  Code tokens
   are kept byte-identical; only comments have been added.  */
3583 /* Gimplify the self modifying expression pointed to by EXPR_P
3586 PRE_P points to the list where side effects that must happen before
3587 *EXPR_P should be stored.
3589 POST_P points to the list where side effects that must happen after
3590 *EXPR_P should be stored.
3592 WANT_VALUE is nonzero iff we want to use the value of this expression
3593 in another expression.
3595 ARITH_TYPE is the type the computation should be performed in. */
3597 enum gimplify_status
3598 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3599 bool want_value
, tree arith_type
)
3601 enum tree_code code
;
3602 tree lhs
, lvalue
, rhs
, t1
;
/* 'post' is a local post-effect queue; 'orig_post_p' remembers the
   caller's queue so it can be appended to later (line 3670).  */
3603 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
3605 enum tree_code arith_code
;
3606 enum gimplify_status ret
;
3607 location_t loc
= EXPR_LOCATION (*expr_p
);
3609 code
= TREE_CODE (*expr_p
);
/* Only the four ++/-- tree codes are accepted here.  */
3611 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
3612 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
3614 /* Prefix or postfix? */
3615 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
3616 /* Faster to treat as prefix if result is not used. */
/* NOTE(review): 'postfix' is assigned here but its declaration line is
   missing from this extraction -- presumably a bool local; confirm against
   the unmangled source.  */
3617 postfix
= want_value
;
3621 /* For postfix, make sure the inner expression's post side effects
3622 are executed after side effects from this expression. */
3626 /* Add or subtract? */
3627 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
3628 arith_code
= PLUS_EXPR
;
3630 arith_code
= MINUS_EXPR
;
3632 /* Gimplify the LHS into a GIMPLE lvalue. */
3633 lvalue
= TREE_OPERAND (*expr_p
, 0);
3634 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
3635 if (ret
== GS_ERROR
)
3638 /* Extract the operands to the arithmetic operation. */
3640 rhs
= TREE_OPERAND (*expr_p
, 1);
3642 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3643 that as the result value and in the postqueue operation. */
3646 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
3647 if (ret
== GS_ERROR
)
/* Capture the pre-increment value in a temporary so the postfix result
   is unaffected by the assignment below.  */
3650 lhs
= get_initialized_tmp_var (lhs
, pre_p
);
3653 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3654 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
/* Pointer decrement has no POINTER_MINUS; the offset is converted to
   ptrofftype and negated instead (lines 3656-3658).  */
3656 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
3657 if (arith_code
== MINUS_EXPR
)
3658 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
3659 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
/* Non-pointer case: perform the arithmetic in ARITH_TYPE and convert
   the result back to the expression's type.  */
3662 t1
= fold_convert (TREE_TYPE (*expr_p
),
3663 fold_build2 (arith_code
, arith_type
,
3664 fold_convert (arith_type
, lhs
),
3665 fold_convert (arith_type
, rhs
)));
/* Postfix path: emit the store now, then flush the deferred post queue
   into the caller's queue.  */
3669 gimplify_assign (lvalue
, t1
, pre_p
);
3670 gimplify_seq_add_seq (orig_post_p
, post
);
/* Prefix path: leave a MODIFY_EXPR for the caller to gimplify.  */
3676 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
/* NOTE(review): lossy line-split extraction -- the return type line,
   braces, the declaration of 'size' and the early 'return's are missing.
   Code tokens kept byte-identical; comments only added.  */
3681 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3684 maybe_with_size_expr (tree
*expr_p
)
3686 tree expr
= *expr_p
;
3687 tree type
= TREE_TYPE (expr
);
3690 /* If we've already wrapped this or the type is error_mark_node, we can't do
3692 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3693 || type
== error_mark_node
)
3696 /* If the size isn't known or is a constant, we have nothing to do. */
3697 size
= TYPE_SIZE_UNIT (type
);
3698 if (!size
|| poly_int_tree_p (size
))
3701 /* Otherwise, make a WITH_SIZE_EXPR. */
/* Unshare before substituting so PLACEHOLDER_EXPRs in the shared size
   tree are resolved against this specific object only.  */
3702 size
= unshare_expr (size
);
3703 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3704 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
/* NOTE(review): lossy line-split extraction -- the 'allow_ssa' parameter
   line, the declaration of 'fb' (fallback_t), and several condition/brace
   lines are missing.  Code tokens kept byte-identical; comments only
   added.  */
3707 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3708 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3709 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3710 gimplified to an SSA name. */
3712 enum gimplify_status
3713 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
/* 'test' selects the gimple predicate the argument must satisfy.  */
3716 bool (*test
) (tree
);
3719 /* In general, we allow lvalues for function arguments to avoid
3720 extra overhead of copying large aggregates out of even larger
3721 aggregates into temporaries only to copy the temporaries to
3722 the argument list. Make optimizers happy by pulling out to
3723 temporaries those types that fit in registers. */
3724 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3725 test
= is_gimple_val
, fb
= fb_rvalue
;
3728 test
= is_gimple_lvalue
, fb
= fb_either
;
3729 /* Also strip a TARGET_EXPR that would force an extra copy. */
3730 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3732 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
/* NOTE(review): the first operand of this '&&' is on a line missing from
   the extraction (presumably a non-null/suitability check on 'init').  */
3734 && !VOID_TYPE_P (TREE_TYPE (init
)))
3739 /* If this is a variable sized type, we must remember the size. */
3740 maybe_with_size_expr (arg_p
);
3742 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3743 /* Make sure arguments have the same location as the function call
3745 protected_set_expr_location (*arg_p
, call_location
);
3747 /* There is a sequence point before a function call. Side effects in
3748 the argument list must occur before the actual call. So, when
3749 gimplifying arguments, force gimplify_expr to use an internal
3750 post queue which is then appended to the end of PRE_P. */
3751 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
/* NOTE(review): lossy line-split extraction -- the return type line and
   the 'return false;'/'return' statements inside the loop are missing.
   Code tokens kept byte-identical; comments only added.  */
3754 /* Don't fold inside offloading or taskreg regions: it can break code by
3755 adding decl references that weren't in the source. We'll do it during
3756 omplower pass instead. */
3759 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3761 struct gimplify_omp_ctx
*ctx
;
/* Walk enclosing OpenMP contexts outward; bail out (suppress folding)
   when inside a target/parallel/task region (missing statement between
   lines 3763 and 3765 presumably returns early here).  */
3762 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3763 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3765 else if ((ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
3767 /* Delay folding of builtins until the IL is in consistent state
3768 so the diagnostic machinery can do a better job. */
3769 if (gimple_call_builtin_p (gsi_stmt (*gsi
)))
3771 return fold_stmt (gsi
);
/* NOTE(review): lossy line-split extraction of a large function -- many
   original lines are absent (declarations of 'i'/'nargs'/'call', braces,
   'break;'s, 'return GS_...' statements, and several 'if' headers such as
   the want_value test near line 4110).  Code tokens kept byte-identical;
   comments only added.  Do not attempt to compile this text.  */
3774 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3775 WANT_VALUE is true if the result of the call is desired. */
3777 static enum gimplify_status
3778 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3780 tree fndecl
, parms
, p
, fnptrtype
;
3781 enum gimplify_status ret
;
3784 bool builtin_va_start_p
= false;
3785 location_t loc
= EXPR_LOCATION (*expr_p
);
3787 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3789 /* For reliable diagnostics during inlining, it is necessary that
3790 every call_expr be annotated with file and line. */
3791 if (! EXPR_HAS_LOCATION (*expr_p
))
3792 SET_EXPR_LOCATION (*expr_p
, input_location
);
3794 /* Gimplify internal functions created in the FEs. */
/* Internal-function calls have a NULL CALL_EXPR_FN; they are emitted
   directly as internal gcalls below rather than going through the
   builtin/fold paths.  */
3795 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3800 nargs
= call_expr_nargs (*expr_p
);
3801 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3802 auto_vec
<tree
> vargs (nargs
);
/* IFN_ASSUME gets bespoke handling: simple conditions become
   if (!cond) __builtin_unreachable (); complex ones are wrapped in a
   GIMPLE_ASSUME so lowering can outline them.  */
3804 if (ifn
== IFN_ASSUME
)
3806 if (simple_condition_p (CALL_EXPR_ARG (*expr_p
, 0)))
3808 /* If the [[assume (cond)]]; condition is simple
3809 enough and can be evaluated unconditionally
3810 without side-effects, expand it as
3811 if (!cond) __builtin_unreachable (); */
3812 tree fndecl
= builtin_decl_explicit (BUILT_IN_UNREACHABLE
);
3813 *expr_p
= build3 (COND_EXPR
, void_type_node
,
3814 CALL_EXPR_ARG (*expr_p
, 0), void_node
,
3815 build_call_expr_loc (EXPR_LOCATION (*expr_p
),
3819 /* If not optimizing, ignore the assumptions. */
3820 if (!optimize
|| seen_error ())
3822 *expr_p
= NULL_TREE
;
3825 /* Temporarily, until gimple lowering, transform
3832 such that gimple lowering can outline the condition into
3833 a separate function easily. */
3834 tree guard
= create_tmp_var (boolean_type_node
);
3835 *expr_p
= build2 (MODIFY_EXPR
, void_type_node
, guard
,
3836 gimple_boolify (CALL_EXPR_ARG (*expr_p
, 0)));
3837 *expr_p
= build3 (BIND_EXPR
, void_type_node
, NULL
, *expr_p
, NULL
);
3838 push_gimplify_context ();
3839 gimple_seq body
= NULL
;
3840 gimple
*g
= gimplify_and_return_first (*expr_p
, &body
);
3841 pop_gimplify_context (g
);
3842 g
= gimple_build_assume (guard
, body
);
3843 gimple_set_location (g
, loc
);
3844 gimplify_seq_add_stmt (pre_p
, g
);
3845 *expr_p
= NULL_TREE
;
/* Generic internal-fn path: gimplify each argument, then emit one
   internal gcall into PRE_P.  */
3849 for (i
= 0; i
< nargs
; i
++)
3851 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3852 EXPR_LOCATION (*expr_p
));
3853 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3856 gcall
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3857 gimple_call_set_nothrow (call
, TREE_NOTHROW (*expr_p
));
3858 gimplify_seq_add_stmt (pre_p
, call
);
3862 /* This may be a call to a builtin function.
3864 Builtin function calls may be transformed into different
3865 (and more efficient) builtin function calls under certain
3866 circumstances. Unfortunately, gimplification can muck things
3867 up enough that the builtin expanders are not aware that certain
3868 transformations are still valid.
3870 So we attempt transformation/gimplification of the call before
3871 we gimplify the CALL_EXPR. At this time we do not manage to
3872 transform all calls in the same manner as the expanders do, but
3873 we do transform most of them. */
3874 fndecl
= get_callee_fndecl (*expr_p
);
3875 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3876 switch (DECL_FUNCTION_CODE (fndecl
))
3878 CASE_BUILT_IN_ALLOCA
:
3879 /* If the call has been built for a variable-sized object, then we
3880 want to restore the stack level when the enclosing BIND_EXPR is
3881 exited to reclaim the allocated space; otherwise, we precisely
3882 need to do the opposite and preserve the latest stack level. */
3883 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3884 gimplify_ctxp
->save_stack
= true;
3886 gimplify_ctxp
->keep_stack
= true;
3889 case BUILT_IN_VA_START
:
3891 builtin_va_start_p
= true;
3892 if (call_expr_nargs (*expr_p
) < 2)
3894 error ("too few arguments to function %<va_start%>");
3895 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3899 if (fold_builtin_next_arg (*expr_p
, true))
3901 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3907 case BUILT_IN_EH_RETURN
:
/* Record that this function uses __builtin_eh_return so later passes
   set up the EH return machinery.  */
3908 cfun
->calls_eh_return
= true;
3911 case BUILT_IN_CLEAR_PADDING
:
3912 if (call_expr_nargs (*expr_p
) == 1)
3914 /* Remember the original type of the argument in an internal
3915 dummy second argument, as in GIMPLE pointer conversions are
3916 useless. Also mark this call as not for automatic
3917 initialization in the internal dummy third argument. */
3918 p
= CALL_EXPR_ARG (*expr_p
, 0);
3920 = build_call_expr_loc (EXPR_LOCATION (*expr_p
), fndecl
, 2, p
,
3921 build_zero_cst (TREE_TYPE (p
)));
/* First folding attempt, before the call's operands are gimplified.  */
3929 if (fndecl
&& fndecl_built_in_p (fndecl
))
3931 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3932 if (new_tree
&& new_tree
!= *expr_p
)
3934 /* There was a transformation of this call which computes the
3935 same value, but in a more efficient way. Return and try
3942 /* Remember the original function pointer type. */
3943 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
/* NOTE(review): the condition lines preceding this '&&' (presumably an
   OpenMP declare-variant applicability test) are missing from the
   extraction.  */
3948 && (cfun
->curr_properties
& PROP_gimple_any
) == 0)
3950 tree variant
= omp_resolve_declare_variant (fndecl
);
3951 if (variant
!= fndecl
)
3952 CALL_EXPR_FN (*expr_p
) = build1 (ADDR_EXPR
, fnptrtype
, variant
);
3955 /* There is a sequence point before the call, so any side effects in
3956 the calling expression must occur before the actual call. Force
3957 gimplify_expr to use an internal post queue. */
3958 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3959 is_gimple_call_addr
, fb_rvalue
);
3961 if (ret
== GS_ERROR
)
3964 nargs
= call_expr_nargs (*expr_p
);
3966 /* Get argument types for verification. */
3967 fndecl
= get_callee_fndecl (*expr_p
);
3970 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3972 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3974 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3975 p
= DECL_ARGUMENTS (fndecl
);
/* Walk formal parameters in parallel with actual arguments; the loop
   body is absent from this extraction.  */
3980 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3983 /* If the last argument is __builtin_va_arg_pack () and it is not
3984 passed as a named argument, decrease the number of CALL_EXPR
3985 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3988 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3990 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3991 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3994 && fndecl_built_in_p (last_arg_fndecl
, BUILT_IN_VA_ARG_PACK
))
/* Rebuild the call without the trailing va_arg_pack argument, then
   copy the flags the new node must preserve.  */
3996 tree call
= *expr_p
;
3999 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
4000 CALL_EXPR_FN (call
),
4001 nargs
, CALL_EXPR_ARGP (call
));
4003 /* Copy all CALL_EXPR flags, location and block, except
4004 CALL_EXPR_VA_ARG_PACK flag. */
4005 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
4006 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
4007 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
4008 = CALL_EXPR_RETURN_SLOT_OPT (call
);
4009 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
4010 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
4012 /* Set CALL_EXPR_VA_ARG_PACK. */
4013 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
4017 /* If the call returns twice then after building the CFG the call
4018 argument computations will no longer dominate the call because
4019 we add an abnormal incoming edge to the call. So do not use SSA
4021 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
4023 /* Gimplify the function arguments. */
/* Arguments are visited in target push order (reversed when
   PUSH_ARGS_REVERSED) so side effects are emitted in evaluation order.  */
4026 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
4027 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
4028 PUSH_ARGS_REVERSED
? i
-- : i
++)
4030 enum gimplify_status t
;
4032 /* Avoid gimplifying the second argument to va_start, which needs to
4033 be the plain PARM_DECL. */
4034 if ((i
!= 1) || !builtin_va_start_p
)
4036 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
4037 EXPR_LOCATION (*expr_p
), ! returns_twice
);
4045 /* Gimplify the static chain. */
4046 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
/* Drop a static chain the callee cannot use.  */
4048 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
4049 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
4052 enum gimplify_status t
;
4053 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
4054 EXPR_LOCATION (*expr_p
), ! returns_twice
);
4060 /* Verify the function result. */
4061 if (want_value
&& fndecl
4062 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
4064 error_at (loc
, "using result of function returning %<void%>");
4068 /* Try this again in case gimplification exposed something. */
4069 if (ret
!= GS_ERROR
)
4071 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
4073 if (new_tree
&& new_tree
!= *expr_p
)
4075 /* There was a transformation of this call which computes the
4076 same value, but in a more efficient way. Return and try
/* Error path: replace the whole expression with error_mark_node.  */
4084 *expr_p
= error_mark_node
;
4088 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
4089 decl. This allows us to eliminate redundant or useless
4090 calls to "const" functions. */
4091 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
4093 int flags
= call_expr_flags (*expr_p
);
4094 if (flags
& (ECF_CONST
| ECF_PURE
)
4095 /* An infinite loop is considered a side effect. */
4096 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
4097 TREE_SIDE_EFFECTS (*expr_p
) = 0;
4100 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
4101 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
4102 form and delegate the creation of a GIMPLE_CALL to
4103 gimplify_modify_expr. This is always possible because when
4104 WANT_VALUE is true, the caller wants the result of this call into
4105 a temporary, which means that we will emit an INIT_EXPR in
4106 internal_get_tmp_var which will then be handled by
4107 gimplify_modify_expr. */
4110 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
4111 have to do is replicate it as a GIMPLE_CALL tuple. */
4112 gimple_stmt_iterator gsi
;
4113 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
4114 notice_special_calls (call
);
4115 gimplify_seq_add_stmt (pre_p
, call
);
4116 gsi
= gsi_last (*pre_p
);
4117 maybe_fold_stmt (&gsi
);
4118 *expr_p
= NULL_TREE
;
4121 /* Remember the original function type. */
4122 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
4123 CALL_EXPR_FN (*expr_p
));
/* NOTE(review): lossy line-split extraction -- the return type, the
   'locus' parameter line, braces, the trailing new_locus arguments of the
   recursive calls, and the final 'return expr;' are missing.  Code tokens
   kept byte-identical; comments only added.  */
4128 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
4129 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
4131 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
4132 condition is true or false, respectively. If null, we should generate
4133 our own to skip over the evaluation of this specific expression.
4135 LOCUS is the source location of the COND_EXPR.
4137 This function is the tree equivalent of do_jump.
4139 shortcut_cond_r should only be called by shortcut_cond_expr. */
4142 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
/* 'local_label' is materialized at the end only if some recursive step
   needed a fresh skip label (lines 4231-4232).  */
4145 tree local_label
= NULL_TREE
;
4146 tree t
, expr
= NULL
;
4148 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
4149 retain the shortcut semantics. Just insert the gotos here;
4150 shortcut_cond_expr will append the real blocks later. */
4151 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
4153 location_t new_locus
;
4155 /* Turn if (a && b) into
4157 if (a); else goto no;
4158 if (b) goto yes; else goto no;
4161 if (false_label_p
== NULL
)
4162 false_label_p
= &local_label
;
4164 /* Keep the original source location on the first 'if'. */
4165 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
4166 append_to_statement_list (t
, &expr
);
4168 /* Set the source location of the && on the second 'if'. */
4169 new_locus
= rexpr_location (pred
, locus
);
4170 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
4172 append_to_statement_list (t
, &expr
);
4174 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
4176 location_t new_locus
;
4178 /* Turn if (a || b) into
4181 if (b) goto yes; else goto no;
/* Mirror of the && case: a missing true label becomes the local skip
   label so 'a' being true jumps past 'b'.  */
4184 if (true_label_p
== NULL
)
4185 true_label_p
= &local_label
;
4187 /* Keep the original source location on the first 'if'. */
4188 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
4189 append_to_statement_list (t
, &expr
);
4191 /* Set the source location of the || on the second 'if'. */
4192 new_locus
= rexpr_location (pred
, locus
);
4193 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
4195 append_to_statement_list (t
, &expr
);
4197 else if (TREE_CODE (pred
) == COND_EXPR
4198 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
4199 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
4201 location_t new_locus
;
4203 /* As long as we're messing with gotos, turn if (a ? b : c) into
4205 if (b) goto yes; else goto no;
4207 if (c) goto yes; else goto no;
4209 Don't do this if one of the arms has void type, which can happen
4210 in C++ when the arm is throw. */
4212 /* Keep the original source location on the first 'if'. Set the source
4213 location of the ? on the second 'if'. */
4214 new_locus
= rexpr_location (pred
, locus
);
4215 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
4216 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
4217 false_label_p
, locus
),
4218 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
4219 false_label_p
, new_locus
));
/* Base case: a simple predicate becomes one COND_EXPR whose arms are
   plain jumps to the requested labels.  */
4223 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
4224 build_and_jump (true_label_p
),
4225 build_and_jump (false_label_p
));
4226 SET_EXPR_LOCATION (expr
, locus
);
4231 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
4232 append_to_statement_list (t
, &expr
);
/* NOTE(review): lossy line-split extraction -- the return type line and
   the 'return expr;'/'return NULL_TREE;' lines are missing.  Code tokens
   kept byte-identical; comments only added.  */
4238 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
4239 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
4240 statement, if it is the last one. Otherwise, return NULL. */
4243 find_goto (tree expr
)
4248 if (TREE_CODE (expr
) == GOTO_EXPR
)
4251 if (TREE_CODE (expr
) != STATEMENT_LIST
)
4254 tree_stmt_iterator i
= tsi_start (expr
);
/* Skip debug markers so -g does not change the answer.  */
4256 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
/* Only succeed when the remaining statement is the last one.  */
4259 if (!tsi_one_before_end_p (i
))
4262 return find_goto (tsi_stmt (i
));
/* NOTE(review): lossy line-split extraction -- the return type line and
   the two 'return' statements are missing.  Code tokens kept
   byte-identical; comments only added.  */
4265 /* Same as find_goto, except that it returns NULL if the destination
4266 is not a LABEL_DECL. */
4269 find_goto_label (tree expr
)
4271 tree dest
= find_goto (expr
);
/* Presumably returns 'dest' when the test holds and NULL_TREE otherwise;
   those lines are absent here.  */
4272 if (dest
&& TREE_CODE (GOTO_DESTINATION (dest
)) == LABEL_DECL
)
/* NOTE(review): lossy line-split extraction of a large function -- the
   return type, 'true_label_p' declaration, braces, several 'if' headers
   (e.g. the then_se/else_se guards around the appends) and the final
   'return expr;' are missing.  Code tokens kept byte-identical; comments
   only added.  */
4277 /* Given a conditional expression EXPR with short-circuit boolean
4278 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4279 predicate apart into the equivalent sequence of conditionals. */
4282 shortcut_cond_expr (tree expr
)
4284 tree pred
= TREE_OPERAND (expr
, 0);
4285 tree then_
= TREE_OPERAND (expr
, 1);
4286 tree else_
= TREE_OPERAND (expr
, 2);
4287 tree true_label
, false_label
, end_label
, t
;
4289 tree
*false_label_p
;
4290 bool emit_end
, emit_false
, jump_over_else
;
/* then_se/else_se: whether each arm contains real work (side effects);
   arms without side effects let us just forward labels.  */
4291 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
4292 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
4294 /* First do simple transformations. */
4297 /* If there is no 'else', turn
4300 if (a) if (b) then c. */
4301 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
4303 /* Keep the original source location on the first 'if'. */
4304 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
/* Peel one '&&' level: the current expr becomes the inner 'if (b) c'
   and a fresh COND_EXPR on 'a' wraps it.  */
4305 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
4306 /* Set the source location of the && on the second 'if'. */
4307 if (rexpr_has_location (pred
))
4308 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
4309 then_
= shortcut_cond_expr (expr
);
4310 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
4311 pred
= TREE_OPERAND (pred
, 0);
4312 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
4313 SET_EXPR_LOCATION (expr
, locus
);
4319 /* If there is no 'then', turn
4322 if (a); else if (b); else d. */
4323 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
4325 /* Keep the original source location on the first 'if'. */
4326 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
4327 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
4328 /* Set the source location of the || on the second 'if'. */
4329 if (rexpr_has_location (pred
))
4330 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
4331 else_
= shortcut_cond_expr (expr
);
4332 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
4333 pred
= TREE_OPERAND (pred
, 0);
4334 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
4335 SET_EXPR_LOCATION (expr
, locus
);
4339 /* If we're done, great. */
4340 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
4341 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
4344 /* Otherwise we need to mess with gotos. Change
4347 if (a); else goto no;
4350 and recursively gimplify the condition. */
4352 true_label
= false_label
= end_label
= NULL_TREE
;
4354 /* If our arms just jump somewhere, hijack those labels so we don't
4355 generate jumps to jumps. */
4357 if (tree then_goto
= find_goto_label (then_
))
4359 true_label
= GOTO_DESTINATION (then_goto
);
4364 if (tree else_goto
= find_goto_label (else_
))
4366 false_label
= GOTO_DESTINATION (else_goto
);
4371 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4373 true_label_p
= &true_label
;
4375 true_label_p
= NULL
;
4377 /* The 'else' branch also needs a label if it contains interesting code. */
4378 if (false_label
|| else_se
)
4379 false_label_p
= &false_label
;
4381 false_label_p
= NULL
;
4383 /* If there was nothing else in our arms, just forward the label(s). */
4384 if (!then_se
&& !else_se
)
4385 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
4386 EXPR_LOC_OR_LOC (expr
, input_location
));
4388 /* If our last subexpression already has a terminal label, reuse it. */
4390 t
= expr_last (else_
);
4392 t
= expr_last (then_
);
4395 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
4396 end_label
= LABEL_EXPR_LABEL (t
);
4398 /* If we don't care about jumping to the 'else' branch, jump to the end
4399 if the condition is false. */
4401 false_label_p
= &end_label
;
4403 /* We only want to emit these labels if we aren't hijacking them. */
4404 emit_end
= (end_label
== NULL_TREE
);
4405 emit_false
= (false_label
== NULL_TREE
);
4407 /* We only emit the jump over the else clause if we have to--if the
4408 then clause may fall through. Otherwise we can wind up with a
4409 useless jump and a useless label at the end of gimplified code,
4410 which will cause us to think that this conditional as a whole
4411 falls through even if it doesn't. If we then inline a function
4412 which ends with such a condition, that can cause us to issue an
4413 inappropriate warning about control reaching the end of a
4414 non-void function. */
4415 jump_over_else
= block_may_fallthru (then_
);
4417 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
4418 EXPR_LOC_OR_LOC (expr
, input_location
));
/* Assemble the final statement list: condition jumps, then-arm,
   optional jump over the else, else label, else-arm, end label.  */
4421 append_to_statement_list (pred
, &expr
);
4423 append_to_statement_list (then_
, &expr
);
4428 tree last
= expr_last (expr
);
4429 t
= build_and_jump (&end_label
);
4430 if (rexpr_has_location (last
))
4431 SET_EXPR_LOCATION (t
, rexpr_location (last
));
4432 append_to_statement_list (t
, &expr
);
4436 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
4437 append_to_statement_list (t
, &expr
);
4439 append_to_statement_list (else_
, &expr
);
4441 if (emit_end
&& end_label
)
4443 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
4444 append_to_statement_list (t
, &expr
);
/* NOTE(review): lossy line-split extraction -- the return type, braces,
   'break;'s, several case labels (e.g. TRUTH_OR_EXPR, ANNOTATE_EXPR) and
   some 'return expr;' lines are missing.  Code tokens kept byte-identical;
   comments only added.  */
4450 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4453 gimple_boolify (tree expr
)
4455 tree type
= TREE_TYPE (expr
);
4456 location_t loc
= EXPR_LOCATION (expr
);
/* Special case: 'call != 0' where the call is __builtin_expect --
   recurse into its first argument when it is itself a truth value.  */
4458 if (TREE_CODE (expr
) == NE_EXPR
4459 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
4460 && integer_zerop (TREE_OPERAND (expr
, 1)))
4462 tree call
= TREE_OPERAND (expr
, 0);
4463 tree fn
= get_callee_fndecl (call
);
4465 /* For __builtin_expect ((long) (x), y) recurse into x as well
4466 if x is truth_value_p. */
4468 && fndecl_built_in_p (fn
, BUILT_IN_EXPECT
)
4469 && call_expr_nargs (call
) == 2)
4471 tree arg
= CALL_EXPR_ARG (call
, 0);
/* Strip a widening NOP so the truth-value test sees the real operand.  */
4474 if (TREE_CODE (arg
) == NOP_EXPR
4475 && TREE_TYPE (arg
) == TREE_TYPE (call
))
4476 arg
= TREE_OPERAND (arg
, 0);
4477 if (truth_value_p (TREE_CODE (arg
)))
4479 arg
= gimple_boolify (arg
);
4480 CALL_EXPR_ARG (call
, 0)
4481 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
4487 switch (TREE_CODE (expr
))
4489 case TRUTH_AND_EXPR
:
4491 case TRUTH_XOR_EXPR
:
4492 case TRUTH_ANDIF_EXPR
:
4493 case TRUTH_ORIF_EXPR
:
4494 /* Also boolify the arguments of truth exprs. */
4495 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
/* Fallthrough in the original: unary truth ops only boolify operand 0.  */
4498 case TRUTH_NOT_EXPR
:
4499 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4501 /* These expressions always produce boolean results. */
4502 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4503 TREE_TYPE (expr
) = boolean_type_node
;
/* Loop-annotation wrapper: boolify the annotated condition for the
   listed annotation kinds.  (The enclosing ANNOTATE_EXPR case label is
   missing from this extraction.)  */
4507 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
4509 case annot_expr_ivdep_kind
:
4510 case annot_expr_unroll_kind
:
4511 case annot_expr_no_vector_kind
:
4512 case annot_expr_vector_kind
:
4513 case annot_expr_parallel_kind
:
4514 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4515 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4516 TREE_TYPE (expr
) = boolean_type_node
;
4523 if (COMPARISON_CLASS_P (expr
))
4525 /* These expressions always produce boolean results. */
4526 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4527 TREE_TYPE (expr
) = boolean_type_node
;
4530 /* Other expressions that get here must have boolean values, but
4531 might need to be converted to the appropriate mode. */
4532 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
4534 return fold_convert_loc (loc
, boolean_type_node
, expr
);
/* NOTE(review): lossy line-split extraction -- braces and an interleaved
   MIN() update line are missing.  Code tokens kept byte-identical;
   comments only added.  */
4538 /* Given a conditional expression *EXPR_P without side effects, gimplify
4539 its operands. New statements are inserted to PRE_P. */
4541 static enum gimplify_status
4542 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
4544 tree expr
= *expr_p
, cond
;
4545 enum gimplify_status ret
, tret
;
4546 enum tree_code code
;
4548 cond
= gimple_boolify (COND_EXPR_COND (expr
));
4550 /* We need to handle && and || specially, as their gimplification
4551 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
/* Demote short-circuit codes to their non-short-circuit forms: both arms
   of this COND_EXPR are side-effect free, so eager evaluation is safe.  */
4552 code
= TREE_CODE (cond
);
4553 if (code
== TRUTH_ANDIF_EXPR
)
4554 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
4555 else if (code
== TRUTH_ORIF_EXPR
)
4556 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
4557 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
4558 COND_EXPR_COND (*expr_p
) = cond
;
/* Gimplify both arms to values; the overall status is the weakest
   (MIN) of the individual results.  */
4560 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
4561 is_gimple_val
, fb_rvalue
);
4562 ret
= MIN (ret
, tret
);
4563 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
4564 is_gimple_val
, fb_rvalue
);
4566 return MIN (ret
, tret
);
4569 /* Return true if evaluating EXPR could trap.
4570 EXPR is GENERIC, while tree_could_trap_p can be called
4574 generic_expr_could_trap_p (tree expr
)
4578 if (!expr
|| is_gimple_val (expr
))
4581 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
4584 n
= TREE_OPERAND_LENGTH (expr
);
4585 for (i
= 0; i
< n
; i
++)
4586 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
4592 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4601 The second form is used when *EXPR_P is of type void.
4603 PRE_P points to the list where side effects that must happen before
4604 *EXPR_P should be stored. */
4606 static enum gimplify_status
4607 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
4609 tree expr
= *expr_p
;
4610 tree type
= TREE_TYPE (expr
);
4611 location_t loc
= EXPR_LOCATION (expr
);
4612 tree tmp
, arm1
, arm2
;
4613 enum gimplify_status ret
;
4614 tree label_true
, label_false
, label_cont
;
4615 bool have_then_clause_p
, have_else_clause_p
;
4617 enum tree_code pred_code
;
4618 gimple_seq seq
= NULL
;
4620 /* If this COND_EXPR has a value, copy the values into a temporary within
4622 if (!VOID_TYPE_P (type
))
4624 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
4627 /* If either an rvalue is ok or we do not require an lvalue, create the
4628 temporary. But we cannot do that if the type is addressable. */
4629 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
4630 && !TREE_ADDRESSABLE (type
))
4632 if (gimplify_ctxp
->allow_rhs_cond_expr
4633 /* If either branch has side effects or could trap, it can't be
4634 evaluated unconditionally. */
4635 && !TREE_SIDE_EFFECTS (then_
)
4636 && !generic_expr_could_trap_p (then_
)
4637 && !TREE_SIDE_EFFECTS (else_
)
4638 && !generic_expr_could_trap_p (else_
))
4639 return gimplify_pure_cond_expr (expr_p
, pre_p
);
4641 tmp
= create_tmp_var (type
, "iftmp");
4645 /* Otherwise, only create and copy references to the values. */
4648 type
= build_pointer_type (type
);
4650 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4651 then_
= build_fold_addr_expr_loc (loc
, then_
);
4653 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4654 else_
= build_fold_addr_expr_loc (loc
, else_
);
4657 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
4659 tmp
= create_tmp_var (type
, "iftmp");
4660 result
= build_simple_mem_ref_loc (loc
, tmp
);
4663 /* Build the new then clause, `tmp = then_;'. But don't build the
4664 assignment if the value is void; in C++ it can be if it's a throw. */
4665 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4666 TREE_OPERAND (expr
, 1) = build2 (INIT_EXPR
, type
, tmp
, then_
);
4668 /* Similarly, build the new else clause, `tmp = else_;'. */
4669 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4670 TREE_OPERAND (expr
, 2) = build2 (INIT_EXPR
, type
, tmp
, else_
);
4672 TREE_TYPE (expr
) = void_type_node
;
4673 recalculate_side_effects (expr
);
4675 /* Move the COND_EXPR to the prequeue. */
4676 gimplify_stmt (&expr
, pre_p
);
4682 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4683 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
4684 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
4685 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
4687 /* Make sure the condition has BOOLEAN_TYPE. */
4688 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4690 /* Break apart && and || conditions. */
4691 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
4692 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
4694 expr
= shortcut_cond_expr (expr
);
4696 if (expr
!= *expr_p
)
4700 /* We can't rely on gimplify_expr to re-gimplify the expanded
4701 form properly, as cleanups might cause the target labels to be
4702 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4703 set up a conditional context. */
4704 gimple_push_condition ();
4705 gimplify_stmt (expr_p
, &seq
);
4706 gimple_pop_condition (pre_p
);
4707 gimple_seq_add_seq (pre_p
, seq
);
4713 /* Now do the normal gimplification. */
4715 /* Gimplify condition. */
4716 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
,
4717 is_gimple_condexpr_for_cond
, fb_rvalue
);
4718 if (ret
== GS_ERROR
)
4720 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
4722 gimple_push_condition ();
4724 have_then_clause_p
= have_else_clause_p
= false;
4725 label_true
= find_goto_label (TREE_OPERAND (expr
, 1));
4727 && DECL_CONTEXT (GOTO_DESTINATION (label_true
)) == current_function_decl
4728 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4729 have different locations, otherwise we end up with incorrect
4730 location information on the branches. */
4732 || !EXPR_HAS_LOCATION (expr
)
4733 || !rexpr_has_location (label_true
)
4734 || EXPR_LOCATION (expr
) == rexpr_location (label_true
)))
4736 have_then_clause_p
= true;
4737 label_true
= GOTO_DESTINATION (label_true
);
4740 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4741 label_false
= find_goto_label (TREE_OPERAND (expr
, 2));
4743 && DECL_CONTEXT (GOTO_DESTINATION (label_false
)) == current_function_decl
4744 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4745 have different locations, otherwise we end up with incorrect
4746 location information on the branches. */
4748 || !EXPR_HAS_LOCATION (expr
)
4749 || !rexpr_has_location (label_false
)
4750 || EXPR_LOCATION (expr
) == rexpr_location (label_false
)))
4752 have_else_clause_p
= true;
4753 label_false
= GOTO_DESTINATION (label_false
);
4756 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
4758 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
4760 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
4762 gimple_set_location (cond_stmt
, EXPR_LOCATION (expr
));
4763 copy_warning (cond_stmt
, COND_EXPR_COND (expr
));
4764 gimplify_seq_add_stmt (&seq
, cond_stmt
);
4765 gimple_stmt_iterator gsi
= gsi_last (seq
);
4766 maybe_fold_stmt (&gsi
);
4768 label_cont
= NULL_TREE
;
4769 if (!have_then_clause_p
)
4771 /* For if (...) {} else { code; } put label_true after
4773 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4774 && !have_else_clause_p
4775 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4777 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4778 handling that label_cont == label_true can be only reached
4779 through fallthrough from { code; }. */
4780 if (integer_zerop (COND_EXPR_COND (expr
)))
4781 UNUSED_LABEL_P (label_true
) = 1;
4782 label_cont
= label_true
;
4786 bool then_side_effects
4787 = (TREE_OPERAND (expr
, 1)
4788 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr
, 1)));
4789 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4790 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4791 /* For if (...) { code; } else {} or
4792 if (...) { code; } else goto label; or
4793 if (...) { code; return; } else { ... }
4794 label_cont isn't needed. */
4795 if (!have_else_clause_p
4796 && TREE_OPERAND (expr
, 2) != NULL_TREE
4797 && gimple_seq_may_fallthru (seq
))
4800 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4802 /* For if (0) { non-side-effect-code } else { code }
4803 tell -Wimplicit-fallthrough handling that label_cont can
4804 be only reached through fallthrough from { code }. */
4805 if (integer_zerop (COND_EXPR_COND (expr
)))
4807 UNUSED_LABEL_P (label_true
) = 1;
4808 if (!then_side_effects
)
4809 UNUSED_LABEL_P (label_cont
) = 1;
4812 g
= gimple_build_goto (label_cont
);
4814 /* GIMPLE_COND's are very low level; they have embedded
4815 gotos. This particular embedded goto should not be marked
4816 with the location of the original COND_EXPR, as it would
4817 correspond to the COND_EXPR's condition, not the ELSE or the
4818 THEN arms. To avoid marking it with the wrong location, flag
4819 it as "no location". */
4820 gimple_set_do_not_emit_location (g
);
4822 gimplify_seq_add_stmt (&seq
, g
);
4826 if (!have_else_clause_p
)
4828 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4829 tell -Wimplicit-fallthrough handling that label_false can be only
4830 reached through fallthrough from { code }. */
4831 if (integer_nonzerop (COND_EXPR_COND (expr
))
4832 && (TREE_OPERAND (expr
, 2) == NULL_TREE
4833 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr
, 2))))
4834 UNUSED_LABEL_P (label_false
) = 1;
4835 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4836 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
4839 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4841 gimple_pop_condition (pre_p
);
4842 gimple_seq_add_seq (pre_p
, seq
);
4844 if (ret
== GS_ERROR
)
4846 else if (have_then_clause_p
|| have_else_clause_p
)
4850 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4851 expr
= TREE_OPERAND (expr
, 0);
4852 gimplify_stmt (&expr
, pre_p
);
4859 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4860 to be marked addressable.
4862 We cannot rely on such an expression being directly markable if a temporary
4863 has been created by the gimplification. In this case, we create another
4864 temporary and initialize it with a copy, which will become a store after we
4865 mark it addressable. This can happen if the front-end passed us something
4866 that it could not mark addressable yet, like a Fortran pass-by-reference
4867 parameter (int) floatvar. */
4870 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4872 while (handled_component_p (*expr_p
))
4873 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4875 /* Do not allow an SSA name as the temporary. */
4876 if (is_gimple_reg (*expr_p
))
4877 *expr_p
= internal_get_tmp_var (*expr_p
, seq_p
, NULL
, false, false, true);
4880 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4881 a call to __builtin_memcpy. */
4883 static enum gimplify_status
4884 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4887 tree t
, to
, to_ptr
, from
, from_ptr
;
4889 location_t loc
= EXPR_LOCATION (*expr_p
);
4891 to
= TREE_OPERAND (*expr_p
, 0);
4892 from
= TREE_OPERAND (*expr_p
, 1);
4893 gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to
)))
4894 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from
))));
4896 /* Mark the RHS addressable. Beware that it may not be possible to do so
4897 directly if a temporary has been created by the gimplification. */
4898 prepare_gimple_addressable (&from
, seq_p
);
4900 mark_addressable (from
);
4901 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4902 gimplify_arg (&from_ptr
, seq_p
, loc
);
4904 mark_addressable (to
);
4905 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4906 gimplify_arg (&to_ptr
, seq_p
, loc
);
4908 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4910 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4911 gimple_call_set_alloca_for_var (gs
, true);
4915 /* tmp = memcpy() */
4916 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4917 gimple_call_set_lhs (gs
, t
);
4918 gimplify_seq_add_stmt (seq_p
, gs
);
4920 *expr_p
= build_simple_mem_ref (t
);
4924 gimplify_seq_add_stmt (seq_p
, gs
);
4929 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4930 a call to __builtin_memset. In this case we know that the RHS is
4931 a CONSTRUCTOR with an empty element list. */
4933 static enum gimplify_status
4934 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4937 tree t
, from
, to
, to_ptr
;
4939 location_t loc
= EXPR_LOCATION (*expr_p
);
4941 /* Assert our assumptions, to abort instead of producing wrong code
4942 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4943 not be immediately exposed. */
4944 from
= TREE_OPERAND (*expr_p
, 1);
4945 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4946 from
= TREE_OPERAND (from
, 0);
4948 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4949 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4952 to
= TREE_OPERAND (*expr_p
, 0);
4953 gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to
))));
4955 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4956 gimplify_arg (&to_ptr
, seq_p
, loc
);
4957 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4959 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4963 /* tmp = memset() */
4964 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4965 gimple_call_set_lhs (gs
, t
);
4966 gimplify_seq_add_stmt (seq_p
, gs
);
4968 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4972 gimplify_seq_add_stmt (seq_p
, gs
);
4977 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4978 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4979 assignment. Return non-null if we detect a potential overlap. */
4981 struct gimplify_init_ctor_preeval_data
4983 /* The base decl of the lhs object. May be NULL, in which case we
4984 have to assume the lhs is indirect. */
4987 /* The alias set of the lhs object. */
4988 alias_set_type lhs_alias_set
;
4992 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4994 struct gimplify_init_ctor_preeval_data
*data
4995 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4998 /* If we find the base object, obviously we have overlap. */
4999 if (data
->lhs_base_decl
== t
)
5002 /* If the constructor component is indirect, determine if we have a
5003 potential overlap with the lhs. The only bits of information we
5004 have to go on at this point are addressability and alias sets. */
5005 if ((INDIRECT_REF_P (t
)
5006 || TREE_CODE (t
) == MEM_REF
)
5007 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
5008 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
5011 /* If the constructor component is a call, determine if it can hide a
5012 potential overlap with the lhs through an INDIRECT_REF like above.
5013 ??? Ugh - this is completely broken. In fact this whole analysis
5014 doesn't look conservative. */
5015 if (TREE_CODE (t
) == CALL_EXPR
)
5017 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
5019 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
5020 if (POINTER_TYPE_P (TREE_VALUE (type
))
5021 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
5022 && alias_sets_conflict_p (data
->lhs_alias_set
,
5024 (TREE_TYPE (TREE_VALUE (type
)))))
5028 if (IS_TYPE_OR_DECL_P (t
))
5033 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
5034 force values that overlap with the lhs (as described by *DATA)
5035 into temporaries. */
5038 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5039 struct gimplify_init_ctor_preeval_data
*data
)
5041 enum gimplify_status one
;
5043 /* If the value is constant, then there's nothing to pre-evaluate. */
5044 if (TREE_CONSTANT (*expr_p
))
5046 /* Ensure it does not have side effects, it might contain a reference to
5047 the object we're initializing. */
5048 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
5052 /* If the type has non-trivial constructors, we can't pre-evaluate. */
5053 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
5056 /* Recurse for nested constructors. */
5057 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
5059 unsigned HOST_WIDE_INT ix
;
5060 constructor_elt
*ce
;
5061 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
5063 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
5064 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
5069 /* If this is a variable sized type, we must remember the size. */
5070 maybe_with_size_expr (expr_p
);
5072 /* Gimplify the constructor element to something appropriate for the rhs
5073 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
5074 the gimplifier will consider this a store to memory. Doing this
5075 gimplification now means that we won't have to deal with complicated
5076 language-specific trees, nor trees like SAVE_EXPR that can induce
5077 exponential search behavior. */
5078 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
5079 if (one
== GS_ERROR
)
5085 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
5086 with the lhs, since "a = { .x=a }" doesn't make sense. This will
5087 always be true for all scalars, since is_gimple_mem_rhs insists on a
5088 temporary variable for them. */
5089 if (DECL_P (*expr_p
))
5092 /* If this is of variable size, we have no choice but to assume it doesn't
5093 overlap since we can't make a temporary for it. */
5094 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
5097 /* Otherwise, we must search for overlap ... */
5098 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
5101 /* ... and if found, force the value into a temporary. */
5102 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
5105 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
5106 a RANGE_EXPR in a CONSTRUCTOR for an array.
5110 object[var] = value;
5117 We increment var _after_ the loop exit check because we might otherwise
5118 fail if upper == TYPE_MAX_VALUE (type for upper).
5120 Note that we never have to deal with SAVE_EXPRs here, because this has
5121 already been taken care of for us, in gimplify_init_ctor_preeval(). */
5123 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
5124 gimple_seq
*, bool);
5127 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
5128 tree value
, tree array_elt_type
,
5129 gimple_seq
*pre_p
, bool cleared
)
5131 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
5132 tree var
, var_type
, cref
, tmp
;
5134 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
5135 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
5136 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
5138 /* Create and initialize the index variable. */
5139 var_type
= TREE_TYPE (upper
);
5140 var
= create_tmp_var (var_type
);
5141 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
5143 /* Add the loop entry label. */
5144 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
5146 /* Build the reference. */
5147 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
5148 var
, NULL_TREE
, NULL_TREE
);
5150 /* If we are a constructor, just call gimplify_init_ctor_eval to do
5151 the store. Otherwise just assign value to the reference. */
5153 if (TREE_CODE (value
) == CONSTRUCTOR
)
5154 /* NB we might have to call ourself recursively through
5155 gimplify_init_ctor_eval if the value is a constructor. */
5156 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
5160 if (gimplify_expr (&value
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
5162 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
5165 /* We exit the loop when the index var is equal to the upper bound. */
5166 gimplify_seq_add_stmt (pre_p
,
5167 gimple_build_cond (EQ_EXPR
, var
, upper
,
5168 loop_exit_label
, fall_thru_label
));
5170 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
5172 /* Otherwise, increment the index var... */
5173 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
5174 fold_convert (var_type
, integer_one_node
));
5175 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
5177 /* ...and jump back to the loop entry. */
5178 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
5180 /* Add the loop exit label. */
5181 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
5184 /* A subroutine of gimplify_init_constructor. Generate individual
5185 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
5186 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
5187 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
5191 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
5192 gimple_seq
*pre_p
, bool cleared
)
5194 tree array_elt_type
= NULL
;
5195 unsigned HOST_WIDE_INT ix
;
5196 tree purpose
, value
;
5198 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
5199 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
5201 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
5205 /* NULL values are created above for gimplification errors. */
5209 if (cleared
&& initializer_zerop (value
))
5212 /* ??? Here's to hoping the front end fills in all of the indices,
5213 so we don't have to figure out what's missing ourselves. */
5214 gcc_assert (purpose
);
5216 /* Skip zero-sized fields, unless value has side-effects. This can
5217 happen with calls to functions returning a empty type, which
5218 we shouldn't discard. As a number of downstream passes don't
5219 expect sets of empty type fields, we rely on the gimplification of
5220 the MODIFY_EXPR we make below to drop the assignment statement. */
5221 if (!TREE_SIDE_EFFECTS (value
)
5222 && TREE_CODE (purpose
) == FIELD_DECL
5223 && is_empty_type (TREE_TYPE (purpose
)))
5226 /* If we have a RANGE_EXPR, we have to build a loop to assign the
5228 if (TREE_CODE (purpose
) == RANGE_EXPR
)
5230 tree lower
= TREE_OPERAND (purpose
, 0);
5231 tree upper
= TREE_OPERAND (purpose
, 1);
5233 /* If the lower bound is equal to upper, just treat it as if
5234 upper was the index. */
5235 if (simple_cst_equal (lower
, upper
))
5239 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
5240 array_elt_type
, pre_p
, cleared
);
5247 /* Do not use bitsizetype for ARRAY_REF indices. */
5248 if (TYPE_DOMAIN (TREE_TYPE (object
)))
5250 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
5252 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
5253 purpose
, NULL_TREE
, NULL_TREE
);
5257 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
5258 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
5259 unshare_expr (object
), purpose
, NULL_TREE
);
5262 if (TREE_CODE (value
) == CONSTRUCTOR
5263 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
5264 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
5268 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
5269 gimplify_and_add (init
, pre_p
);
5275 /* Return the appropriate RHS predicate for this LHS. */
5278 rhs_predicate_for (tree lhs
)
5280 if (is_gimple_reg (lhs
))
5281 return is_gimple_reg_rhs_or_call
;
5283 return is_gimple_mem_rhs_or_call
;
5286 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5287 before the LHS has been gimplified. */
5289 static gimple_predicate
5290 initial_rhs_predicate_for (tree lhs
)
5292 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
5293 return is_gimple_reg_rhs_or_call
;
5295 return is_gimple_mem_rhs_or_call
;
5298 /* Gimplify a C99 compound literal expression. This just means adding
5299 the DECL_EXPR before the current statement and using its anonymous
5302 static enum gimplify_status
5303 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
5304 bool (*gimple_test_f
) (tree
),
5305 fallback_t fallback
)
5307 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
5308 tree decl
= DECL_EXPR_DECL (decl_s
);
5309 tree init
= DECL_INITIAL (decl
);
5310 /* Mark the decl as addressable if the compound literal
5311 expression is addressable now, otherwise it is marked too late
5312 after we gimplify the initialization expression. */
5313 if (TREE_ADDRESSABLE (*expr_p
))
5314 TREE_ADDRESSABLE (decl
) = 1;
5315 /* Otherwise, if we don't need an lvalue and have a literal directly
5316 substitute it. Check if it matches the gimple predicate, as
5317 otherwise we'd generate a new temporary, and we can as well just
5318 use the decl we already have. */
5319 else if (!TREE_ADDRESSABLE (decl
)
5320 && !TREE_THIS_VOLATILE (decl
)
5322 && (fallback
& fb_lvalue
) == 0
5323 && gimple_test_f (init
))
5329 /* If the decl is not addressable, then it is being used in some
5330 expression or on the right hand side of a statement, and it can
5331 be put into a readonly data section. */
5332 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
5333 TREE_READONLY (decl
) = 1;
5335 /* This decl isn't mentioned in the enclosing block, so add it to the
5336 list of temps. FIXME it seems a bit of a kludge to say that
5337 anonymous artificial vars aren't pushed, but everything else is. */
5338 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
5339 gimple_add_tmp_var (decl
);
5341 gimplify_and_add (decl_s
, pre_p
);
5346 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5347 return a new CONSTRUCTOR if something changed. */
5350 optimize_compound_literals_in_ctor (tree orig_ctor
)
5352 tree ctor
= orig_ctor
;
5353 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
5354 unsigned int idx
, num
= vec_safe_length (elts
);
5356 for (idx
= 0; idx
< num
; idx
++)
5358 tree value
= (*elts
)[idx
].value
;
5359 tree newval
= value
;
5360 if (TREE_CODE (value
) == CONSTRUCTOR
)
5361 newval
= optimize_compound_literals_in_ctor (value
);
5362 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
5364 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
5365 tree decl
= DECL_EXPR_DECL (decl_s
);
5366 tree init
= DECL_INITIAL (decl
);
5368 if (!TREE_ADDRESSABLE (value
)
5369 && !TREE_ADDRESSABLE (decl
)
5371 && TREE_CODE (init
) == CONSTRUCTOR
)
5372 newval
= optimize_compound_literals_in_ctor (init
);
5374 if (newval
== value
)
5377 if (ctor
== orig_ctor
)
5379 ctor
= copy_node (orig_ctor
);
5380 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
5381 elts
= CONSTRUCTOR_ELTS (ctor
);
5383 (*elts
)[idx
].value
= newval
;
5388 /* A subroutine of gimplify_modify_expr. Break out elements of a
5389 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5391 Note that we still need to clear any elements that don't have explicit
5392 initializers, so if not all elements are initialized we keep the
5393 original MODIFY_EXPR, we just remove all of the constructor elements.
5395 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5396 GS_ERROR if we would have to create a temporary when gimplifying
5397 this constructor. Otherwise, return GS_OK.
5399 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5401 static enum gimplify_status
5402 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5403 bool want_value
, bool notify_temp_creation
)
5405 tree object
, ctor
, type
;
5406 enum gimplify_status ret
;
5407 vec
<constructor_elt
, va_gc
> *elts
;
5408 bool cleared
= false;
5409 bool is_empty_ctor
= false;
5410 bool is_init_expr
= (TREE_CODE (*expr_p
) == INIT_EXPR
);
5412 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
5414 if (!notify_temp_creation
)
5416 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
5417 is_gimple_lvalue
, fb_lvalue
);
5418 if (ret
== GS_ERROR
)
5422 object
= TREE_OPERAND (*expr_p
, 0);
5423 ctor
= TREE_OPERAND (*expr_p
, 1)
5424 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
5425 type
= TREE_TYPE (ctor
);
5426 elts
= CONSTRUCTOR_ELTS (ctor
);
5429 switch (TREE_CODE (type
))
5433 case QUAL_UNION_TYPE
:
5436 /* Use readonly data for initializers of this or smaller size
5437 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5439 const HOST_WIDE_INT min_unique_size
= 64;
5440 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5441 is smaller than this, use readonly data. */
5442 const int unique_nonzero_ratio
= 8;
5443 /* True if a single access of the object must be ensured. This is the
5444 case if the target is volatile, the type is non-addressable and more
5445 than one field need to be assigned. */
5446 const bool ensure_single_access
5447 = TREE_THIS_VOLATILE (object
)
5448 && !TREE_ADDRESSABLE (type
)
5449 && vec_safe_length (elts
) > 1;
5450 struct gimplify_init_ctor_preeval_data preeval_data
;
5451 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
5452 HOST_WIDE_INT num_unique_nonzero_elements
;
5453 bool complete_p
, valid_const_initializer
;
5455 /* Aggregate types must lower constructors to initialization of
5456 individual elements. The exception is that a CONSTRUCTOR node
5457 with no elements indicates zero-initialization of the whole. */
5458 if (vec_safe_is_empty (elts
))
5460 if (notify_temp_creation
)
5463 /* The var will be initialized and so appear on lhs of
5464 assignment, it can't be TREE_READONLY anymore. */
5466 TREE_READONLY (object
) = 0;
5468 is_empty_ctor
= true;
5472 /* Fetch information about the constructor to direct later processing.
5473 We might want to make static versions of it in various cases, and
5474 can only do so if it known to be a valid constant initializer. */
5475 valid_const_initializer
5476 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
5477 &num_unique_nonzero_elements
,
5478 &num_ctor_elements
, &complete_p
);
5480 /* If a const aggregate variable is being initialized, then it
5481 should never be a lose to promote the variable to be static. */
5482 if (valid_const_initializer
5483 && num_nonzero_elements
> 1
5484 && TREE_READONLY (object
)
5486 && !DECL_REGISTER (object
)
5487 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
)
5488 || DECL_MERGEABLE (object
))
5489 /* For ctors that have many repeated nonzero elements
5490 represented through RANGE_EXPRs, prefer initializing
5491 those through runtime loops over copies of large amounts
5492 of data from readonly data section. */
5493 && (num_unique_nonzero_elements
5494 > num_nonzero_elements
/ unique_nonzero_ratio
5495 || ((unsigned HOST_WIDE_INT
) int_size_in_bytes (type
)
5496 <= (unsigned HOST_WIDE_INT
) min_unique_size
)))
5498 if (notify_temp_creation
)
5501 DECL_INITIAL (object
) = ctor
;
5502 TREE_STATIC (object
) = 1;
5503 if (!DECL_NAME (object
))
5504 DECL_NAME (object
) = create_tmp_var_name ("C");
5505 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
5507 /* ??? C++ doesn't automatically append a .<number> to the
5508 assembler name, and even when it does, it looks at FE private
5509 data structures to figure out what that number should be,
5510 which are not set for this variable. I suppose this is
5511 important for local statics for inline functions, which aren't
5512 "local" in the object file sense. So in order to get a unique
5513 TU-local symbol, we must invoke the lhd version now. */
5514 lhd_set_decl_assembler_name (object
);
5516 *expr_p
= NULL_TREE
;
5520 /* The var will be initialized and so appear on lhs of
5521 assignment, it can't be TREE_READONLY anymore. */
5522 if (VAR_P (object
) && !notify_temp_creation
)
5523 TREE_READONLY (object
) = 0;
5525 /* If there are "lots" of initialized elements, even discounting
5526 those that are not address constants (and thus *must* be
5527 computed at runtime), then partition the constructor into
5528 constant and non-constant parts. Block copy the constant
5529 parts in, then generate code for the non-constant parts. */
5530 /* TODO. There's code in cp/typeck.cc to do this. */
5532 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
5533 /* store_constructor will ignore the clearing of variable-sized
5534 objects. Initializers for such objects must explicitly set
5535 every field that needs to be set. */
5537 else if (!complete_p
)
5538 /* If the constructor isn't complete, clear the whole object
5539 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5541 ??? This ought not to be needed. For any element not present
5542 in the initializer, we should simply set them to zero. Except
5543 we'd need to *find* the elements that are not present, and that
5544 requires trickery to avoid quadratic compile-time behavior in
5545 large cases or excessive memory use in small cases. */
5546 cleared
= !CONSTRUCTOR_NO_CLEARING (ctor
);
5547 else if (num_ctor_elements
- num_nonzero_elements
5548 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
5549 && num_nonzero_elements
< num_ctor_elements
/ 4)
5550 /* If there are "lots" of zeros, it's more efficient to clear
5551 the memory and then set the nonzero elements. */
5553 else if (ensure_single_access
&& num_nonzero_elements
== 0)
5554 /* If a single access to the target must be ensured and all elements
5555 are zero, then it's optimal to clear whatever their number. */
5560 /* If there are "lots" of initialized elements, and all of them
5561 are valid address constants, then the entire initializer can
5562 be dropped to memory, and then memcpy'd out. Don't do this
5563 for sparse arrays, though, as it's more efficient to follow
5564 the standard CONSTRUCTOR behavior of memset followed by
5565 individual element initialization. Also don't do this for small
5566 all-zero initializers (which aren't big enough to merit
5567 clearing), and don't try to make bitwise copies of
5568 TREE_ADDRESSABLE types. */
5569 if (valid_const_initializer
5571 && !(cleared
|| num_nonzero_elements
== 0)
5572 && !TREE_ADDRESSABLE (type
))
5574 HOST_WIDE_INT size
= int_size_in_bytes (type
);
5577 /* ??? We can still get unbounded array types, at least
5578 from the C++ front end. This seems wrong, but attempt
5579 to work around it for now. */
5582 size
= int_size_in_bytes (TREE_TYPE (object
));
5584 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
5587 /* Find the maximum alignment we can assume for the object. */
5588 /* ??? Make use of DECL_OFFSET_ALIGN. */
5589 if (DECL_P (object
))
5590 align
= DECL_ALIGN (object
);
5592 align
= TYPE_ALIGN (type
);
5594 /* Do a block move either if the size is so small as to make
5595 each individual move a sub-unit move on average, or if it
5596 is so large as to make individual moves inefficient. */
5598 && num_nonzero_elements
> 1
5599 /* For ctors that have many repeated nonzero elements
5600 represented through RANGE_EXPRs, prefer initializing
5601 those through runtime loops over copies of large amounts
5602 of data from readonly data section. */
5603 && (num_unique_nonzero_elements
5604 > num_nonzero_elements
/ unique_nonzero_ratio
5605 || size
<= min_unique_size
)
5606 && (size
< num_nonzero_elements
5607 || !can_move_by_pieces (size
, align
)))
5609 if (notify_temp_creation
)
5612 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
5613 ctor
= tree_output_constant_def (ctor
);
5614 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
5615 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
5616 TREE_OPERAND (*expr_p
, 1) = ctor
;
5618 /* This is no longer an assignment of a CONSTRUCTOR, but
5619 we still may have processing to do on the LHS. So
5620 pretend we didn't do anything here to let that happen. */
5621 return GS_UNHANDLED
;
5625 /* If a single access to the target must be ensured and there are
5626 nonzero elements or the zero elements are not assigned en masse,
5627 initialize the target from a temporary. */
5628 if (ensure_single_access
&& (num_nonzero_elements
> 0 || !cleared
))
5630 if (notify_temp_creation
)
5633 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
5634 TREE_OPERAND (*expr_p
, 0) = temp
;
5635 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
5637 build2 (MODIFY_EXPR
, void_type_node
,
5642 if (notify_temp_creation
)
5645 /* If there are nonzero elements and if needed, pre-evaluate to capture
5646 elements overlapping with the lhs into temporaries. We must do this
5647 before clearing to fetch the values before they are zeroed-out. */
5648 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
5650 preeval_data
.lhs_base_decl
= get_base_address (object
);
5651 if (!DECL_P (preeval_data
.lhs_base_decl
))
5652 preeval_data
.lhs_base_decl
= NULL
;
5653 preeval_data
.lhs_alias_set
= get_alias_set (object
);
5655 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
5656 pre_p
, post_p
, &preeval_data
);
5659 bool ctor_has_side_effects_p
5660 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
5664 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5665 Note that we still have to gimplify, in order to handle the
5666 case of variable sized types. Avoid shared tree structures. */
5667 CONSTRUCTOR_ELTS (ctor
) = NULL
;
5668 TREE_SIDE_EFFECTS (ctor
) = 0;
5669 object
= unshare_expr (object
);
5670 gimplify_stmt (expr_p
, pre_p
);
5673 /* If we have not block cleared the object, or if there are nonzero
5674 elements in the constructor, or if the constructor has side effects,
5675 add assignments to the individual scalar fields of the object. */
5677 || num_nonzero_elements
> 0
5678 || ctor_has_side_effects_p
)
5679 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
5681 *expr_p
= NULL_TREE
;
5689 if (notify_temp_creation
)
5692 /* Extract the real and imaginary parts out of the ctor. */
5693 gcc_assert (elts
->length () == 2);
5694 r
= (*elts
)[0].value
;
5695 i
= (*elts
)[1].value
;
5696 if (r
== NULL
|| i
== NULL
)
5698 tree zero
= build_zero_cst (TREE_TYPE (type
));
5705 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5706 represent creation of a complex value. */
5707 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
5709 ctor
= build_complex (type
, r
, i
);
5710 TREE_OPERAND (*expr_p
, 1) = ctor
;
5714 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
5715 TREE_OPERAND (*expr_p
, 1) = ctor
;
5716 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
5719 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
5727 unsigned HOST_WIDE_INT ix
;
5728 constructor_elt
*ce
;
5730 if (notify_temp_creation
)
5733 /* Vector types use CONSTRUCTOR all the way through gimple
5734 compilation as a general initializer. */
5735 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
5737 enum gimplify_status tret
;
5738 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
5740 if (tret
== GS_ERROR
)
5742 else if (TREE_STATIC (ctor
)
5743 && !initializer_constant_valid_p (ce
->value
,
5744 TREE_TYPE (ce
->value
)))
5745 TREE_STATIC (ctor
) = 0;
5747 recompute_constructor_flags (ctor
);
5749 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5750 if (TREE_CONSTANT (ctor
))
5752 bool constant_p
= true;
5755 /* Even when ctor is constant, it might contain non-*_CST
5756 elements, such as addresses or trapping values like
5757 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5758 in VECTOR_CST nodes. */
5759 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
5760 if (!CONSTANT_CLASS_P (value
))
5768 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
5773 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
5774 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
5779 /* So how did we get a CONSTRUCTOR for a scalar type? */
5783 if (ret
== GS_ERROR
)
5785 /* If we have gimplified both sides of the initializer but have
5786 not emitted an assignment, do so now. */
5788 /* If the type is an empty type, we don't need to emit the
5790 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
5792 tree lhs
= TREE_OPERAND (*expr_p
, 0);
5793 tree rhs
= TREE_OPERAND (*expr_p
, 1);
5794 if (want_value
&& object
== lhs
)
5795 lhs
= unshare_expr (lhs
);
5796 gassign
*init
= gimple_build_assign (lhs
, rhs
);
5797 gimplify_seq_add_stmt (pre_p
, init
);
5810 /* If the user requests to initialize automatic variables, we
5811 should initialize paddings inside the variable. Add a call to
5812 __builtin_clear_pading (&object, 0, for_auto_init = true) to
5813 initialize paddings of object always to zero regardless of
5814 INIT_TYPE. Note, we will not insert this call if the aggregate
5815 variable has be completely cleared already or it's initialized
5816 with an empty constructor. We cannot insert this call if the
5817 variable is a gimple register since __builtin_clear_padding will take
5818 the address of the variable. As a result, if a long double/_Complex long
5819 double variable will be spilled into stack later, its padding cannot
5820 be cleared with __builtin_clear_padding. We should clear its padding
5821 when it is spilled into memory. */
5823 && !is_gimple_reg (object
)
5824 && clear_padding_type_may_have_padding_p (type
)
5825 && ((AGGREGATE_TYPE_P (type
) && !cleared
&& !is_empty_ctor
)
5826 || !AGGREGATE_TYPE_P (type
))
5827 && is_var_need_auto_init (object
))
5828 gimple_add_padding_init_for_auto_var (object
, false, pre_p
);
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Thin wrapper: delegate to the generic folder, which returns
     NULL_TREE when no simplification applies.  */
  return gimple_fold_indirect_ref (t);
}
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   EXPR_P points at the whole MODIFY_EXPR/INIT_EXPR; FROM_P and TO_P
   point at its RHS and LHS operands respectively.  PRE_P/POST_P
   collect side-effect statements.  Returns GS_UNHANDLED when no
   simplification was applied, GS_OK/GS_ALL_DONE/GS_ERROR otherwise.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor and not volatile, do the direct assignment from
	     the constructor, but only if the target is not volatile either
	     since this latter assignment might end up being done on a per
	     field basis.  However, if the target is volatile and the type
	     is aggregate and non-addressable, gimplify_init_constructor
	     knows that it needs to ensure a single access to the target
	     and it will return GS_OK only in this case.  */
	  if (TREE_READONLY (*from_p)
	      && DECL_INITIAL (*from_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
	      && !TREE_THIS_VOLATILE (*from_p)
	      && (!TREE_THIS_VOLATILE (*to_p)
		  || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
		      && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
	    /* If we have code like

	       *(const A*)(A*)&x

	       where the type of "x" is a (possibly cv-qualified variant
	       of "A"), treat the entire expression as identical to "x".
	       This kind of code arises in C++ when an object is bound
	       to a const reference, and if "x" is a TARGET_EXPR we want
	       to take advantage of the optimization below.  But not if
	       the type is TREE_ADDRESSABLE; then C++17 says that the
	       TARGET_EXPR needs to be a temporary.  */
	    if (tree t
		= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
	      {
		bool volatile_p = TREE_THIS_VOLATILE (*from_p);
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	  break;

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& (TREE_CODE (*expr_p) != MODIFY_EXPR
		    || !TARGET_EXPR_NO_ELIDE (*from_p))
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;

	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					   false);
	  return ret;

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case NOP_EXPR:
	  /* Pull out compound literal expressions from a NOP_EXPR.
	     Those are created in the C FE to drop qualifiers during
	     lvalue conversion.  */
	  if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
	      && tree_ssa_useless_type_conversion (*from_p))
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      ret = GS_OK;
	      changed = true;
	    }
	  break;

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  break;

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
/* Return true if T looks like a valid GIMPLE statement.  */

/* NOTE(review): the case-label list below was reconstructed from a
   lossy extraction; confirm the exact membership against the current
   gimplify.cc before relying on it.  */

bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OACC_PARALLEL:
    case OACC_KERNELS:
    case OACC_SERIAL:
    case OACC_DATA:
    case OACC_HOST_DATA:
    case OACC_DECLARE:
    case OACC_UPDATE:
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OACC_CACHE:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OACC_LOOP:
    case OMP_SCAN:
    case OMP_SCOPE:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_STRUCTURED_BLOCK:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_MASKED:
    case OMP_TASKGROUP:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
    case OMP_TASKLOOP:
    case OMP_TEAMS:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}
/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.

   IMPORTANT NOTE: This promotion is performed by introducing a load of the
   other, unmodified part of the complex object just before the total store.
   As a consequence, if the object is still uninitialized, an undefined value
   will be loaded into a register, which may result in a spurious exception
   if the register is floating-point and the value happens to be a signaling
   NaN for example.  Then the fully-fledged complex operations lowering pass
   followed by a DCE pass are necessary in order to fix things up.  */

static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
				   bool want_value)
{
  enum tree_code code, ocode;
  tree lhs, rhs, new_rhs, other, realpart, imagpart;

  lhs = TREE_OPERAND (*expr_p, 0);
  rhs = TREE_OPERAND (*expr_p, 1);
  /* CODE is REALPART_EXPR or IMAGPART_EXPR; strip it off the lhs to get
     at the underlying complex register.  */
  code = TREE_CODE (lhs);
  lhs = TREE_OPERAND (lhs, 0);

  /* Load the part of the complex value NOT being stored, into a formal
     temporary, so the combined store below reads a stable value.  */
  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
  other = build1 (ocode, TREE_TYPE (rhs), lhs);
  suppress_warning (other);
  other = get_formal_tmp_var (other, pre_p);

  realpart = code == REALPART_EXPR ? rhs : other;
  imagpart = code == REALPART_EXPR ? other : rhs;

  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
  else
    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);

  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
  *expr_p = (want_value) ? rhs : NULL_TREE;

  return GS_ALL_DONE;
}
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  if (error_operand_p (*from_p) || error_operand_p (*to_p))
    return GS_ERROR;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value);
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
	{
	  /* Force the clobbered lvalue into a MEM_REF through a temporary
	     holding its address.  */
	  tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
					       pre_p, post_p);
	  *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
	}
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Convert initialization from an empty variable-size CONSTRUCTOR to
     a memset.  */
  if (TREE_TYPE (*from_p) != error_mark_node
      && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
      && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
      && TREE_CODE (*from_p) == CONSTRUCTOR
      && CONSTRUCTOR_NELTS (*from_p) == 0)
    {
      maybe_with_size_expr (from_p);
      gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
      return gimplify_modify_expr_to_memset (expr_p,
					     TREE_OPERAND (*from_p, 1),
					     want_value, pre_p);
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For empty types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (is_empty_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ??? Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  /* Rebuild the IFN_VA_ARG call with the VLA size appended.  */
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value,
					       pre_p);
      else if (is_gimple_addressable (from)
	       && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (*to_p)))
	       && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  if (fndecl
	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ??? This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	copy_warning (assign, *from_p);
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      /* For a volatile lhs, re-reading it would be a second access;
	 hand back the RHS value instead.  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
/* Gimplify a comparison between two variable-sized objects.  Do this
   with a call to BUILT_IN_MEMCMP.  The original comparison code is kept:
   the result is "memcmp (a, b, size) <op> 0".  */

static enum gimplify_status
gimplify_variable_sized_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree t, arg, dest, src, expr;

  /* The size may contain a PLACEHOLDER_EXPR referring to the object
     itself; substitute OP0 for it.  */
  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
  arg = unshare_expr (arg);
  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
  src = build_fold_addr_expr_loc (loc, op1);
  dest = build_fold_addr_expr_loc (loc, op0);
  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
  t = build_call_expr_loc (loc, t, 3, dest, src, arg);

  expr
    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
  SET_EXPR_LOCATION (expr, loc);
  *expr_p = expr;
  return GS_OK;
}
/* Gimplify a comparison between two aggregate objects of integral scalar
   mode as a comparison between the bitwise equivalent scalar values.  Both
   operands are VIEW_CONVERTed to an unsigned integer type of the same mode
   and compared with the original comparison code.  */

static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);

  tree type = TREE_TYPE (op0);
  /* Ask the frontend for an unsigned integer type of the same mode.  */
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);

  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);

  *expr_p
    = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0,
		       op1);

  return GS_ALL_DONE;
}
6644 /* Gimplify an expression sequence. This function gimplifies each
6645 expression and rewrites the original expression with the last
6646 expression of the sequence in GIMPLE form.
6648 PRE_P points to the list where the side effects for all the
6649 expressions in the sequence will be emitted.
6651 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6653 static enum gimplify_status
6654 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
6660 tree
*sub_p
= &TREE_OPERAND (t
, 0);
6662 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
6663 gimplify_compound_expr (sub_p
, pre_p
, false);
6665 gimplify_stmt (sub_p
, pre_p
);
6667 t
= TREE_OPERAND (t
, 1);
6669 while (TREE_CODE (t
) == COMPOUND_EXPR
);
6676 gimplify_stmt (expr_p
, pre_p
);
6681 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6682 gimplify. After gimplification, EXPR_P will point to a new temporary
6683 that holds the original value of the SAVE_EXPR node.
6685 PRE_P points to the list where side effects that must happen before
6686 *EXPR_P should be stored. */
6688 static enum gimplify_status
6689 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6691 enum gimplify_status ret
= GS_ALL_DONE
;
6694 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
6695 val
= TREE_OPERAND (*expr_p
, 0);
6697 if (val
&& TREE_TYPE (val
) == error_mark_node
)
6700 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6701 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
6703 /* The operand may be a void-valued expression. It is
6704 being executed only for its side-effects. */
6705 if (TREE_TYPE (val
) == void_type_node
)
6707 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
6708 is_gimple_stmt
, fb_none
);
6712 /* The temporary may not be an SSA name as later abnormal and EH
6713 control flow may invalidate use/def domination. When in SSA
6714 form then assume there are no such issues and SAVE_EXPRs only
6715 appear via GENERIC foldings. */
6716 val
= get_initialized_tmp_var (val
, pre_p
, post_p
,
6717 gimple_in_ssa_p (cfun
));
6719 TREE_OPERAND (*expr_p
, 0) = val
;
6720 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
6728 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6735 PRE_P points to the list where side effects that must happen before
6736 *EXPR_P should be stored.
6738 POST_P points to the list where side effects that must happen after
6739 *EXPR_P should be stored. */
6741 static enum gimplify_status
6742 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6744 tree expr
= *expr_p
;
6745 tree op0
= TREE_OPERAND (expr
, 0);
6746 enum gimplify_status ret
;
6747 location_t loc
= EXPR_LOCATION (*expr_p
);
6749 switch (TREE_CODE (op0
))
6753 /* Check if we are dealing with an expression of the form '&*ptr'.
6754 While the front end folds away '&*ptr' into 'ptr', these
6755 expressions may be generated internally by the compiler (e.g.,
6756 builtins like __builtin_va_end). */
6757 /* Caution: the silent array decomposition semantics we allow for
6758 ADDR_EXPR means we can't always discard the pair. */
6759 /* Gimplification of the ADDR_EXPR operand may drop
6760 cv-qualification conversions, so make sure we add them if
6763 tree op00
= TREE_OPERAND (op0
, 0);
6764 tree t_expr
= TREE_TYPE (expr
);
6765 tree t_op00
= TREE_TYPE (op00
);
6767 if (!useless_type_conversion_p (t_expr
, t_op00
))
6768 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
6774 case VIEW_CONVERT_EXPR
:
6775 /* Take the address of our operand and then convert it to the type of
6778 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6779 all clear. The impact of this transformation is even less clear. */
6781 /* If the operand is a useless conversion, look through it. Doing so
6782 guarantees that the ADDR_EXPR and its operand will remain of the
6784 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
6785 op0
= TREE_OPERAND (op0
, 0);
6787 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
6788 build_fold_addr_expr_loc (loc
,
6789 TREE_OPERAND (op0
, 0)));
6794 if (integer_zerop (TREE_OPERAND (op0
, 1)))
6795 goto do_indirect_ref
;
6800 /* If we see a call to a declared builtin or see its address
6801 being taken (we can unify those cases here) then we can mark
6802 the builtin for implicit generation by GCC. */
6803 if (TREE_CODE (op0
) == FUNCTION_DECL
6804 && fndecl_built_in_p (op0
, BUILT_IN_NORMAL
)
6805 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
6806 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
6808 /* We use fb_either here because the C frontend sometimes takes
6809 the address of a call that returns a struct; see
6810 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6811 the implied temporary explicit. */
6813 /* Make the operand addressable. */
6814 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
6815 is_gimple_addressable
, fb_either
);
6816 if (ret
== GS_ERROR
)
6819 /* Then mark it. Beware that it may not be possible to do so directly
6820 if a temporary has been created by the gimplification. */
6821 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
6823 op0
= TREE_OPERAND (expr
, 0);
6825 /* For various reasons, the gimplification of the expression
6826 may have made a new INDIRECT_REF. */
6827 if (INDIRECT_REF_P (op0
)
6828 || (TREE_CODE (op0
) == MEM_REF
6829 && integer_zerop (TREE_OPERAND (op0
, 1))))
6830 goto do_indirect_ref
;
6832 mark_addressable (TREE_OPERAND (expr
, 0));
6834 /* The FEs may end up building ADDR_EXPRs early on a decl with
6835 an incomplete type. Re-build ADDR_EXPRs in canonical form
6837 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
6838 *expr_p
= build_fold_addr_expr (op0
);
6840 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6841 recompute_tree_invariant_for_addr_expr (*expr_p
);
6843 /* If we re-built the ADDR_EXPR add a conversion to the original type
6845 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
6846 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
6854 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6855 value; output operands should be a gimple lvalue. */
6857 static enum gimplify_status
6858 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6862 const char **oconstraints
;
6865 const char *constraint
;
6866 bool allows_mem
, allows_reg
, is_inout
;
6867 enum gimplify_status ret
, tret
;
6869 vec
<tree
, va_gc
> *inputs
;
6870 vec
<tree
, va_gc
> *outputs
;
6871 vec
<tree
, va_gc
> *clobbers
;
6872 vec
<tree
, va_gc
> *labels
;
6876 noutputs
= list_length (ASM_OUTPUTS (expr
));
6877 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
6885 link_next
= NULL_TREE
;
6886 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
6889 size_t constraint_len
;
6891 link_next
= TREE_CHAIN (link
);
6895 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6896 constraint_len
= strlen (constraint
);
6897 if (constraint_len
== 0)
6900 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6901 &allows_mem
, &allows_reg
, &is_inout
);
6908 /* If we can't make copies, we can only accept memory.
6909 Similarly for VLAs. */
6910 tree outtype
= TREE_TYPE (TREE_VALUE (link
));
6911 if (outtype
!= error_mark_node
6912 && (TREE_ADDRESSABLE (outtype
)
6913 || !COMPLETE_TYPE_P (outtype
)
6914 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype
))))
6920 error ("impossible constraint in %<asm%>");
6921 error ("non-memory output %d must stay in memory", i
);
6926 if (!allows_reg
&& allows_mem
)
6927 mark_addressable (TREE_VALUE (link
));
6929 tree orig
= TREE_VALUE (link
);
6930 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6931 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6932 fb_lvalue
| fb_mayfail
);
6933 if (tret
== GS_ERROR
)
6935 if (orig
!= error_mark_node
)
6936 error ("invalid lvalue in %<asm%> output %d", i
);
6940 /* If the constraint does not allow memory make sure we gimplify
6941 it to a register if it is not already but its base is. This
6942 happens for complex and vector components. */
6945 tree op
= TREE_VALUE (link
);
6946 if (! is_gimple_val (op
)
6947 && is_gimple_reg_type (TREE_TYPE (op
))
6948 && is_gimple_reg (get_base_address (op
)))
6950 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6954 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6955 tem
, unshare_expr (op
));
6956 gimplify_and_add (ass
, pre_p
);
6958 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6959 gimplify_and_add (ass
, post_p
);
6961 TREE_VALUE (link
) = tem
;
6966 vec_safe_push (outputs
, link
);
6967 TREE_CHAIN (link
) = NULL_TREE
;
6971 /* An input/output operand. To give the optimizers more
6972 flexibility, split it into separate input and output
6975 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6978 /* Turn the in/out constraint into an output constraint. */
6979 char *p
= xstrdup (constraint
);
6981 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6983 /* And add a matching input constraint. */
6986 sprintf (buf
, "%u", i
);
6988 /* If there are multiple alternatives in the constraint,
6989 handle each of them individually. Those that allow register
6990 will be replaced with operand number, the others will stay
6992 if (strchr (p
, ',') != NULL
)
6994 size_t len
= 0, buflen
= strlen (buf
);
6995 char *beg
, *end
, *str
, *dst
;
6999 end
= strchr (beg
, ',');
7001 end
= strchr (beg
, '\0');
7002 if ((size_t) (end
- beg
) < buflen
)
7005 len
+= end
- beg
+ 1;
7012 str
= (char *) alloca (len
);
7013 for (beg
= p
+ 1, dst
= str
;;)
7016 bool mem_p
, reg_p
, inout_p
;
7018 end
= strchr (beg
, ',');
7023 parse_output_constraint (&tem
, i
, 0, 0,
7024 &mem_p
, ®_p
, &inout_p
);
7029 memcpy (dst
, buf
, buflen
);
7038 memcpy (dst
, beg
, len
);
7047 input
= build_string (dst
- str
, str
);
7050 input
= build_string (strlen (buf
), buf
);
7053 input
= build_string (constraint_len
- 1, constraint
+ 1);
7057 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
7058 unshare_expr (TREE_VALUE (link
)));
7059 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
7063 link_next
= NULL_TREE
;
7064 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
7066 link_next
= TREE_CHAIN (link
);
7067 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
7068 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
7069 oconstraints
, &allows_mem
, &allows_reg
);
7071 /* If we can't make copies, we can only accept memory. */
7072 tree intype
= TREE_TYPE (TREE_VALUE (link
));
7073 if (intype
!= error_mark_node
7074 && (TREE_ADDRESSABLE (intype
)
7075 || !COMPLETE_TYPE_P (intype
)
7076 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype
))))
7082 error ("impossible constraint in %<asm%>");
7083 error ("non-memory input %d must stay in memory", i
);
7088 /* If the operand is a memory input, it should be an lvalue. */
7089 if (!allows_reg
&& allows_mem
)
7091 tree inputv
= TREE_VALUE (link
);
7092 STRIP_NOPS (inputv
);
7093 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
7094 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
7095 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
7096 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
7097 || TREE_CODE (inputv
) == MODIFY_EXPR
)
7098 TREE_VALUE (link
) = error_mark_node
;
7099 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
7100 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
7101 if (tret
!= GS_ERROR
)
7103 /* Unlike output operands, memory inputs are not guaranteed
7104 to be lvalues by the FE, and while the expressions are
7105 marked addressable there, if it is e.g. a statement
7106 expression, temporaries in it might not end up being
7107 addressable. They might be already used in the IL and thus
7108 it is too late to make them addressable now though. */
7109 tree x
= TREE_VALUE (link
);
7110 while (handled_component_p (x
))
7111 x
= TREE_OPERAND (x
, 0);
7112 if (TREE_CODE (x
) == MEM_REF
7113 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
7114 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
7116 || TREE_CODE (x
) == PARM_DECL
7117 || TREE_CODE (x
) == RESULT_DECL
)
7118 && !TREE_ADDRESSABLE (x
)
7119 && is_gimple_reg (x
))
7121 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
7123 "memory input %d is not directly addressable",
7125 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
7128 mark_addressable (TREE_VALUE (link
));
7129 if (tret
== GS_ERROR
)
7131 if (inputv
!= error_mark_node
)
7132 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
7133 "memory input %d is not directly addressable", i
);
7139 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
7140 is_gimple_asm_val
, fb_rvalue
);
7141 if (tret
== GS_ERROR
)
7145 TREE_CHAIN (link
) = NULL_TREE
;
7146 vec_safe_push (inputs
, link
);
7149 link_next
= NULL_TREE
;
7150 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
7152 link_next
= TREE_CHAIN (link
);
7153 TREE_CHAIN (link
) = NULL_TREE
;
7154 vec_safe_push (clobbers
, link
);
7157 link_next
= NULL_TREE
;
7158 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
7160 link_next
= TREE_CHAIN (link
);
7161 TREE_CHAIN (link
) = NULL_TREE
;
7162 vec_safe_push (labels
, link
);
7165 /* Do not add ASMs with errors to the gimple IL stream. */
7166 if (ret
!= GS_ERROR
)
7168 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
7169 inputs
, outputs
, clobbers
, labels
);
7171 /* asm is volatile if it was marked by the user as volatile or
7172 there are no outputs or this is an asm goto. */
7173 gimple_asm_set_volatile (stmt
,
7174 ASM_VOLATILE_P (expr
)
7177 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
7178 gimple_asm_set_inline (stmt
, ASM_INLINE_P (expr
));
7180 gimplify_seq_add_stmt (pre_p
, stmt
);
7186 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
7187 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
7188 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
7189 return to this function.
7191 FIXME should we complexify the prequeue handling instead? Or use flags
7192 for all the cleanups and let the optimizer tighten them up? The current
7193 code seems pretty fragile; it will break on a cleanup within any
7194 non-conditional nesting. But any such nesting would be broken, anyway;
7195 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
7196 and continues out of it. We can do that at the RTL level, though, so
7197 having an optimizer to tighten up try/finally regions would be a Good
7200 static enum gimplify_status
7201 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
7203 gimple_stmt_iterator iter
;
7204 gimple_seq body_sequence
= NULL
;
7206 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
7208 /* We only care about the number of conditions between the innermost
7209 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
7210 any cleanups collected outside the CLEANUP_POINT_EXPR. */
7211 int old_conds
= gimplify_ctxp
->conditions
;
7212 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
7213 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
7214 gimplify_ctxp
->conditions
= 0;
7215 gimplify_ctxp
->conditional_cleanups
= NULL
;
7216 gimplify_ctxp
->in_cleanup_point_expr
= true;
7218 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
7220 gimplify_ctxp
->conditions
= old_conds
;
7221 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
7222 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
7224 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
7226 gimple
*wce
= gsi_stmt (iter
);
7228 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
7230 if (gsi_one_before_end_p (iter
))
7232 /* Note that gsi_insert_seq_before and gsi_remove do not
7233 scan operands, unlike some other sequence mutators. */
7234 if (!gimple_wce_cleanup_eh_only (wce
))
7235 gsi_insert_seq_before_without_update (&iter
,
7236 gimple_wce_cleanup (wce
),
7238 gsi_remove (&iter
, true);
7245 enum gimple_try_flags kind
;
7247 if (gimple_wce_cleanup_eh_only (wce
))
7248 kind
= GIMPLE_TRY_CATCH
;
7250 kind
= GIMPLE_TRY_FINALLY
;
7251 seq
= gsi_split_seq_after (iter
);
7253 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
7254 /* Do not use gsi_replace here, as it may scan operands.
7255 We want to do a simple structural modification only. */
7256 gsi_set_stmt (&iter
, gtry
);
7257 iter
= gsi_start (gtry
->eval
);
7264 gimplify_seq_add_seq (pre_p
, body_sequence
);
7277 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7278 is the cleanup action required. EH_ONLY is true if the cleanup should
7279 only be executed if an exception is thrown, not on normal exit.
7280 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7281 only valid for clobbers. */
7284 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
7285 bool force_uncond
= false)
7288 gimple_seq cleanup_stmts
= NULL
;
7290 /* Errors can result in improperly nested cleanups. Which results in
7291 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7295 if (gimple_conditional_context ())
7297 /* If we're in a conditional context, this is more complex. We only
7298 want to run the cleanup if we actually ran the initialization that
7299 necessitates it, but we want to run it after the end of the
7300 conditional context. So we wrap the try/finally around the
7301 condition and use a flag to determine whether or not to actually
7302 run the destructor. Thus
7306 becomes (approximately)
7310 if (test) { A::A(temp); flag = 1; val = f(temp); }
7313 if (flag) A::~A(temp);
7319 gimplify_stmt (&cleanup
, &cleanup_stmts
);
7320 wce
= gimple_build_wce (cleanup_stmts
);
7321 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
7325 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
7326 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
7327 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
7329 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
7330 gimplify_stmt (&cleanup
, &cleanup_stmts
);
7331 wce
= gimple_build_wce (cleanup_stmts
);
7332 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
7334 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
7335 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
7336 gimplify_seq_add_stmt (pre_p
, ftrue
);
7338 /* Because of this manipulation, and the EH edges that jump
7339 threading cannot redirect, the temporary (VAR) will appear
7340 to be used uninitialized. Don't warn. */
7341 suppress_warning (var
, OPT_Wuninitialized
);
7346 gimplify_stmt (&cleanup
, &cleanup_stmts
);
7347 wce
= gimple_build_wce (cleanup_stmts
);
7348 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
7349 gimplify_seq_add_stmt (pre_p
, wce
);
7353 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7355 static enum gimplify_status
7356 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
7358 tree targ
= *expr_p
;
7359 tree temp
= TARGET_EXPR_SLOT (targ
);
7360 tree init
= TARGET_EXPR_INITIAL (targ
);
7361 enum gimplify_status ret
;
7363 bool unpoison_empty_seq
= false;
7364 gimple_stmt_iterator unpoison_it
;
7368 gimple_seq init_pre_p
= NULL
;
7370 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
7371 to the temps list. Handle also variable length TARGET_EXPRs. */
7372 if (!poly_int_tree_p (DECL_SIZE (temp
)))
7374 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
7375 gimplify_type_sizes (TREE_TYPE (temp
), &init_pre_p
);
7376 /* FIXME: this is correct only when the size of the type does
7377 not depend on expressions evaluated in init. */
7378 gimplify_vla_decl (temp
, &init_pre_p
);
7382 /* Save location where we need to place unpoisoning. It's possible
7383 that a variable will be converted to needs_to_live_in_memory. */
7384 unpoison_it
= gsi_last (*pre_p
);
7385 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
7387 gimple_add_tmp_var (temp
);
7390 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7391 expression is supposed to initialize the slot. */
7392 if (VOID_TYPE_P (TREE_TYPE (init
)))
7393 ret
= gimplify_expr (&init
, &init_pre_p
, post_p
, is_gimple_stmt
,
7397 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
7399 ret
= gimplify_expr (&init
, &init_pre_p
, post_p
, is_gimple_stmt
,
7402 ggc_free (init_expr
);
7404 if (ret
== GS_ERROR
)
7406 /* PR c++/28266 Make sure this is expanded only once. */
7407 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
7412 gimplify_and_add (init
, &init_pre_p
);
7414 /* Add a clobber for the temporary going out of scope, like
7415 gimplify_bind_expr. But only if we did not promote the
7416 temporary to static storage. */
7417 if (gimplify_ctxp
->in_cleanup_point_expr
7418 && !TREE_STATIC (temp
)
7419 && needs_to_live_in_memory (temp
))
7421 if (flag_stack_reuse
== SR_ALL
)
7423 tree clobber
= build_clobber (TREE_TYPE (temp
),
7424 CLOBBER_STORAGE_END
);
7425 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
7426 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
7428 if (asan_poisoned_variables
7429 && DECL_ALIGN (temp
) <= MAX_SUPPORTED_STACK_ALIGNMENT
7430 && !TREE_STATIC (temp
)
7431 && dbg_cnt (asan_use_after_scope
)
7432 && !gimplify_omp_ctxp
)
7434 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
7437 if (unpoison_empty_seq
)
7438 unpoison_it
= gsi_start (*pre_p
);
7440 asan_poison_variable (temp
, false, &unpoison_it
,
7441 unpoison_empty_seq
);
7442 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
7447 gimple_seq_add_seq (pre_p
, init_pre_p
);
7449 /* If needed, push the cleanup for the temp. */
7450 if (TARGET_EXPR_CLEANUP (targ
))
7451 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
7452 CLEANUP_EH_ONLY (targ
), pre_p
);
7454 /* Only expand this once. */
7455 TREE_OPERAND (targ
, 3) = init
;
7456 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
7459 /* We should have expanded this before. */
7460 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
7466 /* Gimplification of expression trees. */
7468 /* Gimplify an expression which appears at statement context. The
7469 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7470 NULL, a new sequence is allocated.
7472 Return true if we actually added a statement to the queue. */
7475 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
7477 gimple_seq_node last
;
7479 last
= gimple_seq_last (*seq_p
);
7480 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
7481 return last
!= gimple_seq_last (*seq_p
);
7484 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
7485 to CTX. If entries already exist, force them to be some flavor of private.
7486 If there is no enclosing parallel, do nothing. */
7489 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
7493 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
7498 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7501 if (n
->value
& GOVD_SHARED
)
7502 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
7503 else if (n
->value
& GOVD_MAP
)
7504 n
->value
|= GOVD_MAP_TO_ONLY
;
7508 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
7510 if (ctx
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
7511 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
7513 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
7515 else if (ctx
->region_type
!= ORT_WORKSHARE
7516 && ctx
->region_type
!= ORT_TASKGROUP
7517 && ctx
->region_type
!= ORT_SIMD
7518 && ctx
->region_type
!= ORT_ACC
7519 && !(ctx
->region_type
& ORT_TARGET_DATA
))
7520 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
7522 ctx
= ctx
->outer_context
;
7527 /* Similarly for each of the type sizes of TYPE. */
7530 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
7532 if (type
== NULL
|| type
== error_mark_node
)
7534 type
= TYPE_MAIN_VARIANT (type
);
7536 if (ctx
->privatized_types
->add (type
))
7539 switch (TREE_CODE (type
))
7545 case FIXED_POINT_TYPE
:
7546 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
7547 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
7551 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
7552 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
7557 case QUAL_UNION_TYPE
:
7560 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
7561 if (TREE_CODE (field
) == FIELD_DECL
)
7563 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
7564 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
7570 case REFERENCE_TYPE
:
7571 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
7578 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
7579 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
7580 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
7583 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7586 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
7589 unsigned int nflags
;
7592 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
7595 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7596 there are constructors involved somewhere. Exception is a shared clause,
7597 there is nothing privatized in that case. */
7598 if ((flags
& GOVD_SHARED
) == 0
7599 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
7600 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
7603 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7604 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7606 /* We shouldn't be re-adding the decl with the same data
7608 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
7609 nflags
= n
->value
| flags
;
7610 /* The only combination of data sharing classes we should see is
7611 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7612 reduction variables to be used in data sharing clauses. */
7613 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
7614 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
7615 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
7616 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
7621 /* When adding a variable-sized variable, we have to handle all sorts
7622 of additional bits of data: the pointer replacement variable, and
7623 the parameters of the type. */
7624 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7626 /* Add the pointer replacement variable as PRIVATE if the variable
7627 replacement is private, else FIRSTPRIVATE since we'll need the
7628 address of the original variable either for SHARED, or for the
7629 copy into or out of the context. */
7630 if (!(flags
& GOVD_LOCAL
) && ctx
->region_type
!= ORT_TASKGROUP
)
7632 if (flags
& GOVD_MAP
)
7633 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
7634 else if (flags
& GOVD_PRIVATE
)
7635 nflags
= GOVD_PRIVATE
;
7636 else if (((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7637 && (flags
& GOVD_FIRSTPRIVATE
))
7638 || (ctx
->region_type
== ORT_TARGET_DATA
7639 && (flags
& GOVD_DATA_SHARE_CLASS
) == 0))
7640 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
7642 nflags
= GOVD_FIRSTPRIVATE
;
7643 nflags
|= flags
& GOVD_SEEN
;
7644 t
= DECL_VALUE_EXPR (decl
);
7645 gcc_assert (INDIRECT_REF_P (t
));
7646 t
= TREE_OPERAND (t
, 0);
7647 gcc_assert (DECL_P (t
));
7648 omp_add_variable (ctx
, t
, nflags
);
7651 /* Add all of the variable and type parameters (which should have
7652 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7653 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
7654 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
7655 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7657 /* The variable-sized variable itself is never SHARED, only some form
7658 of PRIVATE. The sharing would take place via the pointer variable
7659 which we remapped above. */
7660 if (flags
& GOVD_SHARED
)
7661 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
7662 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
7664 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7665 alloca statement we generate for the variable, so make sure it
7666 is available. This isn't automatically needed for the SHARED
7667 case, since we won't be allocating local storage then.
7668 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7669 in this case omp_notice_variable will be called later
7670 on when it is gimplified. */
7671 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
7672 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
7673 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
7675 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
7676 && omp_privatize_by_reference (decl
))
7678 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7680 /* Similar to the direct variable sized case above, we'll need the
7681 size of references being privatized. */
7682 if ((flags
& GOVD_SHARED
) == 0)
7684 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7685 if (t
&& DECL_P (t
))
7686 omp_notice_variable (ctx
, t
, true);
7693 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
7695 /* For reductions clauses in OpenACC loop directives, by default create a
7696 copy clause on the enclosing parallel construct for carrying back the
7698 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
7700 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
7703 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
7706 /* Ignore local variables and explicitly declared clauses. */
7707 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
7709 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
7711 /* According to the OpenACC spec, such a reduction variable
7712 should already have a copy map on a kernels construct,
7713 verify that here. */
7714 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
7715 && (n
->value
& GOVD_MAP
));
7717 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7719 /* Remove firstprivate and make it a copy map. */
7720 n
->value
&= ~GOVD_FIRSTPRIVATE
;
7721 n
->value
|= GOVD_MAP
;
7724 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7726 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
7727 GOVD_MAP
| GOVD_SEEN
);
7730 outer_ctx
= outer_ctx
->outer_context
;
7735 /* Notice a threadprivate variable DECL used in OMP context CTX.
7736 This just prints out diagnostics about threadprivate variable uses
7737 in untied tasks. If DECL2 is non-NULL, prevent this warning
7738 on that variable. */
7741 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
7745 struct gimplify_omp_ctx
*octx
;
7747 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
7748 if ((octx
->region_type
& ORT_TARGET
) != 0
7749 || octx
->order_concurrent
)
7751 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
7754 if (octx
->order_concurrent
)
7756 error ("threadprivate variable %qE used in a region with"
7757 " %<order(concurrent)%> clause", DECL_NAME (decl
));
7758 inform (octx
->location
, "enclosing region");
7762 error ("threadprivate variable %qE used in target region",
7764 inform (octx
->location
, "enclosing target region");
7766 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
7769 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
7772 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
7774 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7777 error ("threadprivate variable %qE used in untied task",
7779 inform (ctx
->location
, "enclosing task");
7780 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
7783 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
7787 /* Return true if global var DECL is device resident. */
7790 device_resident_p (tree decl
)
7792 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
7797 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
7799 tree c
= TREE_VALUE (t
);
7800 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
7807 /* Return true if DECL has an ACC DECLARE attribute. */
7810 is_oacc_declared (tree decl
)
7812 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
7813 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
7814 return declared
!= NULL_TREE
;
7817 /* Determine outer default flags for DECL mentioned in an OMP region
7818 but not declared in an enclosing clause.
7820 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7821 remapped firstprivate instead of shared. To some extent this is
7822 addressed in omp_firstprivatize_type_sizes, but not
7826 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
7827 bool in_code
, unsigned flags
)
7829 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
7830 enum omp_clause_default_kind kind
;
7832 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
7833 if (ctx
->region_type
& ORT_TASK
)
7835 tree detach_clause
= omp_find_clause (ctx
->clauses
, OMP_CLAUSE_DETACH
);
7837 /* The event-handle specified by a detach clause should always be firstprivate,
7838 regardless of the current default. */
7839 if (detach_clause
&& OMP_CLAUSE_DECL (detach_clause
) == decl
)
7840 kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
7842 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
7843 default_kind
= kind
;
7844 else if (VAR_P (decl
) && TREE_STATIC (decl
) && DECL_IN_CONSTANT_POOL (decl
))
7845 default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
7846 /* For C/C++ default({,first}private), variables with static storage duration
7847 declared in a namespace or global scope and referenced in construct
7848 must be explicitly specified, i.e. acts as default(none). */
7849 else if ((default_kind
== OMP_CLAUSE_DEFAULT_PRIVATE
7850 || default_kind
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
7852 && is_global_var (decl
)
7853 && (DECL_FILE_SCOPE_P (decl
)
7854 || (DECL_CONTEXT (decl
)
7855 && TREE_CODE (DECL_CONTEXT (decl
)) == NAMESPACE_DECL
))
7856 && !lang_GNU_Fortran ())
7857 default_kind
= OMP_CLAUSE_DEFAULT_NONE
;
7859 switch (default_kind
)
7861 case OMP_CLAUSE_DEFAULT_NONE
:
7865 if (ctx
->region_type
& ORT_PARALLEL
)
7867 else if ((ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
)
7869 else if (ctx
->region_type
& ORT_TASK
)
7871 else if (ctx
->region_type
& ORT_TEAMS
)
7876 error ("%qE not specified in enclosing %qs",
7877 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
7878 inform (ctx
->location
, "enclosing %qs", rtype
);
7881 case OMP_CLAUSE_DEFAULT_SHARED
:
7882 flags
|= GOVD_SHARED
;
7884 case OMP_CLAUSE_DEFAULT_PRIVATE
:
7885 flags
|= GOVD_PRIVATE
;
7887 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
7888 flags
|= GOVD_FIRSTPRIVATE
;
7890 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
7891 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7892 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
7893 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
7895 omp_notice_variable (octx
, decl
, in_code
);
7896 for (; octx
; octx
= octx
->outer_context
)
7900 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
7901 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
7902 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7904 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
7906 flags
|= GOVD_FIRSTPRIVATE
;
7909 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
7911 flags
|= GOVD_SHARED
;
7917 if (TREE_CODE (decl
) == PARM_DECL
7918 || (!is_global_var (decl
)
7919 && DECL_CONTEXT (decl
) == current_function_decl
))
7920 flags
|= GOVD_FIRSTPRIVATE
;
7922 flags
|= GOVD_SHARED
;
7933 /* Return string name for types of OpenACC constructs from ORT_* values. */
7936 oacc_region_type_name (enum omp_region_type region_type
)
7938 switch (region_type
)
7942 case ORT_ACC_PARALLEL
:
7944 case ORT_ACC_KERNELS
:
7946 case ORT_ACC_SERIAL
:
7953 /* Determine outer default flags for DECL mentioned in an OACC region
7954 but not declared in an enclosing clause. */
7957 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
7959 struct gimplify_omp_ctx
*ctx_default
= ctx
;
7960 /* If no 'default' clause appears on this compute construct... */
7961 if (ctx_default
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
)
7963 /* ..., see if one appears on a lexically containing 'data'
7965 while ((ctx_default
= ctx_default
->outer_context
))
7967 if (ctx_default
->region_type
== ORT_ACC_DATA
7968 && ctx_default
->default_kind
!= OMP_CLAUSE_DEFAULT_SHARED
)
7971 /* If not, reset. */
7976 bool on_device
= false;
7977 bool is_private
= false;
7978 bool declared
= is_oacc_declared (decl
);
7979 tree type
= TREE_TYPE (decl
);
7981 if (omp_privatize_by_reference (decl
))
7982 type
= TREE_TYPE (type
);
7984 /* For Fortran COMMON blocks, only used variables in those blocks are
7985 transfered and remapped. The block itself will have a private clause to
7986 avoid transfering the data twice.
7987 The hook evaluates to false by default. For a variable in Fortran's COMMON
7988 or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
7989 the variables in such a COMMON/EQUIVALENCE block shall be privatized not
7990 the whole block. For C++ and Fortran, it can also be true under certain
7991 other conditions, if DECL_HAS_VALUE_EXPR. */
7992 if (RECORD_OR_UNION_TYPE_P (type
))
7993 is_private
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7995 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
7996 && is_global_var (decl
)
7997 && device_resident_p (decl
)
8001 flags
|= GOVD_MAP_TO_ONLY
;
8004 switch (ctx
->region_type
)
8006 case ORT_ACC_KERNELS
:
8008 flags
|= GOVD_FIRSTPRIVATE
;
8009 else if (AGGREGATE_TYPE_P (type
))
8011 /* Aggregates default to 'present_or_copy', or 'present'. */
8012 if (ctx_default
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
8015 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
8018 /* Scalars default to 'copy'. */
8019 flags
|= GOVD_MAP
| GOVD_MAP_FORCE
;
8023 case ORT_ACC_PARALLEL
:
8024 case ORT_ACC_SERIAL
:
8026 flags
|= GOVD_FIRSTPRIVATE
;
8027 else if (on_device
|| declared
)
8029 else if (AGGREGATE_TYPE_P (type
))
8031 /* Aggregates default to 'present_or_copy', or 'present'. */
8032 if (ctx_default
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
8035 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
8038 /* Scalars default to 'firstprivate'. */
8039 flags
|= GOVD_FIRSTPRIVATE
;
8047 if (DECL_ARTIFICIAL (decl
))
8048 ; /* We can get compiler-generated decls, and should not complain
8050 else if (ctx_default
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
8052 error ("%qE not specified in enclosing OpenACC %qs construct",
8053 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)),
8054 oacc_region_type_name (ctx
->region_type
));
8055 if (ctx_default
!= ctx
)
8056 inform (ctx
->location
, "enclosing OpenACC %qs construct and",
8057 oacc_region_type_name (ctx
->region_type
));
8058 inform (ctx_default
->location
,
8059 "enclosing OpenACC %qs construct with %qs clause",
8060 oacc_region_type_name (ctx_default
->region_type
),
8063 else if (ctx_default
->default_kind
== OMP_CLAUSE_DEFAULT_PRESENT
)
8064 ; /* Handled above. */
8066 gcc_checking_assert (ctx_default
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
8071 /* Record the fact that DECL was used within the OMP context CTX.
8072 IN_CODE is true when real code uses DECL, and false when we should
8073 merely emit default(none) errors. Return true if DECL is going to
8074 be remapped and thus DECL shouldn't be gimplified into its
8075 DECL_VALUE_EXPR (if any). */
8078 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
8081 unsigned flags
= in_code
? GOVD_SEEN
: 0;
8082 bool ret
= false, shared
;
8084 if (error_operand_p (decl
))
8087 if (DECL_ARTIFICIAL (decl
))
8089 tree attr
= lookup_attribute ("omp allocate var", DECL_ATTRIBUTES (decl
));
8091 decl
= TREE_VALUE (TREE_VALUE (attr
));
8094 if (ctx
->region_type
== ORT_NONE
)
8095 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
8097 if (is_global_var (decl
))
8099 /* Threadprivate variables are predetermined. */
8100 if (DECL_THREAD_LOCAL_P (decl
))
8101 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
8103 if (DECL_HAS_VALUE_EXPR_P (decl
))
8105 if (ctx
->region_type
& ORT_ACC
)
8106 /* For OpenACC, defer expansion of value to avoid transfering
8107 privatized common block data instead of im-/explicitly transfered
8108 variables which are in common blocks. */
8112 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
8114 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
8115 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
8119 if (gimplify_omp_ctxp
->outer_context
== NULL
8121 && oacc_get_fn_attrib (current_function_decl
))
8123 location_t loc
= DECL_SOURCE_LOCATION (decl
);
8125 if (lookup_attribute ("omp declare target link",
8126 DECL_ATTRIBUTES (decl
)))
8129 "%qE with %<link%> clause used in %<routine%> function",
8133 else if (!lookup_attribute ("omp declare target",
8134 DECL_ATTRIBUTES (decl
)))
8137 "%qE requires a %<declare%> directive for use "
8138 "in a %<routine%> function", DECL_NAME (decl
));
8144 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
8145 if ((ctx
->region_type
& ORT_TARGET
) != 0)
8147 if (ctx
->region_type
& ORT_ACC
)
8148 /* For OpenACC, as remarked above, defer expansion. */
8153 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
8156 unsigned nflags
= flags
;
8157 if ((ctx
->region_type
& ORT_ACC
) == 0)
8159 bool is_declare_target
= false;
8160 if (is_global_var (decl
)
8161 && varpool_node::get_create (decl
)->offloadable
)
8163 struct gimplify_omp_ctx
*octx
;
8164 for (octx
= ctx
->outer_context
;
8165 octx
; octx
= octx
->outer_context
)
8167 n
= splay_tree_lookup (octx
->variables
,
8168 (splay_tree_key
)decl
);
8170 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
8171 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
8174 is_declare_target
= octx
== NULL
;
8176 if (!is_declare_target
)
8179 enum omp_clause_defaultmap_kind kind
;
8180 if (lang_hooks
.decls
.omp_allocatable_p (decl
))
8181 gdmk
= GDMK_ALLOCATABLE
;
8182 else if (lang_hooks
.decls
.omp_scalar_target_p (decl
))
8183 gdmk
= GDMK_SCALAR_TARGET
;
8184 else if (lang_hooks
.decls
.omp_scalar_p (decl
, false))
8186 else if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
8187 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
8188 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
8190 gdmk
= GDMK_POINTER
;
8192 gdmk
= GDMK_AGGREGATE
;
8193 kind
= lang_hooks
.decls
.omp_predetermined_mapping (decl
);
8194 if (kind
!= OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
)
8196 if (kind
== OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
)
8197 nflags
|= GOVD_FIRSTPRIVATE
;
8198 else if (kind
== OMP_CLAUSE_DEFAULTMAP_TO
)
8199 nflags
|= GOVD_MAP
| GOVD_MAP_TO_ONLY
;
8203 else if (ctx
->defaultmap
[gdmk
] == 0)
8205 tree d
= lang_hooks
.decls
.omp_report_decl (decl
);
8206 error ("%qE not specified in enclosing %<target%>",
8208 inform (ctx
->location
, "enclosing %<target%>");
8210 else if (ctx
->defaultmap
[gdmk
]
8211 & (GOVD_MAP_0LEN_ARRAY
| GOVD_FIRSTPRIVATE
))
8212 nflags
|= ctx
->defaultmap
[gdmk
];
8213 else if (ctx
->defaultmap
[gdmk
] & GOVD_MAP_FORCE_PRESENT
)
8215 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
8216 nflags
|= ctx
->defaultmap
[gdmk
] | GOVD_MAP_ALLOC_ONLY
;
8220 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
8221 nflags
|= ctx
->defaultmap
[gdmk
] & ~GOVD_MAP
;
8226 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
8227 if ((ctx
->region_type
& ORT_ACC
) && octx
)
8229 /* Look in outer OpenACC contexts, to see if there's a
8230 data attribute for this variable. */
8231 omp_notice_variable (octx
, decl
, in_code
);
8233 for (; octx
; octx
= octx
->outer_context
)
8235 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
8238 = splay_tree_lookup (octx
->variables
,
8239 (splay_tree_key
) decl
);
8242 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
8243 error ("variable %qE declared in enclosing "
8244 "%<host_data%> region", DECL_NAME (decl
));
8246 if (octx
->region_type
== ORT_ACC_DATA
8247 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
8248 nflags
|= GOVD_MAP_0LEN_ARRAY
;
8254 if ((nflags
& ~(GOVD_MAP_TO_ONLY
| GOVD_MAP_FROM_ONLY
8255 | GOVD_MAP_ALLOC_ONLY
)) == flags
)
8257 tree type
= TREE_TYPE (decl
);
8259 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
8260 && omp_privatize_by_reference (decl
))
8261 type
= TREE_TYPE (type
);
8262 if (!omp_mappable_type (type
))
8264 error ("%qD referenced in target region does not have "
8265 "a mappable type", decl
);
8266 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
8270 if ((ctx
->region_type
& ORT_ACC
) != 0)
8271 nflags
= oacc_default_clause (ctx
, decl
, flags
);
8277 omp_add_variable (ctx
, decl
, nflags
);
8281 /* If nothing changed, there's nothing left to do. */
8282 if ((n
->value
& flags
) == flags
)
8292 if (ctx
->region_type
== ORT_WORKSHARE
8293 || ctx
->region_type
== ORT_TASKGROUP
8294 || ctx
->region_type
== ORT_SIMD
8295 || ctx
->region_type
== ORT_ACC
8296 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
8299 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
8301 if ((flags
& GOVD_PRIVATE
)
8302 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
8303 flags
|= GOVD_PRIVATE_OUTER_REF
;
8305 omp_add_variable (ctx
, decl
, flags
);
8307 shared
= (flags
& GOVD_SHARED
) != 0;
8308 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
8312 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
8313 lb, b or incr expressions, those shouldn't be turned into simd arrays. */
8314 if (ctx
->region_type
== ORT_SIMD
8315 && ctx
->in_for_exprs
8316 && ((n
->value
& (GOVD_PRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
))
8318 flags
&= ~GOVD_SEEN
;
8320 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
8321 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
8322 && DECL_SIZE (decl
))
8324 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
8327 tree t
= DECL_VALUE_EXPR (decl
);
8328 gcc_assert (INDIRECT_REF_P (t
));
8329 t
= TREE_OPERAND (t
, 0);
8330 gcc_assert (DECL_P (t
));
8331 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
8332 n2
->value
|= GOVD_SEEN
;
8334 else if (omp_privatize_by_reference (decl
)
8335 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
8336 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
8340 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
8341 gcc_assert (DECL_P (t
));
8342 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
8344 omp_notice_variable (ctx
, t
, true);
8348 if (ctx
->region_type
& ORT_ACC
)
8349 /* For OpenACC, as remarked above, defer expansion. */
8352 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
8353 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
8355 /* If nothing changed, there's nothing left to do. */
8356 if ((n
->value
& flags
) == flags
)
8362 /* If the variable is private in the current context, then we don't
8363 need to propagate anything to an outer context. */
8364 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
8366 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8367 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8369 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
8370 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8371 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8373 if (ctx
->outer_context
8374 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
8379 /* Verify that DECL is private within CTX. If there's specific information
8380 to the contrary in the innermost scope, generate an error. */
8383 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
8387 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
8390 if (n
->value
& GOVD_SHARED
)
8392 if (ctx
== gimplify_omp_ctxp
)
8395 error ("iteration variable %qE is predetermined linear",
8398 error ("iteration variable %qE should be private",
8400 n
->value
= GOVD_PRIVATE
;
8406 else if ((n
->value
& GOVD_EXPLICIT
) != 0
8407 && (ctx
== gimplify_omp_ctxp
8408 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
8409 && gimplify_omp_ctxp
->outer_context
== ctx
)))
8411 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
8412 error ("iteration variable %qE should not be firstprivate",
8414 else if ((n
->value
& GOVD_REDUCTION
) != 0)
8415 error ("iteration variable %qE should not be reduction",
8417 else if (simd
!= 1 && (n
->value
& GOVD_LINEAR
) != 0)
8418 error ("iteration variable %qE should not be linear",
8421 return (ctx
== gimplify_omp_ctxp
8422 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
8423 && gimplify_omp_ctxp
->outer_context
== ctx
));
8426 if (ctx
->region_type
!= ORT_WORKSHARE
8427 && ctx
->region_type
!= ORT_TASKGROUP
8428 && ctx
->region_type
!= ORT_SIMD
8429 && ctx
->region_type
!= ORT_ACC
)
8431 else if (ctx
->outer_context
)
8432 return omp_is_private (ctx
->outer_context
, decl
, simd
);
8436 /* Return true if DECL is private within a parallel region
8437 that binds to the current construct's context or in parallel
8438 region's REDUCTION clause. */
8441 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
8447 ctx
= ctx
->outer_context
;
8450 if (is_global_var (decl
))
8453 /* References might be private, but might be shared too,
8454 when checking for copyprivate, assume they might be
8455 private, otherwise assume they might be shared. */
8459 if (omp_privatize_by_reference (decl
))
8462 /* Treat C++ privatized non-static data members outside
8463 of the privatization the same. */
8464 if (omp_member_access_dummy_var (decl
))
8470 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8472 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
8473 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
8475 if ((ctx
->region_type
& ORT_TARGET_DATA
) != 0
8477 || (n
->value
& GOVD_MAP
) == 0)
8484 if ((n
->value
& GOVD_LOCAL
) != 0
8485 && omp_member_access_dummy_var (decl
))
8487 return (n
->value
& GOVD_SHARED
) == 0;
8490 if (ctx
->region_type
== ORT_WORKSHARE
8491 || ctx
->region_type
== ORT_TASKGROUP
8492 || ctx
->region_type
== ORT_SIMD
8493 || ctx
->region_type
== ORT_ACC
)
8502 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8505 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
8509 /* If this node has been visited, unmark it and keep looking. */
8510 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
8513 if (IS_TYPE_OR_DECL_P (t
))
8519 /* Gimplify the affinity clause but effectively ignore it.
8522 if ((step > 1) ? var <= end : var > end)
8523 locatator_var_expr; */
8526 gimplify_omp_affinity (tree
*list_p
, gimple_seq
*pre_p
)
8528 tree last_iter
= NULL_TREE
;
8529 tree last_bind
= NULL_TREE
;
8530 tree label
= NULL_TREE
;
8531 tree
*last_body
= NULL
;
8532 for (tree c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8533 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_AFFINITY
)
8535 tree t
= OMP_CLAUSE_DECL (c
);
8536 if (TREE_CODE (t
) == TREE_LIST
8538 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8540 if (TREE_VALUE (t
) == null_pointer_node
)
8542 if (TREE_PURPOSE (t
) != last_iter
)
8546 append_to_statement_list (label
, last_body
);
8547 gimplify_and_add (last_bind
, pre_p
);
8548 last_bind
= NULL_TREE
;
8550 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8552 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
8553 is_gimple_val
, fb_rvalue
) == GS_ERROR
8554 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
8555 is_gimple_val
, fb_rvalue
) == GS_ERROR
8556 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
8557 is_gimple_val
, fb_rvalue
) == GS_ERROR
8558 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
8559 is_gimple_val
, fb_rvalue
)
8563 last_iter
= TREE_PURPOSE (t
);
8564 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8565 last_bind
= build3 (BIND_EXPR
, void_type_node
, BLOCK_VARS (block
),
8567 last_body
= &BIND_EXPR_BODY (last_bind
);
8568 tree cond
= NULL_TREE
;
8569 location_t loc
= OMP_CLAUSE_LOCATION (c
);
8570 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8572 tree var
= TREE_VEC_ELT (it
, 0);
8573 tree begin
= TREE_VEC_ELT (it
, 1);
8574 tree end
= TREE_VEC_ELT (it
, 2);
8575 tree step
= TREE_VEC_ELT (it
, 3);
8576 loc
= DECL_SOURCE_LOCATION (var
);
8577 tree tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8579 append_to_statement_list_force (tem
, last_body
);
8581 tree cond1
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8582 step
, build_zero_cst (TREE_TYPE (step
)));
8583 tree cond2
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
,
8585 tree cond3
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8587 cond1
= fold_build3_loc (loc
, COND_EXPR
, boolean_type_node
,
8588 cond1
, cond2
, cond3
);
8590 cond
= fold_build2_loc (loc
, TRUTH_AND_EXPR
,
8591 boolean_type_node
, cond
, cond1
);
8595 tree cont_label
= create_artificial_label (loc
);
8596 label
= build1 (LABEL_EXPR
, void_type_node
, cont_label
);
8597 tree tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, cond
,
8599 build_and_jump (&cont_label
));
8600 append_to_statement_list_force (tem
, last_body
);
8602 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8604 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
), 0),
8606 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8608 if (error_operand_p (TREE_VALUE (t
)))
8610 append_to_statement_list_force (TREE_VALUE (t
), last_body
);
8611 TREE_VALUE (t
) = null_pointer_node
;
8617 append_to_statement_list (label
, last_body
);
8618 gimplify_and_add (last_bind
, pre_p
);
8619 last_bind
= NULL_TREE
;
8621 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8623 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8624 NULL
, is_gimple_val
, fb_rvalue
);
8625 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8627 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8629 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8630 is_gimple_lvalue
, fb_lvalue
) == GS_ERROR
)
8632 gimplify_and_add (OMP_CLAUSE_DECL (c
), pre_p
);
8637 append_to_statement_list (label
, last_body
);
8638 gimplify_and_add (last_bind
, pre_p
);
8643 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8644 lower all the depend clauses by populating corresponding depend
8645 array. Returns 0 if there are no such depend clauses, or
8646 2 if all depend clauses should be removed, 1 otherwise. */
8649 gimplify_omp_depend (tree
*list_p
, gimple_seq
*pre_p
)
8653 size_t n
[5] = { 0, 0, 0, 0, 0 };
8655 tree counts
[5] = { NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
};
8656 tree last_iter
= NULL_TREE
, last_count
= NULL_TREE
;
8658 location_t first_loc
= UNKNOWN_LOCATION
;
8660 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8661 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8663 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8665 case OMP_CLAUSE_DEPEND_IN
:
8668 case OMP_CLAUSE_DEPEND_OUT
:
8669 case OMP_CLAUSE_DEPEND_INOUT
:
8672 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8675 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8678 case OMP_CLAUSE_DEPEND_INOUTSET
:
8684 tree t
= OMP_CLAUSE_DECL (c
);
8685 if (first_loc
== UNKNOWN_LOCATION
)
8686 first_loc
= OMP_CLAUSE_LOCATION (c
);
8687 if (TREE_CODE (t
) == TREE_LIST
8689 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8691 if (TREE_PURPOSE (t
) != last_iter
)
8693 tree tcnt
= size_one_node
;
8694 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8696 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
8697 is_gimple_val
, fb_rvalue
) == GS_ERROR
8698 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
8699 is_gimple_val
, fb_rvalue
) == GS_ERROR
8700 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
8701 is_gimple_val
, fb_rvalue
) == GS_ERROR
8702 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
8703 is_gimple_val
, fb_rvalue
)
8706 tree var
= TREE_VEC_ELT (it
, 0);
8707 tree begin
= TREE_VEC_ELT (it
, 1);
8708 tree end
= TREE_VEC_ELT (it
, 2);
8709 tree step
= TREE_VEC_ELT (it
, 3);
8710 tree orig_step
= TREE_VEC_ELT (it
, 4);
8711 tree type
= TREE_TYPE (var
);
8712 tree stype
= TREE_TYPE (step
);
8713 location_t loc
= DECL_SOURCE_LOCATION (var
);
8715 /* Compute count for this iterator as
8717 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8718 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8719 and compute product of those for the entire depend
8721 if (POINTER_TYPE_P (type
))
8722 endmbegin
= fold_build2_loc (loc
, POINTER_DIFF_EXPR
,
8725 endmbegin
= fold_build2_loc (loc
, MINUS_EXPR
, type
,
8727 tree stepm1
= fold_build2_loc (loc
, MINUS_EXPR
, stype
,
8729 build_int_cst (stype
, 1));
8730 tree stepp1
= fold_build2_loc (loc
, PLUS_EXPR
, stype
, step
,
8731 build_int_cst (stype
, 1));
8732 tree pos
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
8733 unshare_expr (endmbegin
),
8735 pos
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
8737 tree neg
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
8739 if (TYPE_UNSIGNED (stype
))
8741 neg
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, neg
);
8742 step
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, step
);
8744 neg
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
8747 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8750 pos
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, pos
,
8751 build_int_cst (stype
, 0));
8752 cond
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
,
8754 neg
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, neg
,
8755 build_int_cst (stype
, 0));
8756 tree osteptype
= TREE_TYPE (orig_step
);
8757 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8759 build_int_cst (osteptype
, 0));
8760 tree cnt
= fold_build3_loc (loc
, COND_EXPR
, stype
,
8762 cnt
= fold_convert_loc (loc
, sizetype
, cnt
);
8763 if (gimplify_expr (&cnt
, pre_p
, NULL
, is_gimple_val
,
8764 fb_rvalue
) == GS_ERROR
)
8766 tcnt
= size_binop_loc (loc
, MULT_EXPR
, tcnt
, cnt
);
8768 if (gimplify_expr (&tcnt
, pre_p
, NULL
, is_gimple_val
,
8769 fb_rvalue
) == GS_ERROR
)
8771 last_iter
= TREE_PURPOSE (t
);
8774 if (counts
[i
] == NULL_TREE
)
8775 counts
[i
] = last_count
;
8777 counts
[i
] = size_binop_loc (OMP_CLAUSE_LOCATION (c
),
8778 PLUS_EXPR
, counts
[i
], last_count
);
8783 for (i
= 0; i
< 5; i
++)
8789 tree total
= size_zero_node
;
8790 for (i
= 0; i
< 5; i
++)
8792 unused
[i
] = counts
[i
] == NULL_TREE
&& n
[i
] == 0;
8793 if (counts
[i
] == NULL_TREE
)
8794 counts
[i
] = size_zero_node
;
8796 counts
[i
] = size_binop (PLUS_EXPR
, counts
[i
], size_int (n
[i
]));
8797 if (gimplify_expr (&counts
[i
], pre_p
, NULL
, is_gimple_val
,
8798 fb_rvalue
) == GS_ERROR
)
8800 total
= size_binop (PLUS_EXPR
, total
, counts
[i
]);
8803 if (gimplify_expr (&total
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8806 bool is_old
= unused
[1] && unused
[3] && unused
[4];
8807 tree totalpx
= size_binop (PLUS_EXPR
, unshare_expr (total
),
8808 size_int (is_old
? 1 : 4));
8810 totalpx
= size_binop (PLUS_EXPR
, totalpx
,
8811 size_binop (MULT_EXPR
, counts
[4], size_int (2)));
8812 tree type
= build_array_type (ptr_type_node
, build_index_type (totalpx
));
8813 tree array
= create_tmp_var_raw (type
);
8814 TREE_ADDRESSABLE (array
) = 1;
8815 if (!poly_int_tree_p (totalpx
))
8817 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array
)))
8818 gimplify_type_sizes (TREE_TYPE (array
), pre_p
);
8819 if (gimplify_omp_ctxp
)
8821 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8823 && (ctx
->region_type
== ORT_WORKSHARE
8824 || ctx
->region_type
== ORT_TASKGROUP
8825 || ctx
->region_type
== ORT_SIMD
8826 || ctx
->region_type
== ORT_ACC
))
8827 ctx
= ctx
->outer_context
;
8829 omp_add_variable (ctx
, array
, GOVD_LOCAL
| GOVD_SEEN
);
8831 gimplify_vla_decl (array
, pre_p
);
8834 gimple_add_tmp_var (array
);
8835 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
8840 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8841 build_int_cst (ptr_type_node
, 0));
8842 gimplify_and_add (tem
, pre_p
);
8843 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
8846 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8847 fold_convert (ptr_type_node
, total
));
8848 gimplify_and_add (tem
, pre_p
);
8849 for (i
= 1; i
< (is_old
? 2 : 4); i
++)
8851 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (i
+ !is_old
),
8852 NULL_TREE
, NULL_TREE
);
8853 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, counts
[i
- 1]);
8854 gimplify_and_add (tem
, pre_p
);
8861 for (i
= 0; i
< 5; i
++)
8863 if (i
&& (i
>= j
|| unused
[i
- 1]))
8865 cnts
[i
] = cnts
[i
- 1];
8868 cnts
[i
] = create_tmp_var (sizetype
);
8870 g
= gimple_build_assign (cnts
[i
], size_int (is_old
? 2 : 5));
8875 t
= size_binop (PLUS_EXPR
, counts
[0], size_int (2));
8877 t
= size_binop (PLUS_EXPR
, cnts
[i
- 1], counts
[i
- 1]);
8878 if (gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8881 g
= gimple_build_assign (cnts
[i
], t
);
8883 gimple_seq_add_stmt (pre_p
, g
);
8886 cnts
[5] = NULL_TREE
;
8889 tree t
= size_binop (PLUS_EXPR
, total
, size_int (5));
8890 cnts
[5] = create_tmp_var (sizetype
);
8891 g
= gimple_build_assign (cnts
[i
], t
);
8892 gimple_seq_add_stmt (pre_p
, g
);
8895 last_iter
= NULL_TREE
;
8896 tree last_bind
= NULL_TREE
;
8897 tree
*last_body
= NULL
;
8898 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8899 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8901 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8903 case OMP_CLAUSE_DEPEND_IN
:
8906 case OMP_CLAUSE_DEPEND_OUT
:
8907 case OMP_CLAUSE_DEPEND_INOUT
:
8910 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8913 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8916 case OMP_CLAUSE_DEPEND_INOUTSET
:
8922 tree t
= OMP_CLAUSE_DECL (c
);
8923 if (TREE_CODE (t
) == TREE_LIST
8925 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8927 if (TREE_PURPOSE (t
) != last_iter
)
8930 gimplify_and_add (last_bind
, pre_p
);
8931 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8932 last_bind
= build3 (BIND_EXPR
, void_type_node
,
8933 BLOCK_VARS (block
), NULL
, block
);
8934 TREE_SIDE_EFFECTS (last_bind
) = 1;
8935 SET_EXPR_LOCATION (last_bind
, OMP_CLAUSE_LOCATION (c
));
8936 tree
*p
= &BIND_EXPR_BODY (last_bind
);
8937 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8939 tree var
= TREE_VEC_ELT (it
, 0);
8940 tree begin
= TREE_VEC_ELT (it
, 1);
8941 tree end
= TREE_VEC_ELT (it
, 2);
8942 tree step
= TREE_VEC_ELT (it
, 3);
8943 tree orig_step
= TREE_VEC_ELT (it
, 4);
8944 tree type
= TREE_TYPE (var
);
8945 location_t loc
= DECL_SOURCE_LOCATION (var
);
8953 if (orig_step > 0) {
8954 if (var < end) goto beg_label;
8956 if (var > end) goto beg_label;
8958 for each iterator, with inner iterators added to
8960 tree beg_label
= create_artificial_label (loc
);
8961 tree cond_label
= NULL_TREE
;
8962 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8964 append_to_statement_list_force (tem
, p
);
8965 tem
= build_and_jump (&cond_label
);
8966 append_to_statement_list_force (tem
, p
);
8967 tem
= build1 (LABEL_EXPR
, void_type_node
, beg_label
);
8968 append_to_statement_list (tem
, p
);
8969 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
8970 NULL_TREE
, NULL_TREE
);
8971 TREE_SIDE_EFFECTS (bind
) = 1;
8972 SET_EXPR_LOCATION (bind
, loc
);
8973 append_to_statement_list_force (bind
, p
);
8974 if (POINTER_TYPE_P (type
))
8975 tem
= build2_loc (loc
, POINTER_PLUS_EXPR
, type
,
8976 var
, fold_convert_loc (loc
, sizetype
,
8979 tem
= build2_loc (loc
, PLUS_EXPR
, type
, var
, step
);
8980 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8982 append_to_statement_list_force (tem
, p
);
8983 tem
= build1 (LABEL_EXPR
, void_type_node
, cond_label
);
8984 append_to_statement_list (tem
, p
);
8985 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8989 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8990 cond
, build_and_jump (&beg_label
),
8992 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8995 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8996 cond
, build_and_jump (&beg_label
),
8998 tree osteptype
= TREE_TYPE (orig_step
);
8999 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
9001 build_int_cst (osteptype
, 0));
9002 tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
9004 append_to_statement_list_force (tem
, p
);
9005 p
= &BIND_EXPR_BODY (bind
);
9009 last_iter
= TREE_PURPOSE (t
);
9010 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
9012 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
),
9014 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
9016 if (error_operand_p (TREE_VALUE (t
)))
9018 if (TREE_VALUE (t
) != null_pointer_node
)
9019 TREE_VALUE (t
) = build_fold_addr_expr (TREE_VALUE (t
));
9022 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
9023 NULL_TREE
, NULL_TREE
);
9024 tree r2
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[5],
9025 NULL_TREE
, NULL_TREE
);
9026 r2
= build_fold_addr_expr_with_type (r2
, ptr_type_node
);
9027 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
9028 void_type_node
, r
, r2
);
9029 append_to_statement_list_force (tem
, last_body
);
9030 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
9031 void_type_node
, cnts
[i
],
9032 size_binop (PLUS_EXPR
, cnts
[i
],
9034 append_to_statement_list_force (tem
, last_body
);
9037 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
9038 NULL_TREE
, NULL_TREE
);
9039 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
9040 void_type_node
, r
, TREE_VALUE (t
));
9041 append_to_statement_list_force (tem
, last_body
);
9044 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
9045 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)),
9046 NULL_TREE
, NULL_TREE
);
9047 tem
= build_int_cst (ptr_type_node
, GOMP_DEPEND_INOUTSET
);
9048 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
9049 void_type_node
, r
, tem
);
9050 append_to_statement_list_force (tem
, last_body
);
9052 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
9053 void_type_node
, cnts
[i
],
9054 size_binop (PLUS_EXPR
, cnts
[i
],
9055 size_int (1 + (i
== 5))));
9056 append_to_statement_list_force (tem
, last_body
);
9057 TREE_VALUE (t
) = null_pointer_node
;
9063 gimplify_and_add (last_bind
, pre_p
);
9064 last_bind
= NULL_TREE
;
9066 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
9068 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
9069 NULL
, is_gimple_val
, fb_rvalue
);
9070 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
9072 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
9074 if (OMP_CLAUSE_DECL (c
) != null_pointer_node
)
9075 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
9076 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
9077 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9081 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
9082 NULL_TREE
, NULL_TREE
);
9083 tree r2
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[5],
9084 NULL_TREE
, NULL_TREE
);
9085 r2
= build_fold_addr_expr_with_type (r2
, ptr_type_node
);
9086 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, r2
);
9087 gimplify_and_add (tem
, pre_p
);
9088 g
= gimple_build_assign (cnts
[i
], size_binop (PLUS_EXPR
,
9091 gimple_seq_add_stmt (pre_p
, g
);
9094 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
9095 NULL_TREE
, NULL_TREE
);
9096 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, OMP_CLAUSE_DECL (c
));
9097 gimplify_and_add (tem
, pre_p
);
9100 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
9101 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)),
9102 NULL_TREE
, NULL_TREE
);
9103 tem
= build_int_cst (ptr_type_node
, GOMP_DEPEND_INOUTSET
);
9104 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, tem
);
9105 append_to_statement_list_force (tem
, last_body
);
9106 gimplify_and_add (tem
, pre_p
);
9108 g
= gimple_build_assign (cnts
[i
],
9109 size_binop (PLUS_EXPR
, cnts
[i
],
9110 size_int (1 + (i
== 5))));
9111 gimple_seq_add_stmt (pre_p
, g
);
9115 gimplify_and_add (last_bind
, pre_p
);
9116 tree cond
= boolean_false_node
;
9120 cond
= build2_loc (first_loc
, NE_EXPR
, boolean_type_node
, cnts
[0],
9121 size_binop_loc (first_loc
, PLUS_EXPR
, counts
[0],
9124 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
9125 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
9127 size_binop_loc (first_loc
, PLUS_EXPR
,
9133 tree prev
= size_int (5);
9134 for (i
= 0; i
< 5; i
++)
9138 prev
= size_binop_loc (first_loc
, PLUS_EXPR
, counts
[i
], prev
);
9139 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
9140 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
9141 cnts
[i
], unshare_expr (prev
)));
9144 tem
= build3_loc (first_loc
, COND_EXPR
, void_type_node
, cond
,
9145 build_call_expr_loc (first_loc
,
9146 builtin_decl_explicit (BUILT_IN_TRAP
),
9148 gimplify_and_add (tem
, pre_p
);
9149 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
9150 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
9151 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
9152 OMP_CLAUSE_CHAIN (c
) = *list_p
;
9157 /* True if mapping node C maps, or unmaps, a (Fortran) array descriptor. */
9160 omp_map_clause_descriptor_p (tree c
)
9162 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_MAP
)
9165 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
)
9168 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_RELEASE
9169 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DELETE
)
9170 && OMP_CLAUSE_RELEASE_DESCRIPTOR (c
))
9176 /* For a set of mappings describing an array section pointed to by a struct
9177 (or derived type, etc.) component, create an "alloc" or "release" node to
9178 insert into a list following a GOMP_MAP_STRUCT node. For some types of
9179 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
9180 be created that is inserted into the list of mapping nodes attached to the
9181 directive being processed -- not part of the sorted list of nodes after
9184 CODE is the code of the directive being processed. GRP_START and GRP_END
9185 are the first and last of two or three nodes representing this array section
9186 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
9187 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
9188 filled with the additional node described above, if needed.
9190 This function does not add the new nodes to any lists itself. It is the
9191 responsibility of the caller to do that. */
9194 build_omp_struct_comp_nodes (enum tree_code code
, tree grp_start
, tree grp_end
,
9197 enum gomp_map_kind mkind
9198 = (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
)
9199 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
9201 gcc_assert (grp_start
!= grp_end
);
9203 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
), OMP_CLAUSE_MAP
);
9204 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
9205 OMP_CLAUSE_DECL (c2
) = unshare_expr (OMP_CLAUSE_DECL (grp_end
));
9206 OMP_CLAUSE_CHAIN (c2
) = NULL_TREE
;
9207 tree grp_mid
= NULL_TREE
;
9208 if (OMP_CLAUSE_CHAIN (grp_start
) != grp_end
)
9209 grp_mid
= OMP_CLAUSE_CHAIN (grp_start
);
9211 if (grp_mid
&& omp_map_clause_descriptor_p (grp_mid
))
9212 OMP_CLAUSE_SIZE (c2
) = OMP_CLAUSE_SIZE (grp_mid
);
9214 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (ptr_type_node
);
9217 && OMP_CLAUSE_CODE (grp_mid
) == OMP_CLAUSE_MAP
9218 && OMP_CLAUSE_MAP_KIND (grp_mid
) == GOMP_MAP_ALWAYS_POINTER
)
9221 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
), OMP_CLAUSE_MAP
);
9222 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
9223 OMP_CLAUSE_DECL (c3
) = unshare_expr (OMP_CLAUSE_DECL (grp_mid
));
9224 OMP_CLAUSE_SIZE (c3
) = TYPE_SIZE_UNIT (ptr_type_node
);
9225 OMP_CLAUSE_CHAIN (c3
) = NULL_TREE
;
9230 *extra_node
= NULL_TREE
;
9235 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
9236 and set *BITPOSP and *POFFSETP to the bit offset of the access.
9237 If BASE_REF is non-NULL and the containing object is a reference, set
9238 *BASE_REF to that reference before dereferencing the object.
9239 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
9240 has array type, else return NULL. */
9243 extract_base_bit_offset (tree base
, poly_int64
*bitposp
,
9244 poly_offset_int
*poffsetp
,
9245 bool *variable_offset
)
9248 poly_int64 bitsize
, bitpos
;
9250 int unsignedp
, reversep
, volatilep
= 0;
9251 poly_offset_int poffset
;
9255 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
, &mode
,
9256 &unsignedp
, &reversep
, &volatilep
);
9260 if (offset
&& poly_int_tree_p (offset
))
9262 poffset
= wi::to_poly_offset (offset
);
9263 *variable_offset
= false;
9268 *variable_offset
= (offset
!= NULL_TREE
);
9271 if (maybe_ne (bitpos
, 0))
9272 poffset
+= bits_to_bytes_round_down (bitpos
);
9275 *poffsetp
= poffset
;
9280 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
9281 started processing the group yet. The TEMPORARY mark is used when we first
9282 encounter a group on a depth-first traversal, and the PERMANENT mark is used
9283 when we have processed all the group's children (i.e. all the base pointers
9284 referred to by the group's mapping nodes, recursively). */
enum omp_tsort_mark {
  UNVISITED,	/* Group not yet reached by the DFS.  */
  TEMPORARY,	/* Group is on the current DFS path (cycle detection).  */
  PERMANENT	/* Group and all its base-pointer children are done.  */
};
9292 /* Hash for trees based on operand_equal_p. Like tree_operand_hash
9293 but ignores side effects in the equality comparisons. */
9295 struct tree_operand_hash_no_se
: tree_operand_hash
9297 static inline bool equal (const value_type
&,
9298 const compare_type
&);
9302 tree_operand_hash_no_se::equal (const value_type
&t1
,
9303 const compare_type
&t2
)
9305 return operand_equal_p (t1
, t2
, OEP_MATCH_SIDE_EFFECTS
);
9308 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
9311 struct omp_mapping_group
{
9314 omp_tsort_mark mark
;
9315 /* If we've removed the group but need to reindex, mark the group as
9318 /* The group points to an already-created "GOMP_MAP_STRUCT
9319 GOMP_MAP_ATTACH_DETACH" pair. */
9320 bool reprocess_struct
;
9321 /* The group should use "zero-length" allocations for pointers that are not
9322 mapped "to" on the same directive. */
9324 struct omp_mapping_group
*sibling
;
9325 struct omp_mapping_group
*next
;
9329 debug_mapping_group (omp_mapping_group
*grp
)
9331 tree tmp
= OMP_CLAUSE_CHAIN (grp
->grp_end
);
9332 OMP_CLAUSE_CHAIN (grp
->grp_end
) = NULL
;
9333 debug_generic_expr (*grp
->grp_start
);
9334 OMP_CLAUSE_CHAIN (grp
->grp_end
) = tmp
;
9337 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9341 omp_get_base_pointer (tree expr
)
9343 while (TREE_CODE (expr
) == ARRAY_REF
9344 || TREE_CODE (expr
) == COMPONENT_REF
)
9345 expr
= TREE_OPERAND (expr
, 0);
9347 if (INDIRECT_REF_P (expr
)
9348 || (TREE_CODE (expr
) == MEM_REF
9349 && integer_zerop (TREE_OPERAND (expr
, 1))))
9351 expr
= TREE_OPERAND (expr
, 0);
9352 while (TREE_CODE (expr
) == COMPOUND_EXPR
)
9353 expr
= TREE_OPERAND (expr
, 1);
9354 if (TREE_CODE (expr
) == POINTER_PLUS_EXPR
)
9355 expr
= TREE_OPERAND (expr
, 0);
9356 if (TREE_CODE (expr
) == SAVE_EXPR
)
9357 expr
= TREE_OPERAND (expr
, 0);
9365 /* An attach or detach operation depends directly on the address being
9366 attached/detached. Return that address, or none if there are no
9367 attachments/detachments. */
9370 omp_get_attachment (omp_mapping_group
*grp
)
9372 tree node
= *grp
->grp_start
;
9374 switch (OMP_CLAUSE_MAP_KIND (node
))
9378 case GOMP_MAP_TOFROM
:
9379 case GOMP_MAP_ALWAYS_FROM
:
9380 case GOMP_MAP_ALWAYS_TO
:
9381 case GOMP_MAP_ALWAYS_TOFROM
:
9382 case GOMP_MAP_FORCE_FROM
:
9383 case GOMP_MAP_FORCE_TO
:
9384 case GOMP_MAP_FORCE_TOFROM
:
9385 case GOMP_MAP_FORCE_PRESENT
:
9386 case GOMP_MAP_PRESENT_ALLOC
:
9387 case GOMP_MAP_PRESENT_FROM
:
9388 case GOMP_MAP_PRESENT_TO
:
9389 case GOMP_MAP_PRESENT_TOFROM
:
9390 case GOMP_MAP_ALWAYS_PRESENT_FROM
:
9391 case GOMP_MAP_ALWAYS_PRESENT_TO
:
9392 case GOMP_MAP_ALWAYS_PRESENT_TOFROM
:
9393 case GOMP_MAP_ALLOC
:
9394 case GOMP_MAP_RELEASE
:
9395 case GOMP_MAP_DELETE
:
9396 case GOMP_MAP_FORCE_ALLOC
:
9397 if (node
== grp
->grp_end
)
9400 node
= OMP_CLAUSE_CHAIN (node
);
9401 if (node
&& omp_map_clause_descriptor_p (node
))
9403 gcc_assert (node
!= grp
->grp_end
);
9404 node
= OMP_CLAUSE_CHAIN (node
);
9407 switch (OMP_CLAUSE_MAP_KIND (node
))
9409 case GOMP_MAP_POINTER
:
9410 case GOMP_MAP_ALWAYS_POINTER
:
9411 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9412 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9413 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
9416 case GOMP_MAP_ATTACH_DETACH
:
9417 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9418 case GOMP_MAP_DETACH
:
9419 return OMP_CLAUSE_DECL (node
);
9422 internal_error ("unexpected mapping node");
9424 return error_mark_node
;
9426 case GOMP_MAP_TO_PSET
:
9427 gcc_assert (node
!= grp
->grp_end
);
9428 node
= OMP_CLAUSE_CHAIN (node
);
9429 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_ATTACH
9430 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DETACH
)
9431 return OMP_CLAUSE_DECL (node
);
9433 internal_error ("unexpected mapping node");
9434 return error_mark_node
;
9436 case GOMP_MAP_ATTACH
:
9437 case GOMP_MAP_DETACH
:
9438 node
= OMP_CLAUSE_CHAIN (node
);
9439 if (!node
|| *grp
->grp_start
== grp
->grp_end
)
9440 return OMP_CLAUSE_DECL (*grp
->grp_start
);
9441 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9442 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9443 return OMP_CLAUSE_DECL (*grp
->grp_start
);
9445 internal_error ("unexpected mapping node");
9446 return error_mark_node
;
9448 case GOMP_MAP_STRUCT
:
9449 case GOMP_MAP_STRUCT_UNORD
:
9450 case GOMP_MAP_FORCE_DEVICEPTR
:
9451 case GOMP_MAP_DEVICE_RESIDENT
:
9453 case GOMP_MAP_IF_PRESENT
:
9454 case GOMP_MAP_FIRSTPRIVATE
:
9455 case GOMP_MAP_FIRSTPRIVATE_INT
:
9456 case GOMP_MAP_USE_DEVICE_PTR
:
9457 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9461 internal_error ("unexpected mapping node");
9464 return error_mark_node
;
9467 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9468 mappings, return the chain pointer to the end of that group in the list. */
9471 omp_group_last (tree
*start_p
)
9473 tree c
= *start_p
, nc
, *grp_last_p
= start_p
;
9475 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
);
9477 nc
= OMP_CLAUSE_CHAIN (c
);
9479 if (!nc
|| OMP_CLAUSE_CODE (nc
) != OMP_CLAUSE_MAP
)
9482 switch (OMP_CLAUSE_MAP_KIND (c
))
9486 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
9487 && (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9488 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9489 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ATTACH_DETACH
9490 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
9491 || (OMP_CLAUSE_MAP_KIND (nc
)
9492 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
)
9493 || (OMP_CLAUSE_MAP_KIND (nc
)
9494 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
)
9495 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_DETACH
9496 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ALWAYS_POINTER
9497 || omp_map_clause_descriptor_p (nc
)))
9499 tree nc2
= OMP_CLAUSE_CHAIN (nc
);
9500 if (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_DETACH
)
9502 /* In the specific case we're doing "exit data" on an array
9503 slice of a reference-to-pointer struct component, we will see
9504 DETACH followed by ATTACH_DETACH here. We want to treat that
9505 as a single group. In other cases DETACH might represent a
9506 stand-alone "detach" clause, so we don't want to consider
9507 that part of the group. */
9509 && OMP_CLAUSE_CODE (nc2
) == OMP_CLAUSE_MAP
9510 && OMP_CLAUSE_MAP_KIND (nc2
) == GOMP_MAP_ATTACH_DETACH
)
9511 goto consume_two_nodes
;
9516 && OMP_CLAUSE_CODE (nc2
) == OMP_CLAUSE_MAP
9517 && (OMP_CLAUSE_MAP_KIND (nc
)
9518 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
)
9519 && OMP_CLAUSE_MAP_KIND (nc2
) == GOMP_MAP_ATTACH
)
9522 grp_last_p
= &OMP_CLAUSE_CHAIN (nc
);
9524 nc
= OMP_CLAUSE_CHAIN (nc2
);
9528 grp_last_p
= &OMP_CLAUSE_CHAIN (c
);
9535 case GOMP_MAP_ATTACH
:
9536 case GOMP_MAP_DETACH
:
9537 /* This is a weird artifact of how directives are parsed: bare attach or
9538 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9539 FIRSTPRIVATE_REFERENCE node. FIXME. */
9541 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
9542 && (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9543 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
))
9544 grp_last_p
= &OMP_CLAUSE_CHAIN (c
);
9547 case GOMP_MAP_TO_PSET
:
9548 if (OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
9549 && (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ATTACH
9550 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_DETACH
))
9551 grp_last_p
= &OMP_CLAUSE_CHAIN (c
);
9554 case GOMP_MAP_STRUCT
:
9555 case GOMP_MAP_STRUCT_UNORD
:
9557 unsigned HOST_WIDE_INT num_mappings
9558 = tree_to_uhwi (OMP_CLAUSE_SIZE (c
));
9559 if (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9560 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9561 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ATTACH_DETACH
)
9562 grp_last_p
= &OMP_CLAUSE_CHAIN (*grp_last_p
);
9563 for (unsigned i
= 0; i
< num_mappings
; i
++)
9564 grp_last_p
= &OMP_CLAUSE_CHAIN (*grp_last_p
);
9572 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9573 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9574 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9575 if we have more than one such group, else return NULL. */
9578 omp_gather_mapping_groups_1 (tree
*list_p
, vec
<omp_mapping_group
> *groups
,
9579 tree gather_sentinel
)
9581 for (tree
*cp
= list_p
;
9582 *cp
&& *cp
!= gather_sentinel
;
9583 cp
= &OMP_CLAUSE_CHAIN (*cp
))
9585 if (OMP_CLAUSE_CODE (*cp
) != OMP_CLAUSE_MAP
)
9588 tree
*grp_last_p
= omp_group_last (cp
);
9589 omp_mapping_group grp
;
9592 grp
.grp_end
= *grp_last_p
;
9593 grp
.mark
= UNVISITED
;
9595 grp
.deleted
= false;
9596 grp
.reprocess_struct
= false;
9597 grp
.fragile
= false;
9599 groups
->safe_push (grp
);
9605 static vec
<omp_mapping_group
> *
9606 omp_gather_mapping_groups (tree
*list_p
)
9608 vec
<omp_mapping_group
> *groups
= new vec
<omp_mapping_group
> ();
9610 omp_gather_mapping_groups_1 (list_p
, groups
, NULL_TREE
);
9612 if (groups
->length () > 0)
9621 /* A pointer mapping group GRP may define a block of memory starting at some
9622 base address, and maybe also define a firstprivate pointer or firstprivate
9623 reference that points to that block. The return value is a node containing
9624 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9625 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9626 return the number of consecutive chained nodes in CHAINED. */
9629 omp_group_base (omp_mapping_group
*grp
, unsigned int *chained
,
9632 tree node
= *grp
->grp_start
;
9634 *firstprivate
= NULL_TREE
;
9637 switch (OMP_CLAUSE_MAP_KIND (node
))
9641 case GOMP_MAP_TOFROM
:
9642 case GOMP_MAP_ALWAYS_FROM
:
9643 case GOMP_MAP_ALWAYS_TO
:
9644 case GOMP_MAP_ALWAYS_TOFROM
:
9645 case GOMP_MAP_FORCE_FROM
:
9646 case GOMP_MAP_FORCE_TO
:
9647 case GOMP_MAP_FORCE_TOFROM
:
9648 case GOMP_MAP_FORCE_PRESENT
:
9649 case GOMP_MAP_PRESENT_ALLOC
:
9650 case GOMP_MAP_PRESENT_FROM
:
9651 case GOMP_MAP_PRESENT_TO
:
9652 case GOMP_MAP_PRESENT_TOFROM
:
9653 case GOMP_MAP_ALWAYS_PRESENT_FROM
:
9654 case GOMP_MAP_ALWAYS_PRESENT_TO
:
9655 case GOMP_MAP_ALWAYS_PRESENT_TOFROM
:
9656 case GOMP_MAP_ALLOC
:
9657 case GOMP_MAP_RELEASE
:
9658 case GOMP_MAP_DELETE
:
9659 case GOMP_MAP_FORCE_ALLOC
:
9660 case GOMP_MAP_IF_PRESENT
:
9661 if (node
== grp
->grp_end
)
9664 node
= OMP_CLAUSE_CHAIN (node
);
9666 internal_error ("unexpected mapping node");
9667 if (omp_map_clause_descriptor_p (node
))
9669 if (node
== grp
->grp_end
)
9670 return *grp
->grp_start
;
9671 node
= OMP_CLAUSE_CHAIN (node
);
9673 switch (OMP_CLAUSE_MAP_KIND (node
))
9675 case GOMP_MAP_POINTER
:
9676 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9677 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9678 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
9679 *firstprivate
= OMP_CLAUSE_DECL (node
);
9680 return *grp
->grp_start
;
9682 case GOMP_MAP_ALWAYS_POINTER
:
9683 case GOMP_MAP_ATTACH_DETACH
:
9684 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9685 case GOMP_MAP_DETACH
:
9686 return *grp
->grp_start
;
9689 internal_error ("unexpected mapping node");
9691 return error_mark_node
;
9693 case GOMP_MAP_TO_PSET
:
9694 gcc_assert (node
!= grp
->grp_end
);
9695 node
= OMP_CLAUSE_CHAIN (node
);
9696 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_ATTACH
9697 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DETACH
)
9700 internal_error ("unexpected mapping node");
9701 return error_mark_node
;
9703 case GOMP_MAP_ATTACH
:
9704 case GOMP_MAP_DETACH
:
9705 node
= OMP_CLAUSE_CHAIN (node
);
9706 if (!node
|| *grp
->grp_start
== grp
->grp_end
)
9708 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9709 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9711 /* We're mapping the base pointer itself in a bare attach or detach
9712 node. This is a side effect of how parsing works, and the mapping
9713 will be removed anyway (at least for enter/exit data directives).
9714 We should ignore the mapping here. FIXME. */
9718 internal_error ("unexpected mapping node");
9719 return error_mark_node
;
9721 case GOMP_MAP_STRUCT
:
9722 case GOMP_MAP_STRUCT_UNORD
:
9724 unsigned HOST_WIDE_INT num_mappings
9725 = tree_to_uhwi (OMP_CLAUSE_SIZE (node
));
9726 node
= OMP_CLAUSE_CHAIN (node
);
9727 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9728 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9730 *firstprivate
= OMP_CLAUSE_DECL (node
);
9731 node
= OMP_CLAUSE_CHAIN (node
);
9733 else if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_ATTACH_DETACH
)
9734 node
= OMP_CLAUSE_CHAIN (node
);
9735 *chained
= num_mappings
;
9739 case GOMP_MAP_FORCE_DEVICEPTR
:
9740 case GOMP_MAP_DEVICE_RESIDENT
:
9742 case GOMP_MAP_FIRSTPRIVATE
:
9743 case GOMP_MAP_FIRSTPRIVATE_INT
:
9744 case GOMP_MAP_USE_DEVICE_PTR
:
9745 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9748 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9749 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9750 case GOMP_MAP_POINTER
:
9751 case GOMP_MAP_ALWAYS_POINTER
:
9752 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
9753 /* These shouldn't appear by themselves. */
9755 internal_error ("unexpected pointer mapping node");
9756 return error_mark_node
;
9762 return error_mark_node
;
9765 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9766 nodes by tree_operand_hash_no_se. */
9769 omp_index_mapping_groups_1 (hash_map
<tree_operand_hash_no_se
,
9770 omp_mapping_group
*> *grpmap
,
9771 vec
<omp_mapping_group
> *groups
,
9772 tree reindex_sentinel
)
9774 omp_mapping_group
*grp
;
9776 bool reindexing
= reindex_sentinel
!= NULL_TREE
, above_hwm
= false;
9778 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
9780 if (reindexing
&& *grp
->grp_start
== reindex_sentinel
)
9783 if (reindexing
&& !above_hwm
)
9786 if (grp
->reprocess_struct
)
9790 unsigned int chained
;
9791 tree node
= omp_group_base (grp
, &chained
, &fpp
);
9793 if (node
== error_mark_node
|| (!node
&& !fpp
))
9796 for (unsigned j
= 0;
9797 node
&& j
< chained
;
9798 node
= OMP_CLAUSE_CHAIN (node
), j
++)
9800 tree decl
= OMP_CLAUSE_DECL (node
);
9801 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9802 meaning node-hash lookups don't work. This is a workaround for
9803 that, but ideally we should just create the INDIRECT_REF at
9804 source instead. FIXME. */
9805 if (TREE_CODE (decl
) == MEM_REF
9806 && integer_zerop (TREE_OPERAND (decl
, 1)))
9807 decl
= build_fold_indirect_ref (TREE_OPERAND (decl
, 0));
9809 omp_mapping_group
**prev
= grpmap
->get (decl
);
9811 if (prev
&& *prev
== grp
)
9815 /* Mapping the same thing twice is normally diagnosed as an error,
9816 but can happen under some circumstances, e.g. in pr99928-16.c,
9819 #pragma omp target simd reduction(+:a[:3]) \
9820 map(always, tofrom: a[:6])
9823 will result in two "a[0]" mappings (of different sizes). */
9825 grp
->sibling
= (*prev
)->sibling
;
9826 (*prev
)->sibling
= grp
;
9829 grpmap
->put (decl
, grp
);
9835 omp_mapping_group
**prev
= grpmap
->get (fpp
);
9836 if (prev
&& *prev
!= grp
)
9838 grp
->sibling
= (*prev
)->sibling
;
9839 (*prev
)->sibling
= grp
;
9842 grpmap
->put (fpp
, grp
);
9846 static hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *
9847 omp_index_mapping_groups (vec
<omp_mapping_group
> *groups
)
9849 hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *grpmap
9850 = new hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*>;
9852 omp_index_mapping_groups_1 (grpmap
, groups
, NULL_TREE
);
9857 /* Rebuild group map from partially-processed clause list (during
9858 omp_build_struct_sibling_lists). We have already processed nodes up until
9859 a high-water mark (HWM). This is a bit tricky because the list is being
9860 reordered as it is scanned, but we know:
9862 1. The list after HWM has not been touched yet, so we can reindex it safely.
9864 2. The list before and including HWM has been altered, but remains
9865 well-formed throughout the sibling-list building operation.
9867 so, we can do the reindex operation in two parts, on the processed and
9868 then the unprocessed halves of the list. */
9870 static hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *
9871 omp_reindex_mapping_groups (tree
*list_p
,
9872 vec
<omp_mapping_group
> *groups
,
9873 vec
<omp_mapping_group
> *processed_groups
,
9876 hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *grpmap
9877 = new hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*>;
9879 processed_groups
->truncate (0);
9881 omp_gather_mapping_groups_1 (list_p
, processed_groups
, sentinel
);
9882 omp_index_mapping_groups_1 (grpmap
, processed_groups
, NULL_TREE
);
9884 omp_index_mapping_groups_1 (grpmap
, groups
, sentinel
);
9889 /* Find the immediately-containing struct for a component ref (etc.)
9893 omp_containing_struct (tree expr
)
9899 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9901 if (TREE_CODE (expr
) != ARRAY_REF
&& TREE_CODE (expr
) != COMPONENT_REF
)
9904 while (TREE_CODE (expr
) == ARRAY_REF
)
9905 expr
= TREE_OPERAND (expr
, 0);
9907 if (TREE_CODE (expr
) == COMPONENT_REF
)
9908 expr
= TREE_OPERAND (expr
, 0);
9913 /* Return TRUE if DECL describes a component that is part of a whole structure
9914 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9915 that maps that structure, if present. */
9918 omp_mapped_by_containing_struct (hash_map
<tree_operand_hash_no_se
,
9919 omp_mapping_group
*> *grpmap
,
9921 omp_mapping_group
**mapped_by_group
)
9923 tree wsdecl
= NULL_TREE
;
9925 *mapped_by_group
= NULL
;
9929 wsdecl
= omp_containing_struct (decl
);
9932 omp_mapping_group
**wholestruct
= grpmap
->get (wsdecl
);
9934 && TREE_CODE (wsdecl
) == MEM_REF
9935 && integer_zerop (TREE_OPERAND (wsdecl
, 1)))
9937 tree deref
= TREE_OPERAND (wsdecl
, 0);
9938 deref
= build_fold_indirect_ref (deref
);
9939 wholestruct
= grpmap
->get (deref
);
9943 *mapped_by_group
= *wholestruct
;
9952 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9956 omp_tsort_mapping_groups_1 (omp_mapping_group
***outlist
,
9957 vec
<omp_mapping_group
> *groups
,
9958 hash_map
<tree_operand_hash_no_se
,
9959 omp_mapping_group
*> *grpmap
,
9960 omp_mapping_group
*grp
)
9962 if (grp
->mark
== PERMANENT
)
9964 if (grp
->mark
== TEMPORARY
)
9966 fprintf (stderr
, "when processing group:\n");
9967 debug_mapping_group (grp
);
9968 internal_error ("base pointer cycle detected");
9971 grp
->mark
= TEMPORARY
;
9973 tree attaches_to
= omp_get_attachment (grp
);
9977 omp_mapping_group
**basep
= grpmap
->get (attaches_to
);
9979 if (basep
&& *basep
!= grp
)
9981 for (omp_mapping_group
*w
= *basep
; w
; w
= w
->sibling
)
9982 if (!omp_tsort_mapping_groups_1 (outlist
, groups
, grpmap
, w
))
9987 tree decl
= OMP_CLAUSE_DECL (*grp
->grp_start
);
9991 tree base
= omp_get_base_pointer (decl
);
9996 omp_mapping_group
**innerp
= grpmap
->get (base
);
9997 omp_mapping_group
*wholestruct
;
9999 /* We should treat whole-structure mappings as if all (pointer, in this
10000 case) members are mapped as individual list items. Check if we have
10001 such a whole-structure mapping, if we don't have an explicit reference
10002 to the pointer member itself. */
10004 && TREE_CODE (base
) == COMPONENT_REF
10005 && omp_mapped_by_containing_struct (grpmap
, base
, &wholestruct
))
10006 innerp
= &wholestruct
;
10008 if (innerp
&& *innerp
!= grp
)
10010 for (omp_mapping_group
*w
= *innerp
; w
; w
= w
->sibling
)
10011 if (!omp_tsort_mapping_groups_1 (outlist
, groups
, grpmap
, w
))
10019 grp
->mark
= PERMANENT
;
10021 /* Emit grp to output list. */
10024 *outlist
= &grp
->next
;
10029 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
10030 before mappings that use those pointers. This is an implementation of the
10031 depth-first search algorithm, described e.g. at:
10033 https://en.wikipedia.org/wiki/Topological_sorting
10036 static omp_mapping_group
*
10037 omp_tsort_mapping_groups (vec
<omp_mapping_group
> *groups
,
10038 hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*>
10040 bool enter_exit_data
)
10042 omp_mapping_group
*grp
, *outlist
= NULL
, **cursor
;
10044 bool saw_runtime_implicit
= false;
10048 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10050 if (grp
->mark
!= PERMANENT
)
10052 if (OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp
->grp_start
))
10054 saw_runtime_implicit
= true;
10057 if (!omp_tsort_mapping_groups_1 (&cursor
, groups
, grpmap
, grp
))
10062 if (!saw_runtime_implicit
)
10065 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10067 if (grp
->mark
!= PERMANENT
10068 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp
->grp_start
))
10070 /* Clear the flag for enter/exit data because it is currently
10071 meaningless for those operations in libgomp. */
10072 if (enter_exit_data
)
10073 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp
->grp_start
) = 0;
10075 if (!omp_tsort_mapping_groups_1 (&cursor
, groups
, grpmap
, grp
))
10083 /* Split INLIST into three parts:
10085 - "present" alloc/to/from groups
10086 - other to/from groups
10087 - other alloc/release/delete groups
10089 These sub-lists are then concatenated together to form the final list.
10090 Each sub-list retains the order of the original list.
10091 Note that ATTACH nodes are later moved to the end of the list in
10092 gimplify_adjust_omp_clauses, for target regions. */
/* Partition the singly-linked list of mapping groups INLIST into three
   sub-lists keyed on map kind -- alloc/release/delete groups, "present"
   groups, and the remaining to/from groups -- preserving each group's
   relative order, then splice the sub-lists back together (see the comment
   preceding this function for the intended final order).
   NOTE(review): this region looks extraction-damaged -- statements are split
   across lines and some lines (braces, the list-append statements, "break"s,
   the default case and the final return) appear to be missing.  Only
   comments are added here; the code text is untouched.  */
10094 static omp_mapping_group
*
10095 omp_segregate_mapping_groups (omp_mapping_group
*inlist
)
/* Three output lists, each built in order via a tail pointer.  */
10097 omp_mapping_group
*ard_groups
= NULL
, *tf_groups
= NULL
;
10098 omp_mapping_group
*p_groups
= NULL
;
10099 omp_mapping_group
**ard_tail
= &ard_groups
, **tf_tail
= &tf_groups
;
10100 omp_mapping_group
**p_tail
= &p_groups
;
/* Walk the input list; NEXT is saved up front because W is relinked.  */
10102 for (omp_mapping_group
*w
= inlist
; w
;)
10104 tree c
= *w
->grp_start
;
10105 omp_mapping_group
*next
= w
->next
;
10107 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
);
/* Dispatch on the map kind of the group's first clause.  */
10109 switch (OMP_CLAUSE_MAP_KIND (c
))
10111 case GOMP_MAP_ALLOC
:
10112 case GOMP_MAP_RELEASE
:
10113 case GOMP_MAP_DELETE
:
10116 ard_tail
= &w
->next
;
10119 /* These map types are all semantically identical, so are moved into a
10120 single group. They will each be changed into GOMP_MAP_FORCE_PRESENT
10121 in gimplify_adjust_omp_clauses. */
10122 case GOMP_MAP_PRESENT_ALLOC
:
10123 case GOMP_MAP_PRESENT_FROM
:
10124 case GOMP_MAP_PRESENT_TO
:
10125 case GOMP_MAP_PRESENT_TOFROM
:
10134 tf_tail
= &w
->next
;
/* Concatenate: present groups, then to/from groups, then
   alloc/release/delete groups.  */
10140 /* Now splice the lists together... */
10141 *tf_tail
= ard_groups
;
10142 *p_tail
= tf_groups
;
10147 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
10148 those groups based on the output list of omp_tsort_mapping_groups --
10149 singly-linked, threaded through each element's NEXT pointer starting at
10150 HEAD. Each list element appears exactly once in that linked list.
10152 Each element of GROUPS may correspond to one or several mapping nodes.
10153 Node groups are kept together, and in the reordered list, the positions of
10154 the original groups are reused for the positions of the reordered list.
10155 Hence if we have e.g.
10157 {to ptr ptr} firstprivate {tofrom ptr} ...
10159 first group non-"map" second group
10161 and say the second group contains a base pointer for the first so must be
10162 moved before it, the resulting list will contain:
10164 {tofrom ptr} firstprivate {to ptr ptr} ...
10165 ^ prev. second group ^ prev. first group
/* Rewrite the clause list so that the mapping groups in GROUPS appear in the
   order given by the sorted singly-linked list starting at HEAD, reusing the
   original groups' positions (see the comment preceding this function).
   Returns the possibly-updated list head position.
   NOTE(review): extraction-damaged region -- the return type line, the
   LIST_P parameter line, braces and several statements appear to have been
   dropped.  Comments only are added; code text untouched.  */
10169 omp_reorder_mapping_groups (vec
<omp_mapping_group
> *groups
,
10170 omp_mapping_group
*head
,
10173 omp_mapping_group
*grp
;
10175 unsigned numgroups
= groups
->length ();
/* Parallel scratch arrays indexed by original group position.  */
10176 auto_vec
<tree
> old_heads (numgroups
);
10177 auto_vec
<tree
*> old_headps (numgroups
);
10178 auto_vec
<tree
> new_heads (numgroups
);
10179 auto_vec
<tree
> old_succs (numgroups
);
/* Whether LIST_P itself points at the first group (affects return value).  */
10180 bool map_at_start
= (list_p
== (*groups
)[0].grp_start
);
10182 tree
*new_grp_tail
= NULL
;
10184 /* Stash the start & end nodes of each mapping group before we start
10185 modifying the list. */
10186 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10188 old_headps
.quick_push (grp
->grp_start
);
10189 old_heads
.quick_push (*grp
->grp_start
);
10190 old_succs
.quick_push (OMP_CLAUSE_CHAIN (grp
->grp_end
));
10193 /* And similarly, the heads of the groups in the order we want to rearrange
10195 for (omp_mapping_group
*w
= head
; w
; w
= w
->next
)
10196 new_heads
.quick_push (*w
->grp_start
);
/* Splice each reordered group into the position of the i-th original
   group, chaining each inserted group to the next.  */
10198 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10202 if (new_grp_tail
&& old_succs
[i
- 1] == old_heads
[i
])
10204 /* a {b c d} {e f g} h i j (original)
10206 a {k l m} {e f g} h i j (inserted new group on last iter)
10208 a {k l m} {n o p} h i j (this time, chain last group to new one)
10211 *new_grp_tail
= new_heads
[i
];
10213 else if (new_grp_tail
)
10215 /* a {b c d} e {f g h} i j k (original)
10217 a {l m n} e {f g h} i j k (gap after last iter's group)
10219 a {l m n} e {o p q} h i j (chain last group to old successor)
10222 *new_grp_tail
= old_succs
[i
- 1];
10223 *old_headps
[i
] = new_heads
[i
];
10227 /* The first inserted group -- point to new group, and leave end
10233 *grp
->grp_start
= new_heads
[i
];
10236 new_grp_tail
= &OMP_CLAUSE_CHAIN (head
->grp_end
);
/* Terminate the last inserted group with the original final successor.  */
10242 *new_grp_tail
= old_succs
[numgroups
- 1];
/* Every element of HEAD's list must have been consumed.  */
10244 gcc_assert (!head
);
10246 return map_at_start
? (*groups
)[0].grp_start
: list_p
;
10249 /* DECL is supposed to have lastprivate semantics in the outer contexts
10250 of combined/composite constructs, starting with OCTX.
10251 Add needed lastprivate, shared or map clause if no data sharing or
10252 mapping clause are present. IMPLICIT_P is true if it is an implicit
10253 clause (IV on simd), in which case the lastprivate will not be
10254 copied to some constructs. */
/* Walk outward through the gimplify context chain starting at OCTX, adding
   the data-sharing / mapping clauses (shared, lastprivate or map) that DECL
   needs on each enclosing combined/composite construct, as described in the
   comment preceding this function.  IMPLICIT_P distinguishes implicit
   clauses (e.g. the IV of a simd).
   NOTE(review): extraction-damaged region -- the return-type line, braces,
   "continue"s and parts of some conditions (e.g. the lead-in of the
   ORT_WORKSHARE case at 10280) appear to be missing.  Comments only.  */
10257 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx
*octx
,
10258 tree decl
, bool implicit_p
)
10260 struct gimplify_omp_ctx
*orig_octx
= octx
;
10261 for (; octx
; octx
= octx
->outer_context
)
/* Combined parallel/teams: DECL becomes shared if not already listed.  */
10263 if ((octx
->region_type
== ORT_COMBINED_PARALLEL
10264 || (octx
->region_type
& ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
)
10265 && splay_tree_lookup (octx
->variables
,
10266 (splay_tree_key
) decl
) == NULL
)
10268 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
/* Task with a combined loop: lastprivate.  */
10271 if ((octx
->region_type
& ORT_TASK
) != 0
10272 && octx
->combined_loop
10273 && splay_tree_lookup (octx
->variables
,
10274 (splay_tree_key
) decl
) == NULL
)
10276 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
/* Worksharing loop combined directly into a parallel: place the
   lastprivate on the enclosing parallel context instead.  */
10280 && octx
->region_type
== ORT_WORKSHARE
10281 && octx
->combined_loop
10282 && splay_tree_lookup (octx
->variables
,
10283 (splay_tree_key
) decl
) == NULL
10284 && octx
->outer_context
10285 && octx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
10286 && splay_tree_lookup (octx
->outer_context
->variables
,
10287 (splay_tree_key
) decl
) == NULL
)
10289 octx
= octx
->outer_context
;
10290 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
/* Worksharing or OpenACC combined loop where DECL is not private.  */
10293 if ((octx
->region_type
== ORT_WORKSHARE
|| octx
->region_type
== ORT_ACC
)
10294 && octx
->combined_loop
10295 && splay_tree_lookup (octx
->variables
,
10296 (splay_tree_key
) decl
) == NULL
10297 && !omp_check_private (octx
, decl
, false))
10299 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
/* Combined target: make sure DECL is mapped, overriding an implicit
   firstprivate when the clause is explicit.  */
10302 if (octx
->region_type
== ORT_COMBINED_TARGET
)
10304 splay_tree_node n
= splay_tree_lookup (octx
->variables
,
10305 (splay_tree_key
) decl
);
10308 omp_add_variable (octx
, decl
, GOVD_MAP
| GOVD_SEEN
);
10309 octx
= octx
->outer_context
;
10311 else if (!implicit_p
10312 && (n
->value
& GOVD_FIRSTPRIVATE_IMPLICIT
))
10314 n
->value
&= ~(GOVD_FIRSTPRIVATE
10315 | GOVD_FIRSTPRIVATE_IMPLICIT
10317 omp_add_variable (octx
, decl
, GOVD_MAP
| GOVD_SEEN
);
10318 octx
= octx
->outer_context
;
/* Finally, record a use of DECL in whatever context we stopped at.  */
10323 if (octx
&& (implicit_p
|| octx
!= orig_octx
))
10324 omp_notice_variable (octx
, decl
, true);
10327 /* We might have indexed several groups for DECL, e.g. a "TO" mapping and also
10328 a "FIRSTPRIVATE" mapping. Return the one that isn't firstprivate, etc. */
/* Look DECL up in GRPMAP and walk its sibling chain, returning the first
   group whose final clause is not GOMP_MAP_FIRSTPRIVATE_POINTER/REFERENCE
   (and, unless ALLOW_DELETED, not already marked deleted).
   NOTE(review): extraction-damaged -- the not-found early return, braces,
   and the trailing return statements appear to be missing.  Comments
   only.  */
10330 static omp_mapping_group
*
10331 omp_get_nonfirstprivate_group (hash_map
<tree_operand_hash_no_se
,
10332 omp_mapping_group
*> *grpmap
,
10333 tree decl
, bool allow_deleted
= false)
10335 omp_mapping_group
**to_group_p
= grpmap
->get (decl
);
10340 omp_mapping_group
*to_group
= *to_group_p
;
/* Several groups may share the same decl; scan the sibling chain.  */
10342 for (; to_group
; to_group
= to_group
->sibling
)
10344 tree grp_end
= to_group
->grp_end
;
10345 switch (OMP_CLAUSE_MAP_KIND (grp_end
))
/* Firstprivate mappings are exactly what we skip over.  */
10347 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
10348 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
10352 if (allow_deleted
|| !to_group
->deleted
)
10360 /* Return TRUE if the directive (whose clauses are described by the hash table
10361 of mapping groups, GRPMAP) maps DECL explicitly. If TO_SPECIFICALLY is
10362 true, only count TO mappings. If ALLOW_DELETED is true, ignore the
10363 "deleted" flag for groups. If CONTAINED_IN_STRUCT is true, also return
10364 TRUE if DECL is mapped as a member of a whole-struct mapping. */
/* Determine whether the directive whose mapping groups are in GRPMAP maps
   DECL explicitly; on success *BASE_GROUP receives the matching group.
   TO_SPECIFICALLY restricts matches to TO-like/ALLOC mappings;
   CONTAINED_IN_STRUCT also accepts a whole-struct mapping containing DECL
   (see the comment preceding this function).
   NOTE(review): extraction-damaged -- the bool return-type line, braces and
   return statements appear to be missing.  Comments only.  */
10367 omp_directive_maps_explicitly (hash_map
<tree_operand_hash_no_se
,
10368 omp_mapping_group
*> *grpmap
,
10369 tree decl
, omp_mapping_group
**base_group
,
10370 bool to_specifically
, bool allow_deleted
,
10371 bool contained_in_struct
)
10373 omp_mapping_group
*decl_group
10374 = omp_get_nonfirstprivate_group (grpmap
, decl
, allow_deleted
);
10376 *base_group
= NULL
;
10380 tree grp_first
= *decl_group
->grp_start
;
10381 /* We might be called during omp_build_struct_sibling_lists, when
10382 GOMP_MAP_STRUCT might have been inserted at the start of the group.
10383 Skip over that, and also possibly the node after it. */
10384 if (OMP_CLAUSE_MAP_KIND (grp_first
) == GOMP_MAP_STRUCT
10385 || OMP_CLAUSE_MAP_KIND (grp_first
) == GOMP_MAP_STRUCT_UNORD
)
10387 grp_first
= OMP_CLAUSE_CHAIN (grp_first
);
10388 if (OMP_CLAUSE_MAP_KIND (grp_first
) == GOMP_MAP_FIRSTPRIVATE_POINTER
10389 || (OMP_CLAUSE_MAP_KIND (grp_first
)
10390 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
10391 || OMP_CLAUSE_MAP_KIND (grp_first
) == GOMP_MAP_ATTACH_DETACH
)
10392 grp_first
= OMP_CLAUSE_CHAIN (grp_first
);
/* Accept any mapping unless the caller asked for TO/ALLOC only.  */
10394 enum gomp_map_kind first_kind
= OMP_CLAUSE_MAP_KIND (grp_first
)
;
10395 if (!to_specifically
10396 || GOMP_MAP_COPY_TO_P (first_kind
)
10397 || first_kind
== GOMP_MAP_ALLOC
)
10399 *base_group
= decl_group
;
/* Optionally also match DECL being mapped via its containing struct.  */
10404 if (contained_in_struct
10405 && omp_mapped_by_containing_struct (grpmap
, decl
, base_group
))
10411 /* If we have mappings INNER and OUTER, where INNER is a component access and
10412 OUTER is a mapping of the whole containing struct, check that the mappings
10413 are compatible. We'll be deleting the inner mapping, so we need to make
10414 sure the outer mapping does (at least) the same transfers to/from the device
10415 as the inner mapping. */
/* Check that the whole-struct mapping OUTER subsumes the data movement of
   the component mapping INNER (which will be deleted); otherwise issue an
   error at LOC.  See the comment preceding this function.
   NOTE(review): extraction-damaged -- the bool return-type line, braces,
   return statements, "break"s and at least one case label (before 10450)
   appear to be missing.  Comments only.  */
10418 omp_check_mapping_compatibility (location_t loc
,
10419 omp_mapping_group
*outer
,
10420 omp_mapping_group
*inner
)
10422 tree first_outer
= *outer
->grp_start
, first_inner
= *inner
->grp_start
;
10424 gcc_assert (OMP_CLAUSE_CODE (first_outer
) == OMP_CLAUSE_MAP
);
10425 gcc_assert (OMP_CLAUSE_CODE (first_inner
) == OMP_CLAUSE_MAP
);
10427 enum gomp_map_kind outer_kind
= OMP_CLAUSE_MAP_KIND (first_outer
);
10428 enum gomp_map_kind inner_kind
= OMP_CLAUSE_MAP_KIND (first_inner
);
/* Identical kinds are trivially compatible.  */
10430 if (outer_kind
== inner_kind
)
/* Otherwise each outer kind admits a specific set of weaker inner kinds.  */
10433 switch (outer_kind
)
10435 case GOMP_MAP_ALWAYS_TO
:
10436 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
10437 || inner_kind
== GOMP_MAP_ALLOC
10438 || inner_kind
== GOMP_MAP_TO
)
10442 case GOMP_MAP_ALWAYS_FROM
:
10443 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
10444 || inner_kind
== GOMP_MAP_RELEASE
10445 || inner_kind
== GOMP_MAP_FROM
)
10450 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
10451 || inner_kind
== GOMP_MAP_ALLOC
)
10455 case GOMP_MAP_FROM
:
10456 if (inner_kind
== GOMP_MAP_RELEASE
10457 || inner_kind
== GOMP_MAP_FORCE_PRESENT
)
10461 case GOMP_MAP_ALWAYS_TOFROM
:
10462 case GOMP_MAP_TOFROM
:
10463 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
10464 || inner_kind
== GOMP_MAP_ALLOC
10465 || inner_kind
== GOMP_MAP_TO
10466 || inner_kind
== GOMP_MAP_FROM
10467 || inner_kind
== GOMP_MAP_TOFROM
)
/* Incompatible movement: report and (presumably) return false.  */
10475 error_at (loc
, "data movement for component %qE is not compatible with "
10476 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner
),
10477 OMP_CLAUSE_DECL (first_outer
));
10482 /* This function handles several cases where clauses on a mapping directive
10483 can interact with each other.
10485 If we have a FIRSTPRIVATE_POINTER node and we're also mapping the pointer
10486 on the same directive, change the mapping of the first node to
10487 ATTACH_DETACH. We should have detected that this will happen already in
10488 c-omp.cc:c_omp_adjust_map_clauses and marked the appropriate decl
10489 as addressable. (If we didn't, bail out.)
10491 If we have a FIRSTPRIVATE_REFERENCE (for a reference to pointer) and we're
10492 mapping the base pointer also, we may need to change the mapping type to
10493 ATTACH_DETACH and synthesize an alloc node for the reference itself.
10495 If we have an ATTACH_DETACH node, this is an array section with a pointer
10496 base. If we're mapping the base on the same directive too, we can drop its
10497 mapping. However, if we have a reference to pointer, make other appropriate
10498 adjustments to the mapping nodes instead.
10500 If we have an ATTACH_DETACH node with a Fortran pointer-set (array
10501 descriptor) mapping for a derived-type component, and we're also mapping the
10502 whole of the derived-type variable on another clause, the pointer-set
10503 mapping is removed.
10505 If we have a component access but we're also mapping the whole of the
10506 containing struct, drop the former access.
10508 If the expression is a component access, and we're also mapping a base
10509 pointer used in that component access in the same expression, change the
10510 mapping type of the latter to ALLOC (ready for processing by
10511 omp_build_struct_sibling_lists). */
/* Resolve interactions between mapping clauses on one directive (see the
   long comment preceding this function): firstprivate pointer/reference
   vs. mapped base pointers, attach/detach with mapped bases, redundant
   component mappings subsumed by whole-struct mappings, etc., then repair
   group-start pointers left dangling by node removal.
   NOTE(review): extraction-damaged -- the return-type line, braces,
   "break"s, several statements and some condition fragments appear to be
   missing; the logic is too order-dependent to rewrite safely from this
   text.  Comments only; code untouched.  */
10514 omp_resolve_clause_dependencies (enum tree_code code
,
10515 vec
<omp_mapping_group
> *groups
,
10516 hash_map
<tree_operand_hash_no_se
,
10517 omp_mapping_group
*> *grpmap
)
10520 omp_mapping_group
*grp
;
/* Set when group-end removal detaches a following group's start pointer.  */
10521 bool repair_chain
= false;
10523 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10525 tree grp_end
= grp
->grp_end
;
10526 tree decl
= OMP_CLAUSE_DECL (grp_end
);
10528 gcc_assert (OMP_CLAUSE_CODE (grp_end
) == OMP_CLAUSE_MAP
);
10530 switch (OMP_CLAUSE_MAP_KIND (grp_end
))
/* Pointer also mapped on this directive: turn the firstprivate
   pointer into an attach/detach.  */
10532 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
10534 omp_mapping_group
*to_group
10535 = omp_get_nonfirstprivate_group (grpmap
, decl
);
10537 if (!to_group
|| to_group
== grp
)
10540 tree grp_first
= *to_group
->grp_start
;
10541 enum gomp_map_kind first_kind
= OMP_CLAUSE_MAP_KIND (grp_first
);
10543 if ((GOMP_MAP_COPY_TO_P (first_kind
)
10544 || first_kind
== GOMP_MAP_ALLOC
)
10545 && (OMP_CLAUSE_MAP_KIND (to_group
->grp_end
)
10546 != GOMP_MAP_FIRSTPRIVATE_POINTER
))
10548 gcc_assert (TREE_ADDRESSABLE (OMP_CLAUSE_DECL (grp_end
)));
10549 OMP_CLAUSE_SET_MAP_KIND (grp_end
, GOMP_MAP_ATTACH_DETACH
);
/* Reference to pointer where the pointee is also mapped: switch to
   attach/detach and synthesize an ALLOC node for the reference.  */
10554 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
10556 tree ptr
= build_fold_indirect_ref (decl
);
10558 omp_mapping_group
*to_group
10559 = omp_get_nonfirstprivate_group (grpmap
, ptr
);
10561 if (!to_group
|| to_group
== grp
)
10564 tree grp_first
= *to_group
->grp_start
;
10565 enum gomp_map_kind first_kind
= OMP_CLAUSE_MAP_KIND (grp_first
);
10567 if (GOMP_MAP_COPY_TO_P (first_kind
)
10568 || first_kind
== GOMP_MAP_ALLOC
)
10570 OMP_CLAUSE_SET_MAP_KIND (grp_end
, GOMP_MAP_ATTACH_DETACH
);
10571 OMP_CLAUSE_DECL (grp_end
) = ptr
;
10572 if ((OMP_CLAUSE_CHAIN (*to_group
->grp_start
)
10573 == to_group
->grp_end
)
10574 && (OMP_CLAUSE_MAP_KIND (to_group
->grp_end
)
10575 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10577 gcc_assert (TREE_ADDRESSABLE
10578 (OMP_CLAUSE_DECL (to_group
->grp_end
)));
10579 OMP_CLAUSE_SET_MAP_KIND (to_group
->grp_end
,
10580 GOMP_MAP_ATTACH_DETACH
);
/* Build an alloc node covering the reference storage itself.  */
10582 location_t loc
= OMP_CLAUSE_LOCATION (to_group
->grp_end
);
10584 = build_omp_clause (loc
, OMP_CLAUSE_MAP
);
10585 OMP_CLAUSE_SET_MAP_KIND (alloc
, GOMP_MAP_ALLOC
);
10586 tree tmp
= build_fold_addr_expr (OMP_CLAUSE_DECL
10587 (to_group
->grp_end
));
10588 tree char_ptr_type
= build_pointer_type (char_type_node
);
10589 OMP_CLAUSE_DECL (alloc
)
10590 = build2 (MEM_REF
, char_type_node
,
10592 build_int_cst (char_ptr_type
, 0));
10593 OMP_CLAUSE_SIZE (alloc
) = TYPE_SIZE_UNIT (TREE_TYPE (tmp
));
10595 OMP_CLAUSE_CHAIN (alloc
)
10596 = OMP_CLAUSE_CHAIN (*to_group
->grp_start
);
10597 OMP_CLAUSE_CHAIN (*to_group
->grp_start
) = alloc
;
/* Array-section attachments: drop or rewrite the base mapping.  */
10603 case GOMP_MAP_ATTACH_DETACH
:
10604 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
10606 tree base_ptr
, referenced_ptr_node
= NULL_TREE
;
10608 while (TREE_CODE (decl
) == ARRAY_REF
)
10609 decl
= TREE_OPERAND (decl
, 0);
10611 if (TREE_CODE (decl
) == INDIRECT_REF
)
10612 decl
= TREE_OPERAND (decl
, 0);
10614 /* Only component accesses. */
10618 /* We want the pointer itself when checking if the base pointer is
10619 mapped elsewhere in the same directive -- if we have a
10620 reference to the pointer, don't use that. */
10622 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10623 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
10625 referenced_ptr_node
= OMP_CLAUSE_CHAIN (*grp
->grp_start
);
10626 base_ptr
= OMP_CLAUSE_DECL (referenced_ptr_node
);
/* On exit-data constructs a zero-length section detaches instead.  */
10631 gomp_map_kind zlas_kind
10632 = (code
== OACC_EXIT_DATA
|| code
== OMP_TARGET_EXIT_DATA
)
10633 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
;
10635 if (TREE_CODE (TREE_TYPE (base_ptr
)) == POINTER_TYPE
)
10637 /* If we map the base TO, and we're doing an attachment, we can
10638 skip the TO mapping altogether and create an ALLOC mapping
10639 instead, since the attachment will overwrite the device
10640 pointer in that location immediately anyway. Otherwise,
10641 change our mapping to
10642 GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION in case the
10643 attachment target has not been copied to the device already
10644 by some earlier directive. */
10646 bool base_mapped_to
= false;
10648 omp_mapping_group
*base_group
;
10650 if (omp_directive_maps_explicitly (grpmap
, base_ptr
,
10651 &base_group
, false, true,
10654 if (referenced_ptr_node
)
10656 base_mapped_to
= true;
10657 if ((OMP_CLAUSE_MAP_KIND (base_group
->grp_end
)
10658 == GOMP_MAP_ATTACH_DETACH
)
10659 && (OMP_CLAUSE_CHAIN (*base_group
->grp_start
)
10660 == base_group
->grp_end
))
10662 OMP_CLAUSE_CHAIN (*base_group
->grp_start
)
10663 = OMP_CLAUSE_CHAIN (base_group
->grp_end
);
10664 base_group
->grp_end
= *base_group
->grp_start
;
10665 repair_chain
= true;
10670 base_group
->deleted
= true;
10671 OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end
) = 1;
10675 /* We're dealing with a reference to a pointer, and we are
10676 attaching both the reference and the pointer. We know the
10677 reference itself is on the target, because we are going to
10678 create an ALLOC node for it in accumulate_sibling_list. The
10679 pointer might be on the target already or it might not, but
10680 if it isn't then it's not an error, so use
10681 GOMP_MAP_ATTACH_ZLAS for it. */
10682 if (!base_mapped_to
&& referenced_ptr_node
)
10683 OMP_CLAUSE_SET_MAP_KIND (referenced_ptr_node
, zlas_kind
);
10685 omp_mapping_group
*struct_group
;
10687 if ((desc
= OMP_CLAUSE_CHAIN (*grp
->grp_start
))
10688 && omp_map_clause_descriptor_p (desc
)
10689 && omp_mapped_by_containing_struct (grpmap
, decl
,
10691 /* If we have a pointer set but we're mapping (or unmapping)
10692 the whole of the containing struct, we can remove the
10693 pointer set mapping. */
10694 OMP_CLAUSE_CHAIN (*grp
->grp_start
) = OMP_CLAUSE_CHAIN (desc
);
10696 else if (TREE_CODE (TREE_TYPE (base_ptr
)) == REFERENCE_TYPE
10697 && (TREE_CODE (TREE_TYPE (TREE_TYPE (base_ptr
)))
10699 && OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION
10701 OMP_CLAUSE_SET_MAP_KIND (grp
->grp_end
, zlas_kind
);
10705 case GOMP_MAP_ATTACH
:
10706 /* Ignore standalone attach here. */
/* Default: component access subsumed by a whole-struct mapping is
   removed; otherwise, mapped base pointers become ALLOC.  */
10711 omp_mapping_group
*struct_group
;
10712 if (omp_mapped_by_containing_struct (grpmap
, decl
, &struct_group
)
10713 && *grp
->grp_start
== grp_end
)
10715 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end
),
10716 struct_group
, grp
);
10717 /* Remove the whole of this mapping -- redundant. */
10718 grp
->deleted
= true;
10722 while ((base
= omp_get_base_pointer (base
)))
10724 omp_mapping_group
*base_group
;
10726 if (omp_directive_maps_explicitly (grpmap
, base
, &base_group
,
10727 true, true, false))
10729 tree grp_first
= *base_group
->grp_start
;
10730 OMP_CLAUSE_SET_MAP_KIND (grp_first
, GOMP_MAP_ALLOC
);
10739 /* Group start pointers may have become detached from the
10740 OMP_CLAUSE_CHAIN of previous groups if elements were removed from the
10741 end of those groups. Fix that now. */
10742 tree
*new_next
= NULL
;
10743 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10746 grp
->grp_start
= new_next
;
10748 new_next
= &OMP_CLAUSE_CHAIN (grp
->grp_end
);
10753 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
10754 clause dependencies we handle for now are struct element mappings and
10755 whole-struct mappings on the same directive, and duplicate clause
/* OpenACC variant of omp_resolve_clause_dependencies: diagnose duplicate
   component mappings in map clauses, and delete component mappings that are
   subsumed by a whole-struct mapping on the same directive.
   NOTE(review): extraction-damaged -- the return-type line, braces,
   "continue"s and parts of some statements appear to be missing.  Comments
   only; code untouched.  */
10759 oacc_resolve_clause_dependencies (vec
<omp_mapping_group
> *groups
,
10760 hash_map
<tree_operand_hash_no_se
,
10761 omp_mapping_group
*> *grpmap
)
10764 omp_mapping_group
*grp
;
/* Lazily-allocated sets: components seen so far, and components for which
   a duplicate error has already been emitted (to report each once).  */
10765 hash_set
<tree_operand_hash
> *seen_components
= NULL
;
10766 hash_set
<tree_operand_hash
> *shown_error
= NULL
;
10768 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10770 tree grp_end
= grp
->grp_end
;
10771 tree decl
= OMP_CLAUSE_DECL (grp_end
);
10773 gcc_assert (OMP_CLAUSE_CODE (grp_end
) == OMP_CLAUSE_MAP
);
10775 if (DECL_P (grp_end
))
/* Strip array indexing to find the underlying component access.  */
10778 tree c
= OMP_CLAUSE_DECL (*grp
->grp_start
);
10779 while (TREE_CODE (c
) == ARRAY_REF
)
10780 c
= TREE_OPERAND (c
, 0);
10781 if (TREE_CODE (c
) != COMPONENT_REF
)
10783 if (!seen_components
)
10784 seen_components
= new hash_set
<tree_operand_hash
> ();
10786 shown_error
= new hash_set
<tree_operand_hash
> ();
/* Duplicate component mapping on the same directive: error once.  */
10787 if (seen_components
->contains (c
)
10788 && !shown_error
->contains (c
))
10790 error_at (OMP_CLAUSE_LOCATION (grp_end
),
10791 "%qE appears more than once in map clauses",
10792 OMP_CLAUSE_DECL (grp_end
));
10793 shown_error
->add (c
);
10796 seen_components
->add (c
);
/* Component mapping subsumed by a whole-struct mapping: delete it.  */
10798 omp_mapping_group
*struct_group
;
10799 if (omp_mapped_by_containing_struct (grpmap
, decl
, &struct_group
)
10800 && *grp
->grp_start
== grp_end
)
10802 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end
),
10803 struct_group
, grp
);
10804 /* Remove the whole of this mapping -- redundant. */
10805 grp
->deleted
= true;
10809 if (seen_components
)
10810 delete seen_components
;
10812 delete shown_error
;
10815 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10816 is linked to the previous node pointed to by INSERT_AT. */
10819 omp_siblist_insert_node_after (tree newnode
, tree
*insert_at
)
10821 OMP_CLAUSE_CHAIN (newnode
) = *insert_at
;
10822 *insert_at
= newnode
;
10823 return &OMP_CLAUSE_CHAIN (newnode
);
10826 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10827 pointed to by chain MOVE_AFTER instead. */
10830 omp_siblist_move_node_after (tree node
, tree
*old_pos
, tree
*move_after
)
10832 gcc_assert (node
== *old_pos
);
10833 *old_pos
= OMP_CLAUSE_CHAIN (node
);
10834 OMP_CLAUSE_CHAIN (node
) = *move_after
;
10835 *move_after
= node
;
10838 /* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
10839 LAST_NODE to after MOVE_AFTER chain. Similar to below function, but no
10840 new nodes are prepended to the list before splicing into the new position.
10841 Return the position we should continue scanning the list at, or NULL to
10842 stay where we were. */
/* Move the nodes [FIRST_PTR, LAST_NODE] so they follow the MOVE_AFTER chain
   pointer instead (see the comment preceding this function).
   NOTE(review): extraction-damaged -- the return-type line, the MOVE_AFTER
   parameter line, braces, and the trailing statements (reattaching the
   saved sub-list at MOVE_AFTER and the returns) appear to be missing.
   Comments only; code untouched.  */
10845 omp_siblist_move_nodes_after (tree
*first_ptr
, tree last_node
,
/* Nothing to do when the group already sits at the insertion point.  */
10848 if (first_ptr
== move_after
)
/* Detach [FIRST_PTR, LAST_NODE] from the list, keeping its head in TMP.  */
10851 tree tmp
= *first_ptr
;
10852 *first_ptr
= OMP_CLAUSE_CHAIN (last_node
);
10853 OMP_CLAUSE_CHAIN (last_node
) = *move_after
;
10859 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10860 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10861 pointer MOVE_AFTER.
10863 The latter list was previously part of the OMP clause list, and the former
10864 (prepended) part is comprised of new nodes.
10866 We start with a list of nodes starting with a struct mapping node. We
10867 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10868 node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
10869 the group of mapping nodes we are currently processing (from the chain
10870 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10871 we should continue processing from, or NULL to stay where we were.
10873 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10874 different) is worked through below. Here we are processing LAST_NODE, and
10875 FIRST_PTR points at the preceding mapping clause:
10877 #. mapping node chain
10878 ---------------------------------------------------
10879 A. struct_node [->B]
10881 C. comp_2 [->D (move_after)]
10883 E. attach_3 [->F (first_ptr)]
10884 F. map_to_4 [->G (continue_at)]
10885 G. attach_4 (last_node) [->H]
10888 *last_new_tail = *first_ptr;
10890 I. new_node (first_new) [->F (last_new_tail)]
10892 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10894 #. mapping node chain
10895 ----------------------------------------------------
10896 A. struct_node [->B]
10898 C. comp_2 [->D (move_after)]
10900 E. attach_3 [->H (first_ptr)]
10901 F. map_to_4 [->G (continue_at)]
10902 G. attach_4 (last_node) [->H]
10905 I. new_node (first_new) [->F (last_new_tail)]
10907 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10909 #. mapping node chain
10910 ---------------------------------------------------
10911 A. struct_node [->B]
10913 C. comp_2 [->D (move_after)]
10915 E. attach_3 [->H (continue_at)]
10917 G. attach_4 (last_node) [->D]
10920 I. new_node (first_new) [->F (last_new_tail)]
10922 *move_after = first_new;
10924 #. mapping node chain
10925 ---------------------------------------------------
10926 A. struct_node [->B]
10928 C. comp_2 [->I (move_after)]
10930 E. attach_3 [->H (continue_at)]
10932 G. attach_4 (last_node) [->D]
10934 I. new_node (first_new) [->F (last_new_tail)]
10938 #. mapping node chain
10939 ---------------------------------------------------
10940 A. struct_node [->B]
10942 C. comp_2 [->I (move_after)]
10943 I. new_node (first_new) [->F (last_new_tail)]
10945 G. attach_4 (last_node) [->D]
10947 E. attach_3 [->H (continue_at)]
10952 omp_siblist_move_concat_nodes_after (tree first_new
, tree
*last_new_tail
,
10953 tree
*first_ptr
, tree last_node
,
10956 tree
*continue_at
= NULL
;
10957 *last_new_tail
= *first_ptr
;
10958 if (first_ptr
== move_after
)
10959 *move_after
= first_new
;
10962 *first_ptr
= OMP_CLAUSE_CHAIN (last_node
);
10963 continue_at
= first_ptr
;
10964 OMP_CLAUSE_CHAIN (last_node
) = *move_after
;
10965 *move_after
= first_new
;
10967 return continue_at
;
10970 static omp_addr_token
*
10971 omp_first_chained_access_token (vec
<omp_addr_token
*> &addr_tokens
)
10973 using namespace omp_addr_tokenizer
;
10974 int idx
= addr_tokens
.length () - 1;
10975 gcc_assert (idx
>= 0);
10976 if (addr_tokens
[idx
]->type
!= ACCESS_METHOD
)
10977 return addr_tokens
[idx
];
10978 while (idx
> 0 && addr_tokens
[idx
- 1]->type
== ACCESS_METHOD
)
10980 return addr_tokens
[idx
];
10983 /* Mapping struct members causes an additional set of nodes to be created,
10984 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10985 number of members being mapped, in order of ascending position (address or
10988 We scan through the list of mapping clauses, calling this function for each
10989 struct member mapping we find, and build up the list of mappings after the
10990 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10991 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10992 moved into place in the sorted list.
11001 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
11004 GOMP_MAP_STRUCT (4)
11005 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
11006 GOMP_MAP_ALLOC (struct.a)
11007 GOMP_MAP_ALLOC (struct.b)
11008 GOMP_MAP_TO (struct.c)
11009 GOMP_MAP_ALLOC (struct.d)
11012 In the case where we are mapping references to pointers, or in Fortran if
11013 we are mapping an array with a descriptor, additional nodes may be created
11014 after the struct node list also.
11016 The return code is either a pointer to the next node to process (if the
11017 list has been rearranged), else NULL to continue with the next node in the
11021 omp_accumulate_sibling_list (enum omp_region_type region_type
,
11022 enum tree_code code
,
11023 hash_map
<tree_operand_hash
, tree
>
11024 *&struct_map_to_clause
,
11025 hash_map
<tree_operand_hash_no_se
,
11026 omp_mapping_group
*> *group_map
,
11027 tree
*grp_start_p
, tree grp_end
,
11028 vec
<omp_addr_token
*> &addr_tokens
, tree
**inner
,
11029 bool *fragile_p
, bool reprocessing_struct
,
11032 using namespace omp_addr_tokenizer
;
11033 poly_offset_int coffset
;
11034 poly_int64 cbitpos
;
11035 tree ocd
= OMP_CLAUSE_DECL (grp_end
);
11036 bool openmp
= !(region_type
& ORT_ACC
);
11037 bool target
= (region_type
& ORT_TARGET
) != 0;
11038 tree
*continue_at
= NULL
;
11040 while (TREE_CODE (ocd
) == ARRAY_REF
)
11041 ocd
= TREE_OPERAND (ocd
, 0);
11045 omp_mapping_group
*to_group
11046 = omp_get_nonfirstprivate_group (group_map
, ocd
, true);
11052 omp_addr_token
*last_token
= omp_first_chained_access_token (addr_tokens
);
11053 if (last_token
->type
== ACCESS_METHOD
)
11055 switch (last_token
->u
.access_kind
)
11058 case ACCESS_REF_TO_POINTER
:
11059 case ACCESS_REF_TO_POINTER_OFFSET
:
11060 case ACCESS_INDEXED_REF_TO_ARRAY
:
11061 /* We may see either a bare reference or a dereferenced
11062 "convert_from_reference"-like one here. Handle either way. */
11063 if (TREE_CODE (ocd
) == INDIRECT_REF
)
11064 ocd
= TREE_OPERAND (ocd
, 0);
11065 gcc_assert (TREE_CODE (TREE_TYPE (ocd
)) == REFERENCE_TYPE
);
11073 bool variable_offset
;
11075 = extract_base_bit_offset (ocd
, &cbitpos
, &coffset
, &variable_offset
);
11078 for (base_token
= addr_tokens
.length () - 1; base_token
>= 0; base_token
--)
11080 if (addr_tokens
[base_token
]->type
== ARRAY_BASE
11081 || addr_tokens
[base_token
]->type
== STRUCTURE_BASE
)
11085 /* The two expressions in the assertion below aren't quite the same: if we
11086 have 'struct_base_decl access_indexed_array' for something like
11087 "myvar[2].x" then base will be "myvar" and addr_tokens[base_token]->expr
11088 will be "myvar[2]" -- the actual base of the structure.
11089 The former interpretation leads to a strange situation where we get
11090 struct(myvar) alloc(myvar[2].ptr1)
11091 That is, the array of structures is kind of treated as one big structure
11092 for the purposes of gathering sibling lists, etc. */
11093 /* gcc_assert (base == addr_tokens[base_token]->expr); */
11095 bool attach_detach
= ((OMP_CLAUSE_MAP_KIND (grp_end
)
11096 == GOMP_MAP_ATTACH_DETACH
)
11097 || (OMP_CLAUSE_MAP_KIND (grp_end
)
11098 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
));
11099 bool has_descriptor
= false;
11100 if (OMP_CLAUSE_CHAIN (*grp_start_p
) != grp_end
)
11102 tree grp_mid
= OMP_CLAUSE_CHAIN (*grp_start_p
);
11103 if (grp_mid
&& omp_map_clause_descriptor_p (grp_mid
))
11104 has_descriptor
= true;
11107 if (!struct_map_to_clause
|| struct_map_to_clause
->get (base
) == NULL
)
11109 enum gomp_map_kind str_kind
= GOMP_MAP_STRUCT
;
11111 if (struct_map_to_clause
== NULL
)
11112 struct_map_to_clause
= new hash_map
<tree_operand_hash
, tree
>;
11114 if (variable_offset
)
11115 str_kind
= GOMP_MAP_STRUCT_UNORD
;
11117 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
), OMP_CLAUSE_MAP
);
11119 OMP_CLAUSE_SET_MAP_KIND (l
, str_kind
);
11120 OMP_CLAUSE_DECL (l
) = unshare_expr (base
);
11121 OMP_CLAUSE_SIZE (l
) = size_int (1);
11123 struct_map_to_clause
->put (base
, l
);
11125 /* On first iterating through the clause list, we insert the struct node
11126 just before the component access node that triggers the initial
11127 omp_accumulate_sibling_list call for a particular sibling list (and
11128 it then forms the first entry in that list). When reprocessing
11129 struct bases that are themselves component accesses, we insert the
11130 struct node on an off-side list to avoid inserting the new
11131 GOMP_MAP_STRUCT into the middle of the old one. */
11132 tree
*insert_node_pos
= reprocessing_struct
? *added_tail
: grp_start_p
;
11134 if (has_descriptor
)
11136 tree desc
= OMP_CLAUSE_CHAIN (*grp_start_p
);
11137 if (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
)
11138 OMP_CLAUSE_SET_MAP_KIND (desc
, GOMP_MAP_RELEASE
);
11139 tree sc
= *insert_node_pos
;
11140 OMP_CLAUSE_CHAIN (l
) = desc
;
11141 OMP_CLAUSE_CHAIN (*grp_start_p
) = OMP_CLAUSE_CHAIN (desc
);
11142 OMP_CLAUSE_CHAIN (desc
) = sc
;
11143 *insert_node_pos
= l
;
11145 else if (attach_detach
)
11149 = build_omp_struct_comp_nodes (code
, *grp_start_p
, grp_end
,
11152 OMP_CLAUSE_CHAIN (l
) = alloc_node
;
11156 OMP_CLAUSE_CHAIN (extra_node
) = *insert_node_pos
;
11157 OMP_CLAUSE_CHAIN (alloc_node
) = extra_node
;
11158 tail
= &OMP_CLAUSE_CHAIN (extra_node
);
11162 OMP_CLAUSE_CHAIN (alloc_node
) = *insert_node_pos
;
11163 tail
= &OMP_CLAUSE_CHAIN (alloc_node
);
11166 /* For OpenMP semantics, we don't want to implicitly allocate
11167 space for the pointer here for non-compute regions (e.g. "enter
11168 data"). A FRAGILE_P node is only being created so that
11169 omp-low.cc is able to rewrite the struct properly.
11170 For references (to pointers), we want to actually allocate the
11171 space for the reference itself in the sorted list following the
11173 For pointers, we want to allocate space if we had an explicit
11174 mapping of the attachment point, but not otherwise. */
11179 && TREE_CODE (TREE_TYPE (ocd
)) == POINTER_TYPE
11180 && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end
)))
11182 if (!lang_GNU_Fortran ())
11183 /* In Fortran, pointers are dereferenced automatically, but may
11184 be unassociated. So we still want to allocate space for the
11185 pointer (as the base for an attach operation that should be
11186 present in the same directive's clause list also). */
11187 OMP_CLAUSE_SIZE (alloc_node
) = size_zero_node
;
11188 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node
) = 1;
11191 *insert_node_pos
= l
;
11193 if (reprocessing_struct
)
11195 /* When reprocessing a struct node group used as the base of a
11196 subcomponent access, if we have a reference-to-pointer base,
11198 struct(**ptr) attach(*ptr)
11199 whereas for a non-reprocess-struct group, we see, e.g.:
11200 tofrom(**ptr) attach(*ptr) attach(ptr)
11201 and we create the "alloc" for the second "attach", i.e.
11202 for the reference itself. When reprocessing a struct group we
11203 thus change the pointer attachment into a reference attachment
11204 by stripping the indirection. (The attachment of the
11205 referenced pointer must happen elsewhere, either on the same
11206 directive, or otherwise.) */
11207 tree adecl
= OMP_CLAUSE_DECL (alloc_node
);
11209 if ((TREE_CODE (adecl
) == INDIRECT_REF
11210 || (TREE_CODE (adecl
) == MEM_REF
11211 && integer_zerop (TREE_OPERAND (adecl
, 1))))
11212 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (adecl
, 0)))
11214 && (TREE_CODE (TREE_TYPE (TREE_TYPE
11215 (TREE_OPERAND (adecl
, 0)))) == POINTER_TYPE
))
11216 OMP_CLAUSE_DECL (alloc_node
) = TREE_OPERAND (adecl
, 0);
11218 *added_tail
= tail
;
11223 gcc_assert (*grp_start_p
== grp_end
);
11224 if (reprocessing_struct
)
11226 /* If we don't have an attach/detach node, this is a
11227 "target data" directive or similar, not an offload region.
11228 Synthesize an "alloc" node using just the initiating
11229 GOMP_MAP_STRUCT decl. */
11230 gomp_map_kind k
= (code
== OMP_TARGET_EXIT_DATA
11231 || code
== OACC_EXIT_DATA
)
11232 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
11234 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
),
11236 OMP_CLAUSE_SET_MAP_KIND (alloc_node
, k
);
11237 OMP_CLAUSE_DECL (alloc_node
) = unshare_expr (last_token
->expr
);
11238 OMP_CLAUSE_SIZE (alloc_node
)
11239 = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node
)));
11241 OMP_CLAUSE_CHAIN (alloc_node
) = OMP_CLAUSE_CHAIN (l
);
11242 OMP_CLAUSE_CHAIN (l
) = alloc_node
;
11243 *insert_node_pos
= l
;
11244 *added_tail
= &OMP_CLAUSE_CHAIN (alloc_node
);
11247 grp_start_p
= omp_siblist_insert_node_after (l
, insert_node_pos
);
11250 unsigned last_access
= base_token
+ 1;
11252 while (last_access
+ 1 < addr_tokens
.length ()
11253 && addr_tokens
[last_access
+ 1]->type
== ACCESS_METHOD
)
11256 if ((region_type
& ORT_TARGET
)
11257 && addr_tokens
[base_token
+ 1]->type
== ACCESS_METHOD
)
11259 bool base_ref
= false;
11260 access_method_kinds access_kind
11261 = addr_tokens
[last_access
]->u
.access_kind
;
11263 switch (access_kind
)
11265 case ACCESS_DIRECT
:
11266 case ACCESS_INDEXED_ARRAY
:
11270 case ACCESS_REF_TO_POINTER
:
11271 case ACCESS_REF_TO_POINTER_OFFSET
:
11272 case ACCESS_INDEXED_REF_TO_ARRAY
:
11279 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
),
11281 enum gomp_map_kind mkind
;
11282 omp_mapping_group
*decl_group
;
11284 switch (access_kind
)
11286 case ACCESS_POINTER
:
11287 case ACCESS_POINTER_OFFSET
:
11288 use_base
= addr_tokens
[last_access
]->expr
;
11290 case ACCESS_REF_TO_POINTER
:
11291 case ACCESS_REF_TO_POINTER_OFFSET
:
11293 = build_fold_indirect_ref (addr_tokens
[last_access
]->expr
);
11296 use_base
= addr_tokens
[base_token
]->expr
;
11299 = omp_directive_maps_explicitly (group_map
, use_base
, &decl_group
,
11300 true, false, true);
11301 if (addr_tokens
[base_token
]->type
== STRUCTURE_BASE
11302 && DECL_P (addr_tokens
[last_access
]->expr
)
11304 mkind
= base_ref
? GOMP_MAP_FIRSTPRIVATE_REFERENCE
11305 : GOMP_MAP_FIRSTPRIVATE_POINTER
;
11307 mkind
= GOMP_MAP_ATTACH_DETACH
;
11309 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
11310 /* If we have a reference to pointer base, we want to attach the
11311 pointer here, not the reference. The reference attachment happens
11314 = (access_kind
== ACCESS_REF_TO_POINTER
11315 || access_kind
== ACCESS_REF_TO_POINTER_OFFSET
);
11316 tree sdecl
= addr_tokens
[last_access
]->expr
;
11317 tree sdecl_ptr
= ref_to_ptr
? build_fold_indirect_ref (sdecl
)
11319 /* For the FIRSTPRIVATE_REFERENCE after the struct node, we
11320 want to use the reference itself for the decl, but we
11321 still want to use the pointer to calculate the bias. */
11322 OMP_CLAUSE_DECL (c2
) = (mkind
== GOMP_MAP_ATTACH_DETACH
)
11323 ? sdecl_ptr
: sdecl
;
11325 tree baddr
= build_fold_addr_expr (base
);
11326 baddr
= fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end
),
11327 ptrdiff_type_node
, baddr
);
11328 tree decladdr
= fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end
),
11329 ptrdiff_type_node
, sdecl
);
11330 OMP_CLAUSE_SIZE (c2
)
11331 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end
), MINUS_EXPR
,
11332 ptrdiff_type_node
, baddr
, decladdr
);
11333 /* Insert after struct node. */
11334 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
11335 OMP_CLAUSE_CHAIN (l
) = c2
;
11337 if (addr_tokens
[base_token
]->type
== STRUCTURE_BASE
11338 && (addr_tokens
[base_token
]->u
.structure_base_kind
11339 == BASE_COMPONENT_EXPR
)
11340 && mkind
== GOMP_MAP_ATTACH_DETACH
11341 && addr_tokens
[last_access
]->u
.access_kind
!= ACCESS_REF
)
11343 *inner
= insert_node_pos
;
11350 if (addr_tokens
[base_token
]->type
== STRUCTURE_BASE
11351 && (addr_tokens
[base_token
]->u
.structure_base_kind
11352 == BASE_COMPONENT_EXPR
)
11353 && addr_tokens
[last_access
]->u
.access_kind
== ACCESS_REF
)
11354 *inner
= insert_node_pos
;
11358 else if (struct_map_to_clause
)
11360 tree
*osc
= struct_map_to_clause
->get (base
);
11361 tree
*sc
= NULL
, *scp
= NULL
;
11362 bool unordered
= false;
11364 if (osc
&& OMP_CLAUSE_MAP_KIND (*osc
) == GOMP_MAP_STRUCT_UNORD
)
11367 unsigned HOST_WIDE_INT i
, elems
= tree_to_uhwi (OMP_CLAUSE_SIZE (*osc
));
11368 sc
= &OMP_CLAUSE_CHAIN (*osc
);
11369 /* The struct mapping might be immediately followed by a
11370 FIRSTPRIVATE_POINTER, FIRSTPRIVATE_REFERENCE or an ATTACH_DETACH --
11371 if it's an indirect access or a reference, or if the structure base
11372 is not a decl. The FIRSTPRIVATE_* nodes are removed in omp-low.cc
11373 after they have been processed there, and ATTACH_DETACH nodes are
11374 recomputed and moved out of the GOMP_MAP_STRUCT construct once
11375 sibling list building is complete. */
11376 if (OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11377 || OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
11378 || OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_ATTACH_DETACH
)
11379 sc
= &OMP_CLAUSE_CHAIN (*sc
);
11380 for (i
= 0; i
< elems
; i
++, sc
= &OMP_CLAUSE_CHAIN (*sc
))
11381 if (attach_detach
&& sc
== grp_start_p
)
11383 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != COMPONENT_REF
11384 && TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != INDIRECT_REF
11385 && TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != ARRAY_REF
)
11389 tree sc_decl
= OMP_CLAUSE_DECL (*sc
);
11390 poly_offset_int offset
;
11393 if (TREE_CODE (sc_decl
) == ARRAY_REF
)
11395 while (TREE_CODE (sc_decl
) == ARRAY_REF
)
11396 sc_decl
= TREE_OPERAND (sc_decl
, 0);
11397 if (TREE_CODE (sc_decl
) != COMPONENT_REF
11398 || TREE_CODE (TREE_TYPE (sc_decl
)) != ARRAY_TYPE
)
11401 else if (INDIRECT_REF_P (sc_decl
)
11402 && TREE_CODE (TREE_OPERAND (sc_decl
, 0)) == COMPONENT_REF
11403 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl
, 0)))
11404 == REFERENCE_TYPE
))
11405 sc_decl
= TREE_OPERAND (sc_decl
, 0);
11407 bool variable_offset2
;
11408 tree base2
= extract_base_bit_offset (sc_decl
, &bitpos
, &offset
,
11409 &variable_offset2
);
11410 if (!base2
|| !operand_equal_p (base2
, base
, 0))
11414 if (variable_offset2
)
11416 OMP_CLAUSE_SET_MAP_KIND (*osc
, GOMP_MAP_STRUCT_UNORD
);
11420 else if ((region_type
& ORT_ACC
) != 0)
11422 /* For OpenACC, allow (ignore) duplicate struct accesses in
11423 the middle of a mapping clause, e.g. "mystruct->foo" in:
11424 copy(mystruct->foo->bar) copy(mystruct->foo->qux). */
11425 if (reprocessing_struct
11426 && known_eq (coffset
, offset
)
11427 && known_eq (cbitpos
, bitpos
))
11430 else if (known_eq (coffset
, offset
)
11431 && known_eq (cbitpos
, bitpos
))
11433 /* Having two struct members at the same offset doesn't work,
11434 so make sure we don't. (We're allowed to ignore this.
11435 Should we report the error?) */
11436 /*error_at (OMP_CLAUSE_LOCATION (grp_end),
11437 "duplicate struct member %qE in map clauses",
11438 OMP_CLAUSE_DECL (grp_end));*/
11441 if (maybe_lt (coffset
, offset
)
11442 || (known_eq (coffset
, offset
)
11443 && maybe_lt (cbitpos
, bitpos
)))
11452 /* If this is an unordered struct, just insert the new element at the
11453 end of the list. */
11456 for (; i
< elems
; i
++)
11457 sc
= &OMP_CLAUSE_CHAIN (*sc
);
11461 OMP_CLAUSE_SIZE (*osc
)
11462 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
), size_one_node
);
11464 if (reprocessing_struct
)
11466 /* If we're reprocessing a struct node, we don't want to do most of
11467 the list manipulation below. We only need to handle the (pointer
11468 or reference) attach/detach case. */
11469 tree extra_node
, alloc_node
;
11470 if (has_descriptor
)
11471 gcc_unreachable ();
11472 else if (attach_detach
)
11473 alloc_node
= build_omp_struct_comp_nodes (code
, *grp_start_p
,
11474 grp_end
, &extra_node
);
11477 /* If we don't have an attach/detach node, this is a
11478 "target data" directive or similar, not an offload region.
11479 Synthesize an "alloc" node using just the initiating
11480 GOMP_MAP_STRUCT decl. */
11481 gomp_map_kind k
= (code
== OMP_TARGET_EXIT_DATA
11482 || code
== OACC_EXIT_DATA
)
11483 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
11485 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
),
11487 OMP_CLAUSE_SET_MAP_KIND (alloc_node
, k
);
11488 OMP_CLAUSE_DECL (alloc_node
) = unshare_expr (last_token
->expr
);
11489 OMP_CLAUSE_SIZE (alloc_node
)
11490 = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node
)));
11494 omp_siblist_insert_node_after (alloc_node
, scp
);
11497 tree
*new_end
= omp_siblist_insert_node_after (alloc_node
, sc
);
11498 if (sc
== *added_tail
)
11499 *added_tail
= new_end
;
11505 if (has_descriptor
)
11507 tree desc
= OMP_CLAUSE_CHAIN (*grp_start_p
);
11508 if (code
== OMP_TARGET_EXIT_DATA
11509 || code
== OACC_EXIT_DATA
)
11510 OMP_CLAUSE_SET_MAP_KIND (desc
, GOMP_MAP_RELEASE
);
11511 omp_siblist_move_node_after (desc
,
11512 &OMP_CLAUSE_CHAIN (*grp_start_p
),
11515 else if (attach_detach
)
11517 tree cl
= NULL_TREE
, extra_node
;
11518 tree alloc_node
= build_omp_struct_comp_nodes (code
, *grp_start_p
,
11519 grp_end
, &extra_node
);
11520 tree
*tail_chain
= NULL
;
11526 && TREE_CODE (TREE_TYPE (ocd
)) == POINTER_TYPE
11527 && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end
)))
11529 if (!lang_GNU_Fortran ())
11530 OMP_CLAUSE_SIZE (alloc_node
) = size_zero_node
;
11531 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node
) = 1;
11536 grp_end : the last (or only) node in this group.
11537 grp_start_p : pointer to the first node in a pointer mapping group
11538 up to and including GRP_END.
11539 sc : pointer to the chain for the end of the struct component
11541 scp : pointer to the chain for the sorted position at which we
11542 should insert in the middle of the struct component list
11543 (else NULL to insert at end).
11544 alloc_node : the "alloc" node for the structure (pointer-type)
11545 component. We insert at SCP (if present), else SC
11546 (the end of the struct component list).
11547 extra_node : a newly-synthesized node for an additional indirect
11548 pointer mapping or a Fortran pointer set, if needed.
11549 cl : first node to prepend before grp_start_p.
11550 tail_chain : pointer to chain of last prepended node.
11552 The general idea is we move the nodes for this struct mapping
11553 together: the alloc node goes into the sorted list directly after
11554 the struct mapping, and any extra nodes (together with the nodes
11555 mapping arrays pointed to by struct components) get moved after
11556 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
11557 the end of the struct component mapping list. It's important that
11558 the alloc_node comes first in that case because it's part of the
11559 sorted component mapping list (but subsequent nodes are not!). */
11562 omp_siblist_insert_node_after (alloc_node
, scp
);
11564 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
11565 already inserted it) and the extra_node (if it is present). The
11566 list can be empty if we added alloc_node above and there is no
11568 if (scp
&& extra_node
)
11571 tail_chain
= &OMP_CLAUSE_CHAIN (extra_node
);
11573 else if (extra_node
)
11575 OMP_CLAUSE_CHAIN (alloc_node
) = extra_node
;
11577 tail_chain
= &OMP_CLAUSE_CHAIN (extra_node
);
11582 tail_chain
= &OMP_CLAUSE_CHAIN (alloc_node
);
11586 = cl
? omp_siblist_move_concat_nodes_after (cl
, tail_chain
,
11587 grp_start_p
, grp_end
,
11589 : omp_siblist_move_nodes_after (grp_start_p
, grp_end
, sc
);
11591 else if (*sc
!= grp_end
)
11593 gcc_assert (*grp_start_p
== grp_end
);
11595 /* We are moving the current node back to a previous struct node:
11596 the node that used to point to the current node will now point to
11598 continue_at
= grp_start_p
;
11599 /* In the non-pointer case, the mapping clause itself is moved into
11600 the correct position in the struct component list, which in this
11601 case is just SC. */
11602 omp_siblist_move_node_after (*grp_start_p
, grp_start_p
, sc
);
11605 return continue_at
;
11608 /* Scan through GROUPS, and create sorted structure sibling lists without
11612 omp_build_struct_sibling_lists (enum tree_code code
,
11613 enum omp_region_type region_type
,
11614 vec
<omp_mapping_group
> *groups
,
11615 hash_map
<tree_operand_hash_no_se
,
11616 omp_mapping_group
*> **grpmap
,
11619 using namespace omp_addr_tokenizer
;
11621 omp_mapping_group
*grp
;
11622 hash_map
<tree_operand_hash
, tree
> *struct_map_to_clause
= NULL
;
11623 bool success
= true;
11624 tree
*new_next
= NULL
;
11625 tree
*tail
= &OMP_CLAUSE_CHAIN ((*groups
)[groups
->length () - 1].grp_end
);
11626 tree added_nodes
= NULL_TREE
;
11627 tree
*added_tail
= &added_nodes
;
11628 auto_vec
<omp_mapping_group
> pre_hwm_groups
;
11630 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
11632 tree c
= grp
->grp_end
;
11633 tree decl
= OMP_CLAUSE_DECL (c
);
11634 tree grp_end
= grp
->grp_end
;
11635 auto_vec
<omp_addr_token
*> addr_tokens
;
11636 tree sentinel
= OMP_CLAUSE_CHAIN (grp_end
);
11638 if (new_next
&& !grp
->reprocess_struct
)
11639 grp
->grp_start
= new_next
;
11643 tree
*grp_start_p
= grp
->grp_start
;
11648 /* Skip groups we marked for deletion in
11649 {omp,oacc}_resolve_clause_dependencies. */
11653 if (OMP_CLAUSE_CHAIN (*grp_start_p
)
11654 && OMP_CLAUSE_CHAIN (*grp_start_p
) != grp_end
)
11656 /* Don't process an array descriptor that isn't inside a derived type
11657 as a struct (the GOMP_MAP_POINTER following will have the form
11658 "var.data", but such mappings are handled specially). */
11659 tree grpmid
= OMP_CLAUSE_CHAIN (*grp_start_p
);
11660 if (omp_map_clause_descriptor_p (grpmid
)
11661 && DECL_P (OMP_CLAUSE_DECL (grpmid
)))
11667 while (TREE_CODE (expr
) == ARRAY_REF
)
11668 expr
= TREE_OPERAND (expr
, 0);
11670 if (!omp_parse_expr (addr_tokens
, expr
))
11673 omp_addr_token
*last_token
11674 = omp_first_chained_access_token (addr_tokens
);
11676 /* A mapping of a reference to a pointer member that doesn't specify an
11677 array section, etc., like this:
11678 *mystruct.ref_to_ptr
11679 should not be processed by the struct sibling-list handling code --
11680 it just transfers the referenced pointer.
11682 In contrast, the quite similar-looking construct:
11684 which is equivalent to e.g.
11686 *does* trigger sibling-list processing.
11688 An exception for the former case is for "fragile" groups where the
11689 reference itself is not handled otherwise; this is subject to special
11690 handling in omp_accumulate_sibling_list also. */
11692 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
11693 && last_token
->type
== ACCESS_METHOD
11694 && last_token
->u
.access_kind
== ACCESS_REF
11699 if (TREE_CODE (d
) == ARRAY_REF
)
11701 while (TREE_CODE (d
) == ARRAY_REF
)
11702 d
= TREE_OPERAND (d
, 0);
11703 if (TREE_CODE (d
) == COMPONENT_REF
11704 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
11708 && INDIRECT_REF_P (decl
)
11709 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
11710 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
11712 && (OMP_CLAUSE_MAP_KIND (c
)
11713 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
))
11714 decl
= TREE_OPERAND (decl
, 0);
11718 if (TREE_CODE (decl
) != COMPONENT_REF
)
11721 /* If we're mapping the whole struct in another node, skip adding this
11722 node to a sibling list. */
11723 omp_mapping_group
*wholestruct
;
11724 if (omp_mapped_by_containing_struct (*grpmap
, OMP_CLAUSE_DECL (c
),
11728 if (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
11729 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
11730 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
11731 && code
!= OACC_UPDATE
11732 && code
!= OMP_TARGET_UPDATE
)
11734 if (error_operand_p (decl
))
11740 tree stype
= TREE_TYPE (decl
);
11741 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
11742 stype
= TREE_TYPE (stype
);
11743 if (TYPE_SIZE_UNIT (stype
) == NULL
11744 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
11746 error_at (OMP_CLAUSE_LOCATION (c
),
11747 "mapping field %qE of variable length "
11748 "structure", OMP_CLAUSE_DECL (c
));
11753 tree
*inner
= NULL
;
11754 bool fragile_p
= grp
->fragile
;
11757 = omp_accumulate_sibling_list (region_type
, code
,
11758 struct_map_to_clause
, *grpmap
,
11759 grp_start_p
, grp_end
, addr_tokens
,
11760 &inner
, &fragile_p
,
11761 grp
->reprocess_struct
, &added_tail
);
11765 omp_mapping_group newgrp
;
11766 newgrp
.grp_start
= inner
;
11767 if (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (*inner
))
11768 == GOMP_MAP_ATTACH_DETACH
)
11769 newgrp
.grp_end
= OMP_CLAUSE_CHAIN (*inner
);
11771 newgrp
.grp_end
= *inner
;
11772 newgrp
.mark
= UNVISITED
;
11773 newgrp
.sibling
= NULL
;
11774 newgrp
.deleted
= false;
11775 newgrp
.reprocess_struct
= true;
11776 newgrp
.fragile
= fragile_p
;
11777 newgrp
.next
= NULL
;
11778 groups
->safe_push (newgrp
);
11780 /* !!! Growing GROUPS might invalidate the pointers in the group
11781 map. Rebuild it here. This is a bit inefficient, but
11782 shouldn't happen very often. */
11785 = omp_reindex_mapping_groups (list_p
, groups
, &pre_hwm_groups
,
11791 /* Delete groups marked for deletion above. At this point the order of the
11792 groups may no longer correspond to the order of the underlying list,
11793 which complicates this a little. First clear out OMP_CLAUSE_DECL for
11794 deleted nodes... */
11796 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
11798 for (tree d
= *grp
->grp_start
;
11799 d
!= OMP_CLAUSE_CHAIN (grp
->grp_end
);
11800 d
= OMP_CLAUSE_CHAIN (d
))
11801 OMP_CLAUSE_DECL (d
) = NULL_TREE
;
11803 /* ...then sweep through the list removing the now-empty nodes. */
11808 if (OMP_CLAUSE_CODE (*tail
) == OMP_CLAUSE_MAP
11809 && OMP_CLAUSE_DECL (*tail
) == NULL_TREE
)
11810 *tail
= OMP_CLAUSE_CHAIN (*tail
);
11812 tail
= &OMP_CLAUSE_CHAIN (*tail
);
11815 /* Tack on the struct nodes added during nested struct reprocessing. */
11818 *tail
= added_nodes
;
11822 /* Now we have finished building the struct sibling lists, reprocess
11823 newly-added "attach" nodes: we need the address of the first
11824 mapped element of each struct sibling list for the bias of the attach
11825 operation -- not necessarily the base address of the whole struct. */
11826 if (struct_map_to_clause
)
11827 for (hash_map
<tree_operand_hash
, tree
>::iterator iter
11828 = struct_map_to_clause
->begin ();
11829 iter
!= struct_map_to_clause
->end ();
11832 tree struct_node
= (*iter
).second
;
11833 gcc_assert (OMP_CLAUSE_CODE (struct_node
) == OMP_CLAUSE_MAP
);
11834 tree attach
= OMP_CLAUSE_CHAIN (struct_node
);
11836 if (OMP_CLAUSE_CODE (attach
) != OMP_CLAUSE_MAP
11837 || OMP_CLAUSE_MAP_KIND (attach
) != GOMP_MAP_ATTACH_DETACH
)
11840 OMP_CLAUSE_SET_MAP_KIND (attach
, GOMP_MAP_ATTACH
);
11842 /* Sanity check: the standalone attach node will not work if we have
11843 an "enter data" operation (because for those, variables need to be
11844 mapped separately and attach nodes must be grouped together with the
11845 base they attach to). We should only have created the
11846 ATTACH_DETACH node after GOMP_MAP_STRUCT for a target region, so
11847 this should never be true. */
11848 gcc_assert ((region_type
& ORT_TARGET
) != 0);
11850 /* This is the first sorted node in the struct sibling list. Use it
11851 to recalculate the correct bias to use.
11852 (&first_node - attach_decl).
11853 For GOMP_MAP_STRUCT_UNORD, we need e.g. the
11854 min(min(min(first,second),third),fourth) element, because the
11855 elements aren't in any particular order. */
11857 if (OMP_CLAUSE_MAP_KIND (struct_node
) == GOMP_MAP_STRUCT_UNORD
)
11859 tree first_node
= OMP_CLAUSE_CHAIN (attach
);
11860 unsigned HOST_WIDE_INT num_mappings
11861 = tree_to_uhwi (OMP_CLAUSE_SIZE (struct_node
));
11862 lowest_addr
= OMP_CLAUSE_DECL (first_node
);
11863 lowest_addr
= build_fold_addr_expr (lowest_addr
);
11864 lowest_addr
= fold_convert (pointer_sized_int_node
, lowest_addr
);
11865 tree next_node
= OMP_CLAUSE_CHAIN (first_node
);
11866 while (num_mappings
> 1)
11868 tree tmp
= OMP_CLAUSE_DECL (next_node
);
11869 tmp
= build_fold_addr_expr (tmp
);
11870 tmp
= fold_convert (pointer_sized_int_node
, tmp
);
11871 lowest_addr
= fold_build2 (MIN_EXPR
, pointer_sized_int_node
,
11873 next_node
= OMP_CLAUSE_CHAIN (next_node
);
11876 lowest_addr
= fold_convert (ptrdiff_type_node
, lowest_addr
);
11880 tree first_node
= OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (attach
));
11881 first_node
= build_fold_addr_expr (first_node
);
11882 lowest_addr
= fold_convert (ptrdiff_type_node
, first_node
);
11884 tree attach_decl
= OMP_CLAUSE_DECL (attach
);
11885 attach_decl
= fold_convert (ptrdiff_type_node
, attach_decl
);
11886 OMP_CLAUSE_SIZE (attach
)
11887 = fold_build2 (MINUS_EXPR
, ptrdiff_type_node
, lowest_addr
,
11890 /* Remove GOMP_MAP_ATTACH node from after struct node. */
11891 OMP_CLAUSE_CHAIN (struct_node
) = OMP_CLAUSE_CHAIN (attach
);
11892 /* ...and re-insert it at the end of our clause list. */
11894 OMP_CLAUSE_CHAIN (attach
) = NULL_TREE
;
11895 tail
= &OMP_CLAUSE_CHAIN (attach
);
11899 if (struct_map_to_clause
)
11900 delete struct_map_to_clause
;
11905 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
11906 and previous omp contexts. */
11909 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
11910 enum omp_region_type region_type
,
11911 enum tree_code code
)
11913 using namespace omp_addr_tokenizer
;
11914 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
11916 tree
*orig_list_p
= list_p
;
11917 int handled_depend_iterators
= -1;
11920 ctx
= new_omp_context (region_type
);
11922 outer_ctx
= ctx
->outer_context
;
11923 if (code
== OMP_TARGET
)
11925 if (!lang_GNU_Fortran ())
11926 ctx
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
11927 ctx
->defaultmap
[GDMK_SCALAR
] = GOVD_FIRSTPRIVATE
;
11928 ctx
->defaultmap
[GDMK_SCALAR_TARGET
] = (lang_GNU_Fortran ()
11929 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
11931 if (!lang_GNU_Fortran ())
11935 case OMP_TARGET_DATA
:
11936 case OMP_TARGET_ENTER_DATA
:
11937 case OMP_TARGET_EXIT_DATA
:
11939 case OACC_HOST_DATA
:
11940 case OACC_PARALLEL
:
11942 ctx
->target_firstprivatize_array_bases
= true;
11947 vec
<omp_mapping_group
> *groups
= NULL
;
11948 hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *grpmap
= NULL
;
11949 unsigned grpnum
= 0;
11950 tree
*grp_start_p
= NULL
, grp_end
= NULL_TREE
;
11952 if (code
== OMP_TARGET
11953 || code
== OMP_TARGET_DATA
11954 || code
== OMP_TARGET_ENTER_DATA
11955 || code
== OMP_TARGET_EXIT_DATA
11956 || code
== OACC_DATA
11957 || code
== OACC_KERNELS
11958 || code
== OACC_PARALLEL
11959 || code
== OACC_SERIAL
11960 || code
== OACC_ENTER_DATA
11961 || code
== OACC_EXIT_DATA
11962 || code
== OACC_UPDATE
11963 || code
== OACC_DECLARE
)
11965 groups
= omp_gather_mapping_groups (list_p
);
11968 grpmap
= omp_index_mapping_groups (groups
);
11971 while ((c
= *list_p
) != NULL
)
11973 bool remove
= false;
11974 bool notice_outer
= true;
11975 bool map_descriptor
;
11976 const char *check_non_private
= NULL
;
11977 unsigned int flags
;
11979 auto_vec
<omp_addr_token
*, 10> addr_tokens
;
11981 if (grp_end
&& c
== OMP_CLAUSE_CHAIN (grp_end
))
11983 grp_start_p
= NULL
;
11984 grp_end
= NULL_TREE
;
11987 switch (OMP_CLAUSE_CODE (c
))
11989 case OMP_CLAUSE_PRIVATE
:
11990 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
11991 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
11993 flags
|= GOVD_PRIVATE_OUTER_REF
;
11994 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
11997 notice_outer
= false;
11999 case OMP_CLAUSE_SHARED
:
12000 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
12002 case OMP_CLAUSE_FIRSTPRIVATE
:
12003 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
12004 check_non_private
= "firstprivate";
12005 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
12007 gcc_assert (code
== OMP_TARGET
);
12008 flags
|= GOVD_FIRSTPRIVATE_IMPLICIT
;
12011 case OMP_CLAUSE_LASTPRIVATE
:
12012 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12015 case OMP_DISTRIBUTE
:
12016 error_at (OMP_CLAUSE_LOCATION (c
),
12017 "conditional %<lastprivate%> clause on "
12018 "%qs construct", "distribute");
12019 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
12022 error_at (OMP_CLAUSE_LOCATION (c
),
12023 "conditional %<lastprivate%> clause on "
12024 "%qs construct", "taskloop");
12025 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
12030 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
12031 if (code
!= OMP_LOOP
)
12032 check_non_private
= "lastprivate";
12033 decl
= OMP_CLAUSE_DECL (c
);
12034 if (error_operand_p (decl
))
12036 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
12037 && !lang_hooks
.decls
.omp_scalar_p (decl
, true))
12039 error_at (OMP_CLAUSE_LOCATION (c
),
12040 "non-scalar variable %qD in conditional "
12041 "%<lastprivate%> clause", decl
);
12042 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
12044 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12045 flags
|= GOVD_LASTPRIVATE_CONDITIONAL
;
12046 omp_lastprivate_for_combined_outer_constructs (outer_ctx
, decl
,
12049 case OMP_CLAUSE_REDUCTION
:
12050 if (OMP_CLAUSE_REDUCTION_TASK (c
))
12052 if (region_type
== ORT_WORKSHARE
|| code
== OMP_SCOPE
)
12055 nowait
= omp_find_clause (*list_p
,
12056 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
12058 && (outer_ctx
== NULL
12059 || outer_ctx
->region_type
!= ORT_COMBINED_PARALLEL
))
12061 error_at (OMP_CLAUSE_LOCATION (c
),
12062 "%<task%> reduction modifier on a construct "
12063 "with a %<nowait%> clause");
12064 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
12067 else if ((region_type
& ORT_PARALLEL
) != ORT_PARALLEL
)
12069 error_at (OMP_CLAUSE_LOCATION (c
),
12070 "invalid %<task%> reduction modifier on construct "
12071 "other than %<parallel%>, %qs, %<sections%> or "
12072 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
12073 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
12076 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
12080 error_at (OMP_CLAUSE_LOCATION (c
),
12081 "%<inscan%> %<reduction%> clause on "
12082 "%qs construct", "sections");
12083 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
12086 error_at (OMP_CLAUSE_LOCATION (c
),
12087 "%<inscan%> %<reduction%> clause on "
12088 "%qs construct", "parallel");
12089 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
12092 error_at (OMP_CLAUSE_LOCATION (c
),
12093 "%<inscan%> %<reduction%> clause on "
12094 "%qs construct", "teams");
12095 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
12098 error_at (OMP_CLAUSE_LOCATION (c
),
12099 "%<inscan%> %<reduction%> clause on "
12100 "%qs construct", "taskloop");
12101 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
12104 error_at (OMP_CLAUSE_LOCATION (c
),
12105 "%<inscan%> %<reduction%> clause on "
12106 "%qs construct", "scope");
12107 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
12113 case OMP_CLAUSE_IN_REDUCTION
:
12114 case OMP_CLAUSE_TASK_REDUCTION
:
12115 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
12116 /* OpenACC permits reductions on private variables. */
12117 if (!(region_type
& ORT_ACC
)
12118 /* taskgroup is actually not a worksharing region. */
12119 && code
!= OMP_TASKGROUP
)
12120 check_non_private
= omp_clause_code_name
[OMP_CLAUSE_CODE (c
)];
12121 decl
= OMP_CLAUSE_DECL (c
);
12122 if (TREE_CODE (decl
) == MEM_REF
)
12124 tree type
= TREE_TYPE (decl
);
12125 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
12126 gimplify_ctxp
->into_ssa
= false;
12127 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
12128 NULL
, is_gimple_val
, fb_rvalue
, false)
12131 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
12135 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
12136 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
12139 omp_firstprivatize_variable (ctx
, v
);
12140 omp_notice_variable (ctx
, v
, true);
12142 decl
= TREE_OPERAND (decl
, 0);
12143 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
12145 gimplify_ctxp
->into_ssa
= false;
12146 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
12147 NULL
, is_gimple_val
, fb_rvalue
, false)
12150 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
12154 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
12155 v
= TREE_OPERAND (decl
, 1);
12158 omp_firstprivatize_variable (ctx
, v
);
12159 omp_notice_variable (ctx
, v
, true);
12161 decl
= TREE_OPERAND (decl
, 0);
12163 if (TREE_CODE (decl
) == ADDR_EXPR
12164 || TREE_CODE (decl
) == INDIRECT_REF
)
12165 decl
= TREE_OPERAND (decl
, 0);
12168 case OMP_CLAUSE_LINEAR
:
12169 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
12170 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
12177 if (code
== OMP_SIMD
12178 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
12180 struct gimplify_omp_ctx
*octx
= outer_ctx
;
12182 && octx
->region_type
== ORT_WORKSHARE
12183 && octx
->combined_loop
12184 && !octx
->distribute
)
12186 if (octx
->outer_context
12187 && (octx
->outer_context
->region_type
12188 == ORT_COMBINED_PARALLEL
))
12189 octx
= octx
->outer_context
->outer_context
;
12191 octx
= octx
->outer_context
;
12194 && octx
->region_type
== ORT_WORKSHARE
12195 && octx
->combined_loop
12196 && octx
->distribute
)
12198 error_at (OMP_CLAUSE_LOCATION (c
),
12199 "%<linear%> clause for variable other than "
12200 "loop iterator specified on construct "
12201 "combined with %<distribute%>");
12206 /* For combined #pragma omp parallel for simd, need to put
12207 lastprivate and perhaps firstprivate too on the
12208 parallel. Similarly for #pragma omp for simd. */
12209 struct gimplify_omp_ctx
*octx
= outer_ctx
;
12210 bool taskloop_seen
= false;
12214 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
12215 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
12217 decl
= OMP_CLAUSE_DECL (c
);
12218 if (error_operand_p (decl
))
12224 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
12225 flags
|= GOVD_FIRSTPRIVATE
;
12226 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
12227 flags
|= GOVD_LASTPRIVATE
;
12229 && octx
->region_type
== ORT_WORKSHARE
12230 && octx
->combined_loop
)
12232 if (octx
->outer_context
12233 && (octx
->outer_context
->region_type
12234 == ORT_COMBINED_PARALLEL
))
12235 octx
= octx
->outer_context
;
12236 else if (omp_check_private (octx
, decl
, false))
12240 && (octx
->region_type
& ORT_TASK
) != 0
12241 && octx
->combined_loop
)
12242 taskloop_seen
= true;
12244 && octx
->region_type
== ORT_COMBINED_PARALLEL
12245 && ((ctx
->region_type
== ORT_WORKSHARE
12246 && octx
== outer_ctx
)
12248 flags
= GOVD_SEEN
| GOVD_SHARED
;
12250 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
12251 == ORT_COMBINED_TEAMS
))
12252 flags
= GOVD_SEEN
| GOVD_SHARED
;
12254 && octx
->region_type
== ORT_COMBINED_TARGET
)
12256 if (flags
& GOVD_LASTPRIVATE
)
12257 flags
= GOVD_SEEN
| GOVD_MAP
;
12262 = splay_tree_lookup (octx
->variables
,
12263 (splay_tree_key
) decl
);
12264 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
12269 omp_add_variable (octx
, decl
, flags
);
12270 if (octx
->outer_context
== NULL
)
12272 octx
= octx
->outer_context
;
12277 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
12278 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
12279 omp_notice_variable (octx
, decl
, true);
12281 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
12282 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
12283 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
12285 notice_outer
= false;
12286 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
12290 case OMP_CLAUSE_MAP
:
12293 grp_start_p
= list_p
;
12294 grp_end
= (*groups
)[grpnum
].grp_end
;
12297 decl
= OMP_CLAUSE_DECL (c
);
12299 if (error_operand_p (decl
))
12305 if (!omp_parse_expr (addr_tokens
, decl
))
12313 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
12315 struct gimplify_omp_ctx
*octx
;
12316 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
12318 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
12321 = splay_tree_lookup (octx
->variables
,
12322 (splay_tree_key
) decl
);
12324 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
12325 "declared in enclosing %<host_data%> region",
12330 map_descriptor
= false;
12332 /* This condition checks if we're mapping an array descriptor that
12333 isn't inside a derived type -- these have special handling, and
12334 are not handled as structs in omp_build_struct_sibling_lists.
12335 See that function for further details. */
12336 if (*grp_start_p
!= grp_end
12337 && OMP_CLAUSE_CHAIN (*grp_start_p
)
12338 && OMP_CLAUSE_CHAIN (*grp_start_p
) != grp_end
)
12340 tree grp_mid
= OMP_CLAUSE_CHAIN (*grp_start_p
);
12341 if (omp_map_clause_descriptor_p (grp_mid
)
12342 && DECL_P (OMP_CLAUSE_DECL (grp_mid
)))
12343 map_descriptor
= true;
12345 else if (OMP_CLAUSE_CODE (grp_end
) == OMP_CLAUSE_MAP
12346 && (OMP_CLAUSE_MAP_KIND (grp_end
) == GOMP_MAP_RELEASE
12347 || OMP_CLAUSE_MAP_KIND (grp_end
) == GOMP_MAP_DELETE
)
12348 && OMP_CLAUSE_RELEASE_DESCRIPTOR (grp_end
))
12349 map_descriptor
= true;
12351 /* Adding the decl for a struct access: we haven't created
12352 GOMP_MAP_STRUCT nodes yet, so this statement needs to predict
12353 whether they will be created in gimplify_adjust_omp_clauses.
12354 NOTE: Technically we should probably look through DECL_VALUE_EXPR
12355 here because something that looks like a DECL_P may actually be a
12356 struct access, e.g. variables in a lambda closure
12357 (__closure->__foo) or class members (this->foo). Currently in both
12358 those cases we map the whole of the containing object (directly in
12359 the C++ FE) though, so struct nodes are not created. */
12361 && addr_tokens
[0]->type
== STRUCTURE_BASE
12362 && addr_tokens
[0]->u
.structure_base_kind
== BASE_DECL
12363 && !map_descriptor
)
12365 gcc_assert (addr_tokens
[1]->type
== ACCESS_METHOD
);
12366 /* If we got to this struct via a chain of pointers, maybe we
12367 want to map it implicitly instead. */
12368 if (omp_access_chain_p (addr_tokens
, 1))
12370 omp_mapping_group
*wholestruct
;
12371 if (!(region_type
& ORT_ACC
)
12372 && omp_mapped_by_containing_struct (grpmap
,
12373 OMP_CLAUSE_DECL (c
),
12376 decl
= addr_tokens
[1]->expr
;
12377 if (splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
))
12379 /* Standalone attach or detach clauses for a struct element
12380 should not inhibit implicit mapping of the whole struct. */
12381 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12382 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
)
12384 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
12386 gcc_assert (addr_tokens
[1]->u
.access_kind
!= ACCESS_DIRECT
12387 || TREE_ADDRESSABLE (decl
));
12391 if (!DECL_P (decl
))
12393 tree d
= decl
, *pd
;
12394 if (TREE_CODE (d
) == ARRAY_REF
)
12396 while (TREE_CODE (d
) == ARRAY_REF
)
12397 d
= TREE_OPERAND (d
, 0);
12398 if (TREE_CODE (d
) == COMPONENT_REF
12399 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
12402 pd
= &OMP_CLAUSE_DECL (c
);
12404 && TREE_CODE (decl
) == INDIRECT_REF
12405 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
12406 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
12408 && (OMP_CLAUSE_MAP_KIND (c
)
12409 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
))
12411 pd
= &TREE_OPERAND (decl
, 0);
12412 decl
= TREE_OPERAND (decl
, 0);
12415 if (addr_tokens
[0]->type
== STRUCTURE_BASE
12416 && addr_tokens
[0]->u
.structure_base_kind
== BASE_DECL
12417 && addr_tokens
[1]->type
== ACCESS_METHOD
12418 && (addr_tokens
[1]->u
.access_kind
== ACCESS_POINTER
12419 || (addr_tokens
[1]->u
.access_kind
12420 == ACCESS_POINTER_OFFSET
))
12421 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)))
12423 tree base
= addr_tokens
[1]->expr
;
12425 = splay_tree_lookup (ctx
->variables
,
12426 (splay_tree_key
) base
);
12427 n
->value
|= GOVD_SEEN
;
12430 if (code
== OMP_TARGET
&& OMP_CLAUSE_MAP_IN_REDUCTION (c
))
12432 /* Don't gimplify *pd fully at this point, as the base
12433 will need to be adjusted during omp lowering. */
12434 auto_vec
<tree
, 10> expr_stack
;
12436 while (handled_component_p (*p
)
12437 || TREE_CODE (*p
) == INDIRECT_REF
12438 || TREE_CODE (*p
) == ADDR_EXPR
12439 || TREE_CODE (*p
) == MEM_REF
12440 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
12442 expr_stack
.safe_push (*p
);
12443 p
= &TREE_OPERAND (*p
, 0);
12445 for (int i
= expr_stack
.length () - 1; i
>= 0; i
--)
12447 tree t
= expr_stack
[i
];
12448 if (TREE_CODE (t
) == ARRAY_REF
12449 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
12451 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
12453 tree low
= unshare_expr (array_ref_low_bound (t
));
12454 if (!is_gimple_min_invariant (low
))
12456 TREE_OPERAND (t
, 2) = low
;
12457 if (gimplify_expr (&TREE_OPERAND (t
, 2),
12460 fb_rvalue
) == GS_ERROR
)
12464 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
12465 NULL
, is_gimple_reg
,
12466 fb_rvalue
) == GS_ERROR
)
12468 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
12470 tree elmt_size
= array_ref_element_size (t
);
12471 if (!is_gimple_min_invariant (elmt_size
))
12473 elmt_size
= unshare_expr (elmt_size
);
12475 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
,
12478 = size_int (TYPE_ALIGN_UNIT (elmt_type
));
12480 = size_binop (EXACT_DIV_EXPR
, elmt_size
,
12482 TREE_OPERAND (t
, 3) = elmt_size
;
12483 if (gimplify_expr (&TREE_OPERAND (t
, 3),
12486 fb_rvalue
) == GS_ERROR
)
12490 else if (gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
12491 NULL
, is_gimple_reg
,
12492 fb_rvalue
) == GS_ERROR
)
12495 else if (TREE_CODE (t
) == COMPONENT_REF
)
12497 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
12499 tree offset
= component_ref_field_offset (t
);
12500 if (!is_gimple_min_invariant (offset
))
12502 offset
= unshare_expr (offset
);
12503 tree field
= TREE_OPERAND (t
, 1);
12505 = size_int (DECL_OFFSET_ALIGN (field
)
12507 offset
= size_binop (EXACT_DIV_EXPR
, offset
,
12509 TREE_OPERAND (t
, 2) = offset
;
12510 if (gimplify_expr (&TREE_OPERAND (t
, 2),
12513 fb_rvalue
) == GS_ERROR
)
12517 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
12518 NULL
, is_gimple_reg
,
12519 fb_rvalue
) == GS_ERROR
)
12523 for (; expr_stack
.length () > 0; )
12525 tree t
= expr_stack
.pop ();
12527 if (TREE_CODE (t
) == ARRAY_REF
12528 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
12530 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1))
12531 && gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
,
12532 NULL
, is_gimple_val
,
12533 fb_rvalue
) == GS_ERROR
)
12541 if ((code
== OMP_TARGET
12542 || code
== OMP_TARGET_DATA
12543 || code
== OMP_TARGET_ENTER_DATA
12544 || code
== OMP_TARGET_EXIT_DATA
)
12545 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
12547 /* If we have attach/detach but the decl we have is a pointer to
12548 pointer, we're probably mapping the "base level" array
12549 implicitly. Make sure we don't add the decl as if we mapped
12550 it explicitly. That is,
12554 #pragma omp target map(arr[a][b:c])
12556 should *not* map "arr" explicitly. That way we get a
12557 zero-length "alloc" mapping for it, and assuming it's been
12558 mapped by some previous directive, etc., things work as they
12561 tree basetype
= TREE_TYPE (addr_tokens
[0]->expr
);
12563 if (TREE_CODE (basetype
) == REFERENCE_TYPE
)
12564 basetype
= TREE_TYPE (basetype
);
12566 if (code
== OMP_TARGET
12567 && addr_tokens
[0]->type
== ARRAY_BASE
12568 && addr_tokens
[0]->u
.structure_base_kind
== BASE_DECL
12569 && TREE_CODE (basetype
) == POINTER_TYPE
12570 && TREE_CODE (TREE_TYPE (basetype
)) == POINTER_TYPE
)
12574 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
12575 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
12576 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
12577 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_PRESENT_TO
12578 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_PRESENT_TOFROM
)
12579 flags
|= GOVD_MAP_ALWAYS_TO
;
12583 case OMP_CLAUSE_AFFINITY
:
12584 gimplify_omp_affinity (list_p
, pre_p
);
12587 case OMP_CLAUSE_DOACROSS
:
12588 if (OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SINK
)
12590 tree deps
= OMP_CLAUSE_DECL (c
);
12591 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
12593 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
12594 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
12595 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
12596 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
12597 deps
= TREE_CHAIN (deps
);
12601 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c
)
12602 == OMP_CLAUSE_DOACROSS_SOURCE
);
12604 case OMP_CLAUSE_DEPEND
:
12605 if (handled_depend_iterators
== -1)
12606 handled_depend_iterators
= gimplify_omp_depend (list_p
, pre_p
);
12607 if (handled_depend_iterators
)
12609 if (handled_depend_iterators
== 2)
12613 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
12615 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
12616 NULL
, is_gimple_val
, fb_rvalue
);
12617 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
12619 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
12624 if (OMP_CLAUSE_DECL (c
) != null_pointer_node
)
12626 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
12627 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
12628 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
12634 if (code
== OMP_TASK
)
12635 ctx
->has_depend
= true;
12638 case OMP_CLAUSE_TO
:
12639 case OMP_CLAUSE_FROM
:
12640 case OMP_CLAUSE__CACHE_
:
12641 decl
= OMP_CLAUSE_DECL (c
);
12642 if (error_operand_p (decl
))
12647 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
12648 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
12649 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
12650 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
12651 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
12656 if (!DECL_P (decl
))
12658 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
12659 NULL
, is_gimple_lvalue
, fb_lvalue
)
12669 case OMP_CLAUSE_USE_DEVICE_PTR
:
12670 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12671 flags
= GOVD_EXPLICIT
;
12674 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
12675 decl
= OMP_CLAUSE_DECL (c
);
12676 while (TREE_CODE (decl
) == INDIRECT_REF
12677 || TREE_CODE (decl
) == ARRAY_REF
)
12678 decl
= TREE_OPERAND (decl
, 0);
12679 flags
= GOVD_EXPLICIT
;
12682 case OMP_CLAUSE_IS_DEVICE_PTR
:
12683 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
12687 decl
= OMP_CLAUSE_DECL (c
);
12689 if (error_operand_p (decl
))
12694 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
12696 tree t
= omp_member_access_dummy_var (decl
);
12699 tree v
= DECL_VALUE_EXPR (decl
);
12700 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
12702 omp_notice_variable (outer_ctx
, t
, true);
12705 if (code
== OACC_DATA
12706 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12707 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
12708 flags
|= GOVD_MAP_0LEN_ARRAY
;
12709 omp_add_variable (ctx
, decl
, flags
);
12710 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
12711 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
12712 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
12713 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
12715 struct gimplify_omp_ctx
*pctx
12716 = code
== OMP_TARGET
? outer_ctx
: ctx
;
12718 omp_add_variable (pctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
12719 GOVD_LOCAL
| GOVD_SEEN
);
12721 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
12722 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
12724 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
12725 NULL
) == NULL_TREE
)
12726 omp_add_variable (pctx
,
12727 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
12728 GOVD_LOCAL
| GOVD_SEEN
);
12729 gimplify_omp_ctxp
= pctx
;
12730 push_gimplify_context ();
12732 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
12733 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
12735 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
12736 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
12737 pop_gimplify_context
12738 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
12739 push_gimplify_context ();
12740 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
12741 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
12742 pop_gimplify_context
12743 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
12744 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
12745 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
12747 gimplify_omp_ctxp
= outer_ctx
;
12749 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12750 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
12752 gimplify_omp_ctxp
= ctx
;
12753 push_gimplify_context ();
12754 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
12756 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
12758 TREE_SIDE_EFFECTS (bind
) = 1;
12759 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
12760 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
12762 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
12763 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
12764 pop_gimplify_context
12765 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
12766 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
12768 gimplify_omp_ctxp
= outer_ctx
;
12770 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
12771 && OMP_CLAUSE_LINEAR_STMT (c
))
12773 gimplify_omp_ctxp
= ctx
;
12774 push_gimplify_context ();
12775 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
12777 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
12779 TREE_SIDE_EFFECTS (bind
) = 1;
12780 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
12781 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
12783 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
12784 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
12785 pop_gimplify_context
12786 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
12787 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
12789 gimplify_omp_ctxp
= outer_ctx
;
12795 case OMP_CLAUSE_COPYIN
:
12796 case OMP_CLAUSE_COPYPRIVATE
:
12797 decl
= OMP_CLAUSE_DECL (c
);
12798 if (error_operand_p (decl
))
12803 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
12805 && !omp_check_private (ctx
, decl
, true))
12808 if (is_global_var (decl
))
12810 if (DECL_THREAD_LOCAL_P (decl
))
12812 else if (DECL_HAS_VALUE_EXPR_P (decl
))
12814 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
12818 && DECL_THREAD_LOCAL_P (value
))
12823 error_at (OMP_CLAUSE_LOCATION (c
),
12824 "copyprivate variable %qE is not threadprivate"
12825 " or private in outer context", DECL_NAME (decl
));
12828 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
12829 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
12830 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
12832 && ((region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
12833 || (region_type
== ORT_WORKSHARE
12834 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
12835 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
12836 || code
== OMP_LOOP
)))
12837 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
12838 || (code
== OMP_LOOP
12839 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
12840 && ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
12841 == ORT_COMBINED_TEAMS
))))
12844 = splay_tree_lookup (outer_ctx
->variables
,
12845 (splay_tree_key
)decl
);
12846 if (on
== NULL
|| (on
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
12848 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
12849 && TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
12850 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
12851 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
12852 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
12853 == POINTER_TYPE
))))
12854 omp_firstprivatize_variable (outer_ctx
, decl
);
12857 omp_add_variable (outer_ctx
, decl
,
12858 GOVD_SEEN
| GOVD_SHARED
);
12859 if (outer_ctx
->outer_context
)
12860 omp_notice_variable (outer_ctx
->outer_context
, decl
,
12866 omp_notice_variable (outer_ctx
, decl
, true);
12867 if (check_non_private
12868 && (region_type
== ORT_WORKSHARE
|| code
== OMP_SCOPE
)
12869 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
12870 || decl
== OMP_CLAUSE_DECL (c
)
12871 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
12872 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
12874 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
12875 == POINTER_PLUS_EXPR
12876 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
12877 (OMP_CLAUSE_DECL (c
), 0), 0))
12879 && omp_check_private (ctx
, decl
, false))
12881 error ("%s variable %qE is private in outer context",
12882 check_non_private
, DECL_NAME (decl
));
12887 case OMP_CLAUSE_DETACH
:
12888 flags
= GOVD_FIRSTPRIVATE
| GOVD_SEEN
;
12891 case OMP_CLAUSE_IF
:
12892 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
12893 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
12896 for (int i
= 0; i
< 2; i
++)
12897 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
12899 case VOID_CST
: p
[i
] = "cancel"; break;
12900 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
12901 case OMP_SIMD
: p
[i
] = "simd"; break;
12902 case OMP_TASK
: p
[i
] = "task"; break;
12903 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
12904 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
12905 case OMP_TARGET
: p
[i
] = "target"; break;
12906 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
12907 case OMP_TARGET_ENTER_DATA
:
12908 p
[i
] = "target enter data"; break;
12909 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
12910 default: gcc_unreachable ();
12912 error_at (OMP_CLAUSE_LOCATION (c
),
12913 "expected %qs %<if%> clause modifier rather than %qs",
12917 /* Fall through. */
12919 case OMP_CLAUSE_SELF
:
12920 case OMP_CLAUSE_FINAL
:
12921 OMP_CLAUSE_OPERAND (c
, 0)
12922 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
12923 /* Fall through. */
12925 case OMP_CLAUSE_NUM_TEAMS
:
12926 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
12927 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
12928 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)))
12930 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)))
12935 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
12936 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
),
12937 pre_p
, NULL
, true);
12939 /* Fall through. */
12941 case OMP_CLAUSE_SCHEDULE
:
12942 case OMP_CLAUSE_NUM_THREADS
:
12943 case OMP_CLAUSE_THREAD_LIMIT
:
12944 case OMP_CLAUSE_DIST_SCHEDULE
:
12945 case OMP_CLAUSE_DEVICE
:
12946 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEVICE
12947 && OMP_CLAUSE_DEVICE_ANCESTOR (c
))
12949 if (code
!= OMP_TARGET
)
12951 error_at (OMP_CLAUSE_LOCATION (c
),
12952 "%<device%> clause with %<ancestor%> is only "
12953 "allowed on %<target%> construct");
12958 tree clauses
= *orig_list_p
;
12959 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
12960 if (OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_DEVICE
12961 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_FIRSTPRIVATE
12962 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_PRIVATE
12963 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_DEFAULTMAP
12964 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_MAP
12967 error_at (OMP_CLAUSE_LOCATION (c
),
12968 "with %<ancestor%>, only the %<device%>, "
12969 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
12970 "and %<map%> clauses may appear on the "
12976 /* Fall through. */
12978 case OMP_CLAUSE_PRIORITY
:
12979 case OMP_CLAUSE_GRAINSIZE
:
12980 case OMP_CLAUSE_NUM_TASKS
:
12981 case OMP_CLAUSE_FILTER
:
12982 case OMP_CLAUSE_HINT
:
12983 case OMP_CLAUSE_ASYNC
:
12984 case OMP_CLAUSE_WAIT
:
12985 case OMP_CLAUSE_NUM_GANGS
:
12986 case OMP_CLAUSE_NUM_WORKERS
:
12987 case OMP_CLAUSE_VECTOR_LENGTH
:
12988 case OMP_CLAUSE_WORKER
:
12989 case OMP_CLAUSE_VECTOR
:
12990 if (OMP_CLAUSE_OPERAND (c
, 0)
12991 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c
, 0)))
12993 if (error_operand_p (OMP_CLAUSE_OPERAND (c
, 0)))
12998 /* All these clauses care about value, not a particular decl,
12999 so try to force it into a SSA_NAME or fresh temporary. */
13000 OMP_CLAUSE_OPERAND (c
, 0)
13001 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c
, 0),
13002 pre_p
, NULL
, true);
13006 case OMP_CLAUSE_GANG
:
13007 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
13008 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
13010 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
13011 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
13015 case OMP_CLAUSE_NOWAIT
:
13019 case OMP_CLAUSE_ORDERED
:
13020 case OMP_CLAUSE_UNTIED
:
13021 case OMP_CLAUSE_COLLAPSE
:
13022 case OMP_CLAUSE_TILE
:
13023 case OMP_CLAUSE_AUTO
:
13024 case OMP_CLAUSE_SEQ
:
13025 case OMP_CLAUSE_INDEPENDENT
:
13026 case OMP_CLAUSE_MERGEABLE
:
13027 case OMP_CLAUSE_PROC_BIND
:
13028 case OMP_CLAUSE_SAFELEN
:
13029 case OMP_CLAUSE_SIMDLEN
:
13030 case OMP_CLAUSE_NOGROUP
:
13031 case OMP_CLAUSE_THREADS
:
13032 case OMP_CLAUSE_SIMD
:
13033 case OMP_CLAUSE_BIND
:
13034 case OMP_CLAUSE_IF_PRESENT
:
13035 case OMP_CLAUSE_FINALIZE
:
13038 case OMP_CLAUSE_ORDER
:
13039 ctx
->order_concurrent
= true;
13042 case OMP_CLAUSE_DEFAULTMAP
:
13043 enum gimplify_defaultmap_kind gdmkmin
, gdmkmax
;
13044 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c
))
13046 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
:
13047 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL
:
13048 gdmkmin
= GDMK_SCALAR
;
13049 gdmkmax
= GDMK_POINTER
;
13051 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
:
13052 gdmkmin
= GDMK_SCALAR
;
13053 gdmkmax
= GDMK_SCALAR_TARGET
;
13055 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
:
13056 gdmkmin
= gdmkmax
= GDMK_AGGREGATE
;
13058 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
:
13059 gdmkmin
= gdmkmax
= GDMK_ALLOCATABLE
;
13061 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
:
13062 gdmkmin
= gdmkmax
= GDMK_POINTER
;
13065 gcc_unreachable ();
13067 for (int gdmk
= gdmkmin
; gdmk
<= gdmkmax
; gdmk
++)
13068 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c
))
13070 case OMP_CLAUSE_DEFAULTMAP_ALLOC
:
13071 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_ALLOC_ONLY
;
13073 case OMP_CLAUSE_DEFAULTMAP_TO
:
13074 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_TO_ONLY
;
13076 case OMP_CLAUSE_DEFAULTMAP_FROM
:
13077 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FROM_ONLY
;
13079 case OMP_CLAUSE_DEFAULTMAP_TOFROM
:
13080 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
13082 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
:
13083 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
13085 case OMP_CLAUSE_DEFAULTMAP_NONE
:
13086 ctx
->defaultmap
[gdmk
] = 0;
13088 case OMP_CLAUSE_DEFAULTMAP_PRESENT
:
13089 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
13091 case OMP_CLAUSE_DEFAULTMAP_DEFAULT
:
13095 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
13097 case GDMK_SCALAR_TARGET
:
13098 ctx
->defaultmap
[gdmk
] = (lang_GNU_Fortran ()
13099 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
13101 case GDMK_AGGREGATE
:
13102 case GDMK_ALLOCATABLE
:
13103 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
13106 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
13107 if (!lang_GNU_Fortran ())
13108 ctx
->defaultmap
[gdmk
] |= GOVD_MAP_0LEN_ARRAY
;
13111 gcc_unreachable ();
13115 gcc_unreachable ();
13119 case OMP_CLAUSE_ALIGNED
:
13120 decl
= OMP_CLAUSE_DECL (c
);
13121 if (error_operand_p (decl
))
13126 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
13127 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
13132 if (!is_global_var (decl
)
13133 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
13134 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
13137 case OMP_CLAUSE_NONTEMPORAL
:
13138 decl
= OMP_CLAUSE_DECL (c
);
13139 if (error_operand_p (decl
))
13144 omp_add_variable (ctx
, decl
, GOVD_NONTEMPORAL
);
13147 case OMP_CLAUSE_ALLOCATE
:
13148 decl
= OMP_CLAUSE_DECL (c
);
13149 if (error_operand_p (decl
))
13154 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
), pre_p
, NULL
,
13155 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
13160 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
13161 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
13164 else if (code
== OMP_TASKLOOP
13165 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
13166 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
13167 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
13168 pre_p
, NULL
, false);
13171 case OMP_CLAUSE_DEFAULT
:
13172 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
13175 case OMP_CLAUSE_INCLUSIVE
:
13176 case OMP_CLAUSE_EXCLUSIVE
:
13177 decl
= OMP_CLAUSE_DECL (c
);
13179 splay_tree_node n
= splay_tree_lookup (outer_ctx
->variables
,
13180 (splay_tree_key
) decl
);
13181 if (n
== NULL
|| (n
->value
& GOVD_REDUCTION
) == 0)
13183 error_at (OMP_CLAUSE_LOCATION (c
),
13184 "%qD specified in %qs clause but not in %<inscan%> "
13185 "%<reduction%> clause on the containing construct",
13186 decl
, omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
13191 n
->value
|= GOVD_REDUCTION_INSCAN
;
13192 if (outer_ctx
->region_type
== ORT_SIMD
13193 && outer_ctx
->outer_context
13194 && outer_ctx
->outer_context
->region_type
== ORT_WORKSHARE
)
13196 n
= splay_tree_lookup (outer_ctx
->outer_context
->variables
,
13197 (splay_tree_key
) decl
);
13198 if (n
&& (n
->value
& GOVD_REDUCTION
) != 0)
13199 n
->value
|= GOVD_REDUCTION_INSCAN
;
13205 case OMP_CLAUSE_NOHOST
:
13207 gcc_unreachable ();
13210 if (code
== OACC_DATA
13211 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
13212 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13213 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
13216 *list_p
= OMP_CLAUSE_CHAIN (c
);
13218 list_p
= &OMP_CLAUSE_CHAIN (c
);
13227 ctx
->clauses
= *orig_list_p
;
13228 gimplify_omp_ctxp
= ctx
;
13231 /* Return true if DECL is a candidate for shared to firstprivate
13232 optimization. We only consider non-addressable scalars, not
13233 too big, and not references. */
13236 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
13238 if (TREE_ADDRESSABLE (decl
))
13240 tree type
= TREE_TYPE (decl
);
13241 if (!is_gimple_reg_type (type
)
13242 || TREE_CODE (type
) == REFERENCE_TYPE
13243 || TREE_ADDRESSABLE (type
))
13245 /* Don't optimize too large decls, as each thread/task will have
13247 HOST_WIDE_INT len
= int_size_in_bytes (type
);
13248 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
13250 if (omp_privatize_by_reference (decl
))
13255 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
13256 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
13257 GOVD_WRITTEN in outer contexts. */
13260 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
13262 for (; ctx
; ctx
= ctx
->outer_context
)
13264 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
13265 (splay_tree_key
) decl
);
13268 else if (n
->value
& GOVD_SHARED
)
13270 n
->value
|= GOVD_WRITTEN
;
13273 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
13278 /* Helper callback for walk_gimple_seq to discover possible stores
13279 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
13280 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
13284 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
13286 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
13288 *walk_subtrees
= 0;
13295 if (handled_component_p (op
))
13296 op
= TREE_OPERAND (op
, 0);
13297 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
13298 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
13299 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
13304 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
13307 omp_mark_stores (gimplify_omp_ctxp
, op
);
13311 /* Helper callback for walk_gimple_seq to discover possible stores
13312 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
13313 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
13317 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
13318 bool *handled_ops_p
,
13319 struct walk_stmt_info
*wi
)
13321 gimple
*stmt
= gsi_stmt (*gsi_p
);
13322 switch (gimple_code (stmt
))
13324 /* Don't recurse on OpenMP constructs for which
13325 gimplify_adjust_omp_clauses already handled the bodies,
13326 except handle gimple_omp_for_pre_body. */
13327 case GIMPLE_OMP_FOR
:
13328 *handled_ops_p
= true;
13329 if (gimple_omp_for_pre_body (stmt
))
13330 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
13331 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
13333 case GIMPLE_OMP_PARALLEL
:
13334 case GIMPLE_OMP_TASK
:
13335 case GIMPLE_OMP_SECTIONS
:
13336 case GIMPLE_OMP_SINGLE
:
13337 case GIMPLE_OMP_SCOPE
:
13338 case GIMPLE_OMP_TARGET
:
13339 case GIMPLE_OMP_TEAMS
:
13340 case GIMPLE_OMP_CRITICAL
:
13341 *handled_ops_p
= true;
13349 struct gimplify_adjust_omp_clauses_data
13355 /* For all variables that were not actually used within the context,
13356 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
13359 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
13361 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
13363 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
13364 tree decl
= (tree
) n
->key
;
13365 unsigned flags
= n
->value
;
13366 enum omp_clause_code code
;
13368 bool private_debug
;
13370 if (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
13371 && (flags
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0)
13372 flags
= GOVD_SHARED
| GOVD_SEEN
| GOVD_WRITTEN
;
13373 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
13375 if ((flags
& GOVD_SEEN
) == 0)
13377 if (flags
& GOVD_DEBUG_PRIVATE
)
13379 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
13380 private_debug
= true;
13382 else if (flags
& GOVD_MAP
)
13383 private_debug
= false;
13386 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
13387 !!(flags
& GOVD_SHARED
));
13389 code
= OMP_CLAUSE_PRIVATE
;
13390 else if (flags
& GOVD_MAP
)
13392 code
= OMP_CLAUSE_MAP
;
13393 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
13394 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
13396 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
13400 && DECL_IN_CONSTANT_POOL (decl
)
13401 && !lookup_attribute ("omp declare target",
13402 DECL_ATTRIBUTES (decl
)))
13404 tree id
= get_identifier ("omp declare target");
13405 DECL_ATTRIBUTES (decl
)
13406 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
13407 varpool_node
*node
= varpool_node::get (decl
);
13410 node
->offloadable
= 1;
13411 if (ENABLE_OFFLOADING
)
13412 g
->have_offload
= true;
13416 else if (flags
& GOVD_SHARED
)
13418 if (is_global_var (decl
))
13420 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
13421 while (ctx
!= NULL
)
13424 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13425 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
13426 | GOVD_PRIVATE
| GOVD_REDUCTION
13427 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
13429 ctx
= ctx
->outer_context
;
13434 code
= OMP_CLAUSE_SHARED
;
13435 /* Don't optimize shared into firstprivate for read-only vars
13436 on tasks with depend clause, we shouldn't try to copy them
13437 until the dependencies are satisfied. */
13438 if (gimplify_omp_ctxp
->has_depend
)
13439 flags
|= GOVD_WRITTEN
;
13441 else if (flags
& GOVD_PRIVATE
)
13442 code
= OMP_CLAUSE_PRIVATE
;
13443 else if (flags
& GOVD_FIRSTPRIVATE
)
13445 code
= OMP_CLAUSE_FIRSTPRIVATE
;
13446 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
13447 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
13448 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
13450 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
13451 "%<target%> construct", decl
);
13455 else if (flags
& GOVD_LASTPRIVATE
)
13456 code
= OMP_CLAUSE_LASTPRIVATE
;
13457 else if (flags
& (GOVD_ALIGNED
| GOVD_NONTEMPORAL
))
13459 else if (flags
& GOVD_CONDTEMP
)
13461 code
= OMP_CLAUSE__CONDTEMP_
;
13462 gimple_add_tmp_var (decl
);
13465 gcc_unreachable ();
13467 if (((flags
& GOVD_LASTPRIVATE
)
13468 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
13469 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
13470 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
13472 tree chain
= *list_p
;
13473 clause
= build_omp_clause (input_location
, code
);
13474 OMP_CLAUSE_DECL (clause
) = decl
;
13475 OMP_CLAUSE_CHAIN (clause
) = chain
;
13477 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
13478 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
13479 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
13480 else if (code
== OMP_CLAUSE_SHARED
13481 && (flags
& GOVD_WRITTEN
) == 0
13482 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
13483 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
13484 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
13485 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
13486 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
13488 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
13489 OMP_CLAUSE_DECL (nc
) = decl
;
13490 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
13491 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
13492 OMP_CLAUSE_DECL (clause
)
13493 = build_fold_indirect_ref_loc (input_location
, decl
);
13494 OMP_CLAUSE_DECL (clause
)
13495 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
13496 build_int_cst (build_pointer_type (char_type_node
), 0));
13497 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
13498 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
13499 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
13500 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
13501 tree dtype
= TREE_TYPE (decl
);
13502 if (TREE_CODE (dtype
) == REFERENCE_TYPE
)
13503 dtype
= TREE_TYPE (dtype
);
13504 /* FIRSTPRIVATE_POINTER doesn't work well if we have a
13505 multiply-indirected pointer. If we have a reference to a pointer to
13506 a pointer, it's possible that this should really be
13507 GOMP_MAP_FIRSTPRIVATE_REFERENCE -- but that also doesn't work at the
13508 moment, so stick with this. (See PR113279 and testcases
13509 baseptrs-{4,6}.C:ref2ptrptr_offset_decl_member_slice). */
13510 if (TREE_CODE (dtype
) == POINTER_TYPE
13511 && TREE_CODE (TREE_TYPE (dtype
)) == POINTER_TYPE
)
13512 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
13514 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
13515 OMP_CLAUSE_CHAIN (nc
) = chain
;
13516 OMP_CLAUSE_CHAIN (clause
) = nc
;
13517 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
13518 gimplify_omp_ctxp
= ctx
->outer_context
;
13519 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
13520 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
13521 gimplify_omp_ctxp
= ctx
;
13523 else if (code
== OMP_CLAUSE_MAP
)
13526 /* Not all combinations of these GOVD_MAP flags are actually valid. */
13527 switch (flags
& (GOVD_MAP_TO_ONLY
13529 | GOVD_MAP_FORCE_PRESENT
13530 | GOVD_MAP_ALLOC_ONLY
13531 | GOVD_MAP_FROM_ONLY
))
13534 kind
= GOMP_MAP_TOFROM
;
13536 case GOVD_MAP_FORCE
:
13537 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
13539 case GOVD_MAP_TO_ONLY
:
13540 kind
= GOMP_MAP_TO
;
13542 case GOVD_MAP_FROM_ONLY
:
13543 kind
= GOMP_MAP_FROM
;
13545 case GOVD_MAP_ALLOC_ONLY
:
13546 kind
= GOMP_MAP_ALLOC
;
13548 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
13549 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
13551 case GOVD_MAP_FORCE_PRESENT
:
13552 kind
= GOMP_MAP_FORCE_PRESENT
;
13554 case GOVD_MAP_FORCE_PRESENT
| GOVD_MAP_ALLOC_ONLY
:
13555 kind
= GOMP_MAP_FORCE_PRESENT
;
13558 gcc_unreachable ();
13560 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
13561 /* Setting of the implicit flag for the runtime is currently disabled for
13563 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0)
13564 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause
) = 1;
13565 if (DECL_SIZE (decl
)
13566 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
13568 tree decl2
= DECL_VALUE_EXPR (decl
);
13569 gcc_assert (INDIRECT_REF_P (decl2
));
13570 decl2
= TREE_OPERAND (decl2
, 0);
13571 gcc_assert (DECL_P (decl2
));
13572 tree mem
= build_simple_mem_ref (decl2
);
13573 OMP_CLAUSE_DECL (clause
) = mem
;
13574 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
13575 if (gimplify_omp_ctxp
->outer_context
)
13577 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
13578 omp_notice_variable (ctx
, decl2
, true);
13579 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
13581 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
13583 OMP_CLAUSE_DECL (nc
) = decl
;
13584 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
13585 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
13586 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
13588 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
13589 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
13590 OMP_CLAUSE_CHAIN (clause
) = nc
;
13592 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
13593 && omp_privatize_by_reference (decl
))
13595 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
13596 OMP_CLAUSE_SIZE (clause
)
13597 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
13598 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
13599 gimplify_omp_ctxp
= ctx
->outer_context
;
13600 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
13601 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
13602 gimplify_omp_ctxp
= ctx
;
13603 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
13605 OMP_CLAUSE_DECL (nc
) = decl
;
13606 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
13607 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
13608 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
13609 OMP_CLAUSE_CHAIN (clause
) = nc
;
13612 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
13614 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
13616 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
13617 OMP_CLAUSE_DECL (nc
) = decl
;
13618 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
13619 OMP_CLAUSE_CHAIN (nc
) = chain
;
13620 OMP_CLAUSE_CHAIN (clause
) = nc
;
13621 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
13622 gimplify_omp_ctxp
= ctx
->outer_context
;
13623 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
13624 (ctx
->region_type
& ORT_ACC
) != 0);
13625 gimplify_omp_ctxp
= ctx
;
13628 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
13629 gimplify_omp_ctxp
= ctx
->outer_context
;
13630 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
13631 in simd. Those are only added for the local vars inside of simd body
13632 and they don't need to be e.g. default constructible. */
13633 if (code
!= OMP_CLAUSE_PRIVATE
|| ctx
->region_type
!= ORT_SIMD
)
13634 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
,
13635 (ctx
->region_type
& ORT_ACC
) != 0);
13636 if (gimplify_omp_ctxp
)
13637 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
13638 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
13639 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
13640 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
13642 gimplify_omp_ctxp
= ctx
;
13647 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
13648 enum tree_code code
)
13650 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
13651 tree
*orig_list_p
= list_p
;
13653 bool has_inscan_reductions
= false;
13657 struct gimplify_omp_ctx
*octx
;
13658 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
13659 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
13663 struct walk_stmt_info wi
;
13664 memset (&wi
, 0, sizeof (wi
));
13665 walk_gimple_seq (body
, omp_find_stores_stmt
,
13666 omp_find_stores_op
, &wi
);
13670 if (ctx
->add_safelen1
)
13672 /* If there are VLAs in the body of simd loop, prevent
13674 gcc_assert (ctx
->region_type
== ORT_SIMD
);
13675 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
13676 OMP_CLAUSE_SAFELEN_EXPR (c
) = integer_one_node
;
13677 OMP_CLAUSE_CHAIN (c
) = *list_p
;
13679 list_p
= &OMP_CLAUSE_CHAIN (c
);
13682 if (ctx
->region_type
== ORT_WORKSHARE
13683 && ctx
->outer_context
13684 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
)
13686 for (c
= ctx
->outer_context
->clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13687 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
13688 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
13690 decl
= OMP_CLAUSE_DECL (c
);
13692 = splay_tree_lookup (ctx
->outer_context
->variables
,
13693 (splay_tree_key
) decl
);
13694 gcc_checking_assert (!splay_tree_lookup (ctx
->variables
,
13695 (splay_tree_key
) decl
));
13696 omp_add_variable (ctx
, decl
, n
->value
);
13697 tree c2
= copy_node (c
);
13698 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
13700 if ((n
->value
& GOVD_FIRSTPRIVATE
) == 0)
13702 c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
13703 OMP_CLAUSE_FIRSTPRIVATE
);
13704 OMP_CLAUSE_DECL (c2
) = decl
;
13705 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
13710 if (code
== OMP_TARGET
13711 || code
== OMP_TARGET_DATA
13712 || code
== OMP_TARGET_ENTER_DATA
13713 || code
== OMP_TARGET_EXIT_DATA
)
13715 vec
<omp_mapping_group
> *groups
;
13716 groups
= omp_gather_mapping_groups (list_p
);
13717 hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *grpmap
= NULL
;
13721 grpmap
= omp_index_mapping_groups (groups
);
13723 omp_resolve_clause_dependencies (code
, groups
, grpmap
);
13724 omp_build_struct_sibling_lists (code
, ctx
->region_type
, groups
,
13727 omp_mapping_group
*outlist
= NULL
;
13732 /* Rebuild now we have struct sibling lists. */
13733 groups
= omp_gather_mapping_groups (list_p
);
13734 grpmap
= omp_index_mapping_groups (groups
);
13736 bool enter_exit
= (code
== OMP_TARGET_ENTER_DATA
13737 || code
== OMP_TARGET_EXIT_DATA
);
13739 outlist
= omp_tsort_mapping_groups (groups
, grpmap
, enter_exit
);
13740 outlist
= omp_segregate_mapping_groups (outlist
);
13741 list_p
= omp_reorder_mapping_groups (groups
, outlist
, list_p
);
13747 else if (ctx
->region_type
& ORT_ACC
)
13749 vec
<omp_mapping_group
> *groups
;
13750 groups
= omp_gather_mapping_groups (list_p
);
13753 hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *grpmap
;
13754 grpmap
= omp_index_mapping_groups (groups
);
13756 oacc_resolve_clause_dependencies (groups
, grpmap
);
13757 omp_build_struct_sibling_lists (code
, ctx
->region_type
, groups
,
13765 tree attach_list
= NULL_TREE
;
13766 tree
*attach_tail
= &attach_list
;
13768 tree
*grp_start_p
= NULL
, grp_end
= NULL_TREE
;
13770 while ((c
= *list_p
) != NULL
)
13773 bool remove
= false;
13774 bool move_attach
= false;
13776 if (grp_end
&& c
== OMP_CLAUSE_CHAIN (grp_end
))
13777 grp_end
= NULL_TREE
;
13779 switch (OMP_CLAUSE_CODE (c
))
13781 case OMP_CLAUSE_FIRSTPRIVATE
:
13782 if ((ctx
->region_type
& ORT_TARGET
)
13783 && (ctx
->region_type
& ORT_ACC
) == 0
13784 && TYPE_ATOMIC (strip_array_types
13785 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
13787 error_at (OMP_CLAUSE_LOCATION (c
),
13788 "%<_Atomic%> %qD in %<firstprivate%> clause on "
13789 "%<target%> construct", OMP_CLAUSE_DECL (c
));
13793 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
13795 decl
= OMP_CLAUSE_DECL (c
);
13796 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13797 if ((n
->value
& GOVD_MAP
) != 0)
13802 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c
) = 0;
13803 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
) = 0;
13806 case OMP_CLAUSE_PRIVATE
:
13807 case OMP_CLAUSE_SHARED
:
13808 case OMP_CLAUSE_LINEAR
:
13809 decl
= OMP_CLAUSE_DECL (c
);
13810 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13811 remove
= !(n
->value
& GOVD_SEEN
);
13812 if ((n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0
13813 && code
== OMP_PARALLEL
13814 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
13818 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
13819 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
13820 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
13822 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
13823 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
13825 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
13826 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
13828 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
13831 n
->value
|= GOVD_WRITTEN
;
13832 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
13833 && (n
->value
& GOVD_WRITTEN
) == 0
13835 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
13836 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
13837 else if (DECL_P (decl
)
13838 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
13839 && (n
->value
& GOVD_WRITTEN
) != 0)
13840 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
13841 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
13842 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
13843 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
13846 n
->value
&= ~GOVD_EXPLICIT
;
13849 case OMP_CLAUSE_LASTPRIVATE
:
13850 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
13851 accurately reflect the presence of a FIRSTPRIVATE clause. */
13852 decl
= OMP_CLAUSE_DECL (c
);
13853 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13854 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
13855 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
13856 if (code
== OMP_DISTRIBUTE
13857 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
13860 error_at (OMP_CLAUSE_LOCATION (c
),
13861 "same variable used in %<firstprivate%> and "
13862 "%<lastprivate%> clauses on %<distribute%> "
13866 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
13868 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
13869 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
13870 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) && code
== OMP_PARALLEL
)
13874 case OMP_CLAUSE_ALIGNED
:
13875 decl
= OMP_CLAUSE_DECL (c
);
13876 if (!is_global_var (decl
))
13878 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13879 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
13880 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
13882 struct gimplify_omp_ctx
*octx
;
13884 && (n
->value
& (GOVD_DATA_SHARE_CLASS
13885 & ~GOVD_FIRSTPRIVATE
)))
13888 for (octx
= ctx
->outer_context
; octx
;
13889 octx
= octx
->outer_context
)
13891 n
= splay_tree_lookup (octx
->variables
,
13892 (splay_tree_key
) decl
);
13895 if (n
->value
& GOVD_LOCAL
)
13897 /* We have to avoid assigning a shared variable
13898 to itself when trying to add
13899 __builtin_assume_aligned. */
13900 if (n
->value
& GOVD_SHARED
)
13908 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
13910 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13911 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
13916 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
13917 decl
= OMP_CLAUSE_DECL (c
);
13918 while (INDIRECT_REF_P (decl
)
13919 || TREE_CODE (decl
) == ARRAY_REF
)
13920 decl
= TREE_OPERAND (decl
, 0);
13921 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13922 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
13925 case OMP_CLAUSE_IS_DEVICE_PTR
:
13926 case OMP_CLAUSE_NONTEMPORAL
:
13927 decl
= OMP_CLAUSE_DECL (c
);
13928 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13929 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
13932 case OMP_CLAUSE_MAP
:
13933 decl
= OMP_CLAUSE_DECL (c
);
13936 grp_start_p
= list_p
;
13937 grp_end
= *omp_group_last (grp_start_p
);
13939 switch (OMP_CLAUSE_MAP_KIND (c
))
13941 case GOMP_MAP_PRESENT_ALLOC
:
13942 case GOMP_MAP_PRESENT_TO
:
13943 case GOMP_MAP_PRESENT_FROM
:
13944 case GOMP_MAP_PRESENT_TOFROM
:
13945 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_PRESENT
);
13953 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
13956 case OACC_HOST_DATA
:
13957 case OACC_ENTER_DATA
:
13958 case OACC_EXIT_DATA
:
13959 case OMP_TARGET_DATA
:
13960 case OMP_TARGET_ENTER_DATA
:
13961 case OMP_TARGET_EXIT_DATA
:
13962 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
13963 || (OMP_CLAUSE_MAP_KIND (c
)
13964 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
13965 /* For target {,enter ,exit }data only the array slice is
13966 mapped, but not the pointer to it. */
13968 if (code
== OMP_TARGET_EXIT_DATA
13969 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
13970 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
))
13980 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
13982 /* Sanity check: attach/detach map kinds use the size as a bias,
13983 and it's never right to use the decl size for such
13985 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
13986 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
13987 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DETACH
13988 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH_DETACH
13989 && (OMP_CLAUSE_MAP_KIND (c
)
13990 != GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
));
13991 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
13992 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
13994 gimplify_omp_ctxp
= ctx
->outer_context
;
13995 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
13996 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
13998 gimplify_omp_ctxp
= ctx
;
14002 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
14003 || (OMP_CLAUSE_MAP_KIND (c
)
14004 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
14005 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
14006 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
14008 OMP_CLAUSE_SIZE (c
)
14009 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
14011 if ((ctx
->region_type
& ORT_TARGET
) != 0)
14012 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
14013 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
14015 gimplify_omp_ctxp
= ctx
;
14016 /* Data clauses associated with reductions must be
14017 compatible with present_or_copy. Warn and adjust the clause
14018 if that is not the case. */
14019 if (ctx
->region_type
== ORT_ACC_PARALLEL
14020 || ctx
->region_type
== ORT_ACC_SERIAL
)
14022 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
14026 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
14028 if (n
&& (n
->value
& GOVD_REDUCTION
))
14030 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
14032 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
14033 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
14034 && kind
!= GOMP_MAP_FORCE_PRESENT
14035 && kind
!= GOMP_MAP_POINTER
)
14037 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
14038 "incompatible data clause with reduction "
14039 "on %qE; promoting to %<present_or_copy%>",
14041 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
14045 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
14046 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT_UNORD
)
14047 && (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
))
14052 /* If we have a DECL_VALUE_EXPR (e.g. this is a class member and/or
14053 a variable captured in a lambda closure), look through that now
14054 before the DECL_P check below. (A code other than COMPONENT_REF,
14055 i.e. INDIRECT_REF, will be a VLA/variable-length array
14056 section. A global var may be a variable in a common block. We
14057 don't want to do this here for either of those.) */
14058 if ((ctx
->region_type
& ORT_ACC
) == 0
14060 && !is_global_var (decl
)
14061 && DECL_HAS_VALUE_EXPR_P (decl
)
14062 && TREE_CODE (DECL_VALUE_EXPR (decl
)) == COMPONENT_REF
)
14063 decl
= OMP_CLAUSE_DECL (c
) = DECL_VALUE_EXPR (decl
);
14064 if (TREE_CODE (decl
) == TARGET_EXPR
)
14066 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
14067 is_gimple_lvalue
, fb_lvalue
) == GS_ERROR
)
14070 else if (!DECL_P (decl
))
14072 if ((ctx
->region_type
& ORT_TARGET
) != 0
14073 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
14075 if (INDIRECT_REF_P (decl
)
14076 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
14077 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
14078 == REFERENCE_TYPE
))
14079 decl
= TREE_OPERAND (decl
, 0);
14080 if (TREE_CODE (decl
) == COMPONENT_REF
)
14082 while (TREE_CODE (decl
) == COMPONENT_REF
)
14083 decl
= TREE_OPERAND (decl
, 0);
14086 n
= splay_tree_lookup (ctx
->variables
,
14087 (splay_tree_key
) decl
);
14088 if (!(n
->value
& GOVD_SEEN
))
14094 tree d
= decl
, *pd
;
14095 if (TREE_CODE (d
) == ARRAY_REF
)
14097 while (TREE_CODE (d
) == ARRAY_REF
)
14098 d
= TREE_OPERAND (d
, 0);
14099 if (TREE_CODE (d
) == COMPONENT_REF
14100 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
14103 pd
= &OMP_CLAUSE_DECL (c
);
14105 && TREE_CODE (decl
) == INDIRECT_REF
14106 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
14107 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
14109 && (OMP_CLAUSE_MAP_KIND (c
)
14110 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
))
14112 pd
= &TREE_OPERAND (decl
, 0);
14113 decl
= TREE_OPERAND (decl
, 0);
14116 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
14119 case OACC_ENTER_DATA
:
14120 case OACC_EXIT_DATA
:
14121 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
14124 else if (code
== OACC_ENTER_DATA
)
14125 goto change_to_attach
;
14127 case OMP_TARGET_EXIT_DATA
:
14128 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_DETACH
);
14131 /* An "attach/detach" operation on an update directive
14132 should behave as a GOMP_MAP_ALWAYS_POINTER. Note that
14133 both GOMP_MAP_ATTACH_DETACH and GOMP_MAP_ALWAYS_POINTER
14134 kinds depend on the previous mapping (for non-TARGET
14136 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_ALWAYS_POINTER
);
14140 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_ATTACH
);
14141 if ((ctx
->region_type
& ORT_TARGET
) != 0)
14142 move_attach
= true;
14144 else if ((ctx
->region_type
& ORT_TARGET
) != 0
14145 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
14146 || (OMP_CLAUSE_MAP_KIND (c
)
14147 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
)))
14148 move_attach
= true;
14150 /* If we have e.g. map(struct: *var), don't gimplify the
14151 argument since omp-low.cc wants to see the decl itself. */
14152 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
14155 /* We've already partly gimplified this in
14156 gimplify_scan_omp_clauses. Don't do any more. */
14157 if (code
== OMP_TARGET
&& OMP_CLAUSE_MAP_IN_REDUCTION (c
))
14160 gimplify_omp_ctxp
= ctx
->outer_context
;
14161 if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
,
14162 fb_lvalue
) == GS_ERROR
)
14164 gimplify_omp_ctxp
= ctx
;
14168 if ((code
== OMP_TARGET
14169 || code
== OMP_TARGET_DATA
14170 || code
== OMP_TARGET_ENTER_DATA
14171 || code
== OMP_TARGET_EXIT_DATA
)
14172 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
14174 bool firstprivatize
= false;
14176 for (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
; octx
;
14177 octx
= octx
->outer_context
)
14180 = splay_tree_lookup (octx
->variables
,
14181 (splay_tree_key
) OMP_CLAUSE_DECL (c
));
14182 /* If this is contained in an outer OpenMP region as a
14183 firstprivate value, remove the attach/detach. */
14184 if (n
&& (n
->value
& GOVD_FIRSTPRIVATE
))
14186 firstprivatize
= true;
14191 enum gomp_map_kind map_kind
;
14192 if (firstprivatize
)
14193 map_kind
= GOMP_MAP_FIRSTPRIVATE_POINTER
;
14194 else if (code
== OMP_TARGET_EXIT_DATA
)
14195 map_kind
= GOMP_MAP_DETACH
;
14197 map_kind
= GOMP_MAP_ATTACH
;
14198 OMP_CLAUSE_SET_MAP_KIND (c
, map_kind
);
14200 else if ((ctx
->region_type
& ORT_ACC
) != 0
14201 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
14203 enum gomp_map_kind map_kind
= (code
== OACC_EXIT_DATA
14205 : GOMP_MAP_ATTACH
);
14206 OMP_CLAUSE_SET_MAP_KIND (c
, map_kind
);
14209 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
14210 if ((ctx
->region_type
& ORT_TARGET
) != 0
14211 && !(n
->value
& GOVD_SEEN
)
14212 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
14213 && (!is_global_var (decl
)
14214 || !lookup_attribute ("omp declare target link",
14215 DECL_ATTRIBUTES (decl
))))
14218 /* For struct element mapping, if struct is never referenced
14219 in target block and none of the mapping has always modifier,
14220 remove all the struct element mappings, which immediately
14221 follow the GOMP_MAP_STRUCT map clause. */
14222 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
14223 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT_UNORD
)
14225 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
14227 OMP_CLAUSE_CHAIN (c
)
14228 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
14231 else if (DECL_SIZE (decl
)
14232 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
14233 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
14234 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
14235 && (OMP_CLAUSE_MAP_KIND (c
)
14236 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
14238 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
14239 for these, TREE_CODE (DECL_SIZE (decl)) will always be
14241 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
14243 tree decl2
= DECL_VALUE_EXPR (decl
);
14244 gcc_assert (INDIRECT_REF_P (decl2
));
14245 decl2
= TREE_OPERAND (decl2
, 0);
14246 gcc_assert (DECL_P (decl2
));
14247 tree mem
= build_simple_mem_ref (decl2
);
14248 OMP_CLAUSE_DECL (c
) = mem
;
14249 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
14250 if (ctx
->outer_context
)
14252 omp_notice_variable (ctx
->outer_context
, decl2
, true);
14253 omp_notice_variable (ctx
->outer_context
,
14254 OMP_CLAUSE_SIZE (c
), true);
14256 if (((ctx
->region_type
& ORT_TARGET
) != 0
14257 || !ctx
->target_firstprivatize_array_bases
)
14258 && ((n
->value
& GOVD_SEEN
) == 0
14259 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
14261 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
14263 OMP_CLAUSE_DECL (nc
) = decl
;
14264 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
14265 if (ctx
->target_firstprivatize_array_bases
)
14266 OMP_CLAUSE_SET_MAP_KIND (nc
,
14267 GOMP_MAP_FIRSTPRIVATE_POINTER
);
14269 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
14270 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
14271 OMP_CLAUSE_CHAIN (c
) = nc
;
14277 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
14278 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
14279 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
14280 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
14284 /* If we have a target region, we can push all the attaches to the
14285 end of the list (we may have standalone "attach" operations
14286 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
14287 the attachment point AND the pointed-to block have been mapped).
14288 If we have something else, e.g. "enter data", we need to keep
14289 "attach" nodes together with the previous node they attach to so
14290 that separate "exit data" operations work properly (see
14291 libgomp/target.c). */
14292 if ((ctx
->region_type
& ORT_TARGET
) != 0
14293 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
14294 || (OMP_CLAUSE_MAP_KIND (c
)
14295 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
)))
14296 move_attach
= true;
14300 case OMP_CLAUSE_TO
:
14301 case OMP_CLAUSE_FROM
:
14302 case OMP_CLAUSE__CACHE_
:
14303 decl
= OMP_CLAUSE_DECL (c
);
14304 if (!DECL_P (decl
))
14306 if (DECL_SIZE (decl
)
14307 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
14309 tree decl2
= DECL_VALUE_EXPR (decl
);
14310 gcc_assert (INDIRECT_REF_P (decl2
));
14311 decl2
= TREE_OPERAND (decl2
, 0);
14312 gcc_assert (DECL_P (decl2
));
14313 tree mem
= build_simple_mem_ref (decl2
);
14314 OMP_CLAUSE_DECL (c
) = mem
;
14315 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
14316 if (ctx
->outer_context
)
14318 omp_notice_variable (ctx
->outer_context
, decl2
, true);
14319 omp_notice_variable (ctx
->outer_context
,
14320 OMP_CLAUSE_SIZE (c
), true);
14323 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
14324 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
14327 case OMP_CLAUSE_REDUCTION
:
14328 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
14330 decl
= OMP_CLAUSE_DECL (c
);
14331 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
14332 if ((n
->value
& GOVD_REDUCTION_INSCAN
) == 0)
14335 error_at (OMP_CLAUSE_LOCATION (c
),
14336 "%qD specified in %<inscan%> %<reduction%> clause "
14337 "but not in %<scan%> directive clause", decl
);
14340 has_inscan_reductions
= true;
14343 case OMP_CLAUSE_IN_REDUCTION
:
14344 case OMP_CLAUSE_TASK_REDUCTION
:
14345 decl
= OMP_CLAUSE_DECL (c
);
14346 /* OpenACC reductions need a present_or_copy data clause.
14347 Add one if necessary. Emit error when the reduction is private. */
14348 if (ctx
->region_type
== ORT_ACC_PARALLEL
14349 || ctx
->region_type
== ORT_ACC_SERIAL
)
14351 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
14352 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
14355 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
14356 "reduction on %qE", DECL_NAME (decl
));
14358 else if ((n
->value
& GOVD_MAP
) == 0)
14360 tree next
= OMP_CLAUSE_CHAIN (c
);
14361 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
14362 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
14363 OMP_CLAUSE_DECL (nc
) = decl
;
14364 OMP_CLAUSE_CHAIN (c
) = nc
;
14365 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
14370 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
14371 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
14373 nc
= OMP_CLAUSE_CHAIN (nc
);
14375 OMP_CLAUSE_CHAIN (nc
) = next
;
14376 n
->value
|= GOVD_MAP
;
14380 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
14381 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
14384 case OMP_CLAUSE_ALLOCATE
:
14385 decl
= OMP_CLAUSE_DECL (c
);
14386 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
14387 if (n
!= NULL
&& !(n
->value
& GOVD_SEEN
))
14389 if ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
| GOVD_LINEAR
))
14391 && (n
->value
& (GOVD_REDUCTION
| GOVD_LASTPRIVATE
)) == 0)
14395 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
14396 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)) != INTEGER_CST
14397 && ((ctx
->region_type
& (ORT_PARALLEL
| ORT_TARGET
)) != 0
14398 || (ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASK
14399 || (ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
))
14401 tree allocator
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
14402 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) allocator
);
14405 enum omp_clause_default_kind default_kind
14406 = ctx
->default_kind
;
14407 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
14408 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
14410 ctx
->default_kind
= default_kind
;
14413 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
14418 case OMP_CLAUSE_COPYIN
:
14419 case OMP_CLAUSE_COPYPRIVATE
:
14420 case OMP_CLAUSE_IF
:
14421 case OMP_CLAUSE_SELF
:
14422 case OMP_CLAUSE_NUM_THREADS
:
14423 case OMP_CLAUSE_NUM_TEAMS
:
14424 case OMP_CLAUSE_THREAD_LIMIT
:
14425 case OMP_CLAUSE_DIST_SCHEDULE
:
14426 case OMP_CLAUSE_DEVICE
:
14427 case OMP_CLAUSE_SCHEDULE
:
14428 case OMP_CLAUSE_NOWAIT
:
14429 case OMP_CLAUSE_ORDERED
:
14430 case OMP_CLAUSE_DEFAULT
:
14431 case OMP_CLAUSE_UNTIED
:
14432 case OMP_CLAUSE_COLLAPSE
:
14433 case OMP_CLAUSE_FINAL
:
14434 case OMP_CLAUSE_MERGEABLE
:
14435 case OMP_CLAUSE_PROC_BIND
:
14436 case OMP_CLAUSE_SAFELEN
:
14437 case OMP_CLAUSE_SIMDLEN
:
14438 case OMP_CLAUSE_DEPEND
:
14439 case OMP_CLAUSE_DOACROSS
:
14440 case OMP_CLAUSE_PRIORITY
:
14441 case OMP_CLAUSE_GRAINSIZE
:
14442 case OMP_CLAUSE_NUM_TASKS
:
14443 case OMP_CLAUSE_NOGROUP
:
14444 case OMP_CLAUSE_THREADS
:
14445 case OMP_CLAUSE_SIMD
:
14446 case OMP_CLAUSE_FILTER
:
14447 case OMP_CLAUSE_HINT
:
14448 case OMP_CLAUSE_DEFAULTMAP
:
14449 case OMP_CLAUSE_ORDER
:
14450 case OMP_CLAUSE_BIND
:
14451 case OMP_CLAUSE_DETACH
:
14452 case OMP_CLAUSE_USE_DEVICE_PTR
:
14453 case OMP_CLAUSE_USE_DEVICE_ADDR
:
14454 case OMP_CLAUSE_ASYNC
:
14455 case OMP_CLAUSE_WAIT
:
14456 case OMP_CLAUSE_INDEPENDENT
:
14457 case OMP_CLAUSE_NUM_GANGS
:
14458 case OMP_CLAUSE_NUM_WORKERS
:
14459 case OMP_CLAUSE_VECTOR_LENGTH
:
14460 case OMP_CLAUSE_GANG
:
14461 case OMP_CLAUSE_WORKER
:
14462 case OMP_CLAUSE_VECTOR
:
14463 case OMP_CLAUSE_AUTO
:
14464 case OMP_CLAUSE_SEQ
:
14465 case OMP_CLAUSE_TILE
:
14466 case OMP_CLAUSE_IF_PRESENT
:
14467 case OMP_CLAUSE_FINALIZE
:
14468 case OMP_CLAUSE_INCLUSIVE
:
14469 case OMP_CLAUSE_EXCLUSIVE
:
14472 case OMP_CLAUSE_NOHOST
:
14474 gcc_unreachable ();
14478 *list_p
= OMP_CLAUSE_CHAIN (c
);
14479 else if (move_attach
)
14481 /* Remove attach node from here, separate out into its own list. */
14483 *list_p
= OMP_CLAUSE_CHAIN (c
);
14484 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
14485 attach_tail
= &OMP_CLAUSE_CHAIN (c
);
14488 list_p
= &OMP_CLAUSE_CHAIN (c
);
14491 /* Splice attach nodes at the end of the list. */
14494 *list_p
= attach_list
;
14495 list_p
= attach_tail
;
14498 /* Add in any implicit data sharing. */
14499 struct gimplify_adjust_omp_clauses_data data
;
14500 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0)
14502 /* OpenMP. Implicit clauses are added at the start of the clause list,
14503 but after any non-map clauses. */
14504 tree
*implicit_add_list_p
= orig_list_p
;
14505 while (*implicit_add_list_p
14506 && OMP_CLAUSE_CODE (*implicit_add_list_p
) != OMP_CLAUSE_MAP
)
14507 implicit_add_list_p
= &OMP_CLAUSE_CHAIN (*implicit_add_list_p
);
14508 data
.list_p
= implicit_add_list_p
;
14512 data
.list_p
= list_p
;
14513 data
.pre_p
= pre_p
;
14514 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
14516 if (has_inscan_reductions
)
14517 for (c
= *orig_list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
14518 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
14519 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
14521 error_at (OMP_CLAUSE_LOCATION (c
),
14522 "%<inscan%> %<reduction%> clause used together with "
14523 "%<linear%> clause for a variable other than loop "
14528 gimplify_omp_ctxp
= ctx
->outer_context
;
14529 delete_omp_context (ctx
);
14532 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
14533 -1 if unknown yet (simd is involved, won't be known until vectorization)
14534 and 1 if they do. If SCORES is non-NULL, it should point to an array
14535 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
14536 of the CONSTRUCTS (position -1 if it will never match) followed by
14537 number of constructs in the OpenMP context construct trait. If the
14538 score depends on whether it will be in a declare simd clone or not,
14539 the function returns 2 and there will be two sets of the scores, the first
14540 one for the case that it is not in a declare simd clone, the other
14541 that it is in a declare simd clone. */
14544 omp_construct_selector_matches (enum tree_code
*constructs
, int nconstructs
,
14547 int matched
= 0, cnt
= 0;
14548 bool simd_seen
= false;
14549 bool target_seen
= false;
14550 int declare_simd_cnt
= -1;
14551 auto_vec
<enum tree_code
, 16> codes
;
14552 for (struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
; ctx
;)
14554 if (((ctx
->region_type
& ORT_PARALLEL
) && ctx
->code
== OMP_PARALLEL
)
14555 || ((ctx
->region_type
& (ORT_TARGET
| ORT_IMPLICIT_TARGET
| ORT_ACC
))
14556 == ORT_TARGET
&& ctx
->code
== OMP_TARGET
)
14557 || ((ctx
->region_type
& ORT_TEAMS
) && ctx
->code
== OMP_TEAMS
)
14558 || (ctx
->region_type
== ORT_WORKSHARE
&& ctx
->code
== OMP_FOR
)
14559 || (ctx
->region_type
== ORT_SIMD
14560 && ctx
->code
== OMP_SIMD
14561 && !omp_find_clause (ctx
->clauses
, OMP_CLAUSE_BIND
)))
14565 codes
.safe_push (ctx
->code
);
14566 else if (matched
< nconstructs
&& ctx
->code
== constructs
[matched
])
14568 if (ctx
->code
== OMP_SIMD
)
14576 if (ctx
->code
== OMP_TARGET
)
14578 if (scores
== NULL
)
14579 return matched
< nconstructs
? 0 : simd_seen
? -1 : 1;
14580 target_seen
= true;
14584 else if (ctx
->region_type
== ORT_WORKSHARE
14585 && ctx
->code
== OMP_LOOP
14586 && ctx
->outer_context
14587 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
14588 && ctx
->outer_context
->outer_context
14589 && ctx
->outer_context
->outer_context
->code
== OMP_LOOP
14590 && ctx
->outer_context
->outer_context
->distribute
)
14591 ctx
= ctx
->outer_context
->outer_context
;
14592 ctx
= ctx
->outer_context
;
14595 && lookup_attribute ("omp declare simd",
14596 DECL_ATTRIBUTES (current_function_decl
)))
14598 /* Declare simd is a maybe case, it is supposed to be added only to the
14599 omp-simd-clone.cc added clones and not to the base function. */
14600 declare_simd_cnt
= cnt
++;
14602 codes
.safe_push (OMP_SIMD
);
14604 && constructs
[0] == OMP_SIMD
)
14606 gcc_assert (matched
== 0);
14608 if (++matched
== nconstructs
)
14612 if (tree attr
= lookup_attribute ("omp declare variant variant",
14613 DECL_ATTRIBUTES (current_function_decl
)))
14615 tree selectors
= TREE_VALUE (attr
);
14616 int variant_nconstructs
= list_length (selectors
);
14617 enum tree_code
*variant_constructs
= NULL
;
14618 if (!target_seen
&& variant_nconstructs
)
14621 = (enum tree_code
*) alloca (variant_nconstructs
14622 * sizeof (enum tree_code
));
14623 omp_construct_traits_to_codes (selectors
, variant_nconstructs
,
14624 variant_constructs
);
14626 for (int i
= 0; i
< variant_nconstructs
; i
++)
14630 codes
.safe_push (variant_constructs
[i
]);
14631 else if (matched
< nconstructs
14632 && variant_constructs
[i
] == constructs
[matched
])
14634 if (variant_constructs
[i
] == OMP_SIMD
)
14645 && lookup_attribute ("omp declare target block",
14646 DECL_ATTRIBUTES (current_function_decl
)))
14649 codes
.safe_push (OMP_TARGET
);
14650 else if (matched
< nconstructs
&& constructs
[matched
] == OMP_TARGET
)
14655 for (int pass
= 0; pass
< (declare_simd_cnt
== -1 ? 1 : 2); pass
++)
14657 int j
= codes
.length () - 1;
14658 for (int i
= nconstructs
- 1; i
>= 0; i
--)
14661 && (pass
!= 0 || declare_simd_cnt
!= j
)
14662 && constructs
[i
] != codes
[j
])
14664 if (pass
== 0 && declare_simd_cnt
!= -1 && j
> declare_simd_cnt
)
14669 *scores
++ = ((pass
== 0 && declare_simd_cnt
!= -1)
14670 ? codes
.length () - 1 : codes
.length ());
14672 return declare_simd_cnt
== -1 ? 1 : 2;
14674 if (matched
== nconstructs
)
14675 return simd_seen
? -1 : 1;
14679 /* Gimplify OACC_CACHE. */
14682 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
14684 tree expr
= *expr_p
;
14686 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
14688 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
14691 /* TODO: Do something sensible with this information. */
14693 *expr_p
= NULL_TREE
;
14696 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
14697 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
14698 kind. The entry kind will replace the one in CLAUSE, while the exit
14699 kind will be used in a new omp_clause and returned to the caller. */
14702 gimplify_oacc_declare_1 (tree clause
)
14704 HOST_WIDE_INT kind
, new_op
;
14708 kind
= OMP_CLAUSE_MAP_KIND (clause
);
14712 case GOMP_MAP_ALLOC
:
14713 new_op
= GOMP_MAP_RELEASE
;
14717 case GOMP_MAP_FROM
:
14718 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
14719 new_op
= GOMP_MAP_FROM
;
14723 case GOMP_MAP_TOFROM
:
14724 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
14725 new_op
= GOMP_MAP_FROM
;
14729 case GOMP_MAP_DEVICE_RESIDENT
:
14730 case GOMP_MAP_FORCE_DEVICEPTR
:
14731 case GOMP_MAP_FORCE_PRESENT
:
14732 case GOMP_MAP_LINK
:
14733 case GOMP_MAP_POINTER
:
14738 gcc_unreachable ();
14744 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
14745 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
14746 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
14752 /* Gimplify OACC_DECLARE. */
14755 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
14757 tree expr
= *expr_p
;
14759 tree clauses
, t
, decl
;
14761 clauses
= OACC_DECLARE_CLAUSES (expr
);
14763 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
14764 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
14766 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
14768 decl
= OMP_CLAUSE_DECL (t
);
14770 if (TREE_CODE (decl
) == MEM_REF
)
14771 decl
= TREE_OPERAND (decl
, 0);
14773 if (VAR_P (decl
) && !is_oacc_declared (decl
))
14775 tree attr
= get_identifier ("oacc declare target");
14776 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
14777 DECL_ATTRIBUTES (decl
));
14781 && !is_global_var (decl
)
14782 && DECL_CONTEXT (decl
) == current_function_decl
)
14784 tree c
= gimplify_oacc_declare_1 (t
);
14787 if (oacc_declare_returns
== NULL
)
14788 oacc_declare_returns
= new hash_map
<tree
, tree
>;
14790 oacc_declare_returns
->put (decl
, c
);
14794 if (gimplify_omp_ctxp
)
14795 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
14798 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
14801 gimplify_seq_add_stmt (pre_p
, stmt
);
14803 *expr_p
= NULL_TREE
;
14806 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
14807 gimplification of the body, as well as scanning the body for used
14808 variables. We need to do this scan now, because variable-sized
14809 decls will be decomposed during gimplification. */
14812 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
14814 tree expr
= *expr_p
;
14816 gimple_seq body
= NULL
;
14818 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
14819 OMP_PARALLEL_COMBINED (expr
)
14820 ? ORT_COMBINED_PARALLEL
14821 : ORT_PARALLEL
, OMP_PARALLEL
);
14823 push_gimplify_context ();
14825 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
14826 if (gimple_code (g
) == GIMPLE_BIND
)
14827 pop_gimplify_context (g
);
14829 pop_gimplify_context (NULL
);
14831 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
14834 g
= gimple_build_omp_parallel (body
,
14835 OMP_PARALLEL_CLAUSES (expr
),
14836 NULL_TREE
, NULL_TREE
);
14837 if (OMP_PARALLEL_COMBINED (expr
))
14838 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
14839 gimplify_seq_add_stmt (pre_p
, g
);
14840 *expr_p
= NULL_TREE
;
14843 /* Gimplify the contents of an OMP_TASK statement. This involves
14844 gimplification of the body, as well as scanning the body for used
14845 variables. We need to do this scan now, because variable-sized
14846 decls will be decomposed during gimplification. */
14849 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
14851 tree expr
= *expr_p
;
14853 gimple_seq body
= NULL
;
14854 bool nowait
= false;
14855 bool has_depend
= false;
14857 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
14859 for (tree c
= OMP_TASK_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
14860 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
14863 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET
)
14865 error_at (OMP_CLAUSE_LOCATION (c
),
14866 "%<mutexinoutset%> kind in %<depend%> clause on a "
14867 "%<taskwait%> construct");
14871 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NOWAIT
)
14873 if (nowait
&& !has_depend
)
14875 error_at (EXPR_LOCATION (expr
),
14876 "%<taskwait%> construct with %<nowait%> clause but no "
14877 "%<depend%> clauses");
14878 *expr_p
= NULL_TREE
;
14883 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
14884 omp_find_clause (OMP_TASK_CLAUSES (expr
),
14886 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
14888 if (OMP_TASK_BODY (expr
))
14890 push_gimplify_context ();
14892 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
14893 if (gimple_code (g
) == GIMPLE_BIND
)
14894 pop_gimplify_context (g
);
14896 pop_gimplify_context (NULL
);
14899 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
14902 g
= gimple_build_omp_task (body
,
14903 OMP_TASK_CLAUSES (expr
),
14904 NULL_TREE
, NULL_TREE
,
14905 NULL_TREE
, NULL_TREE
, NULL_TREE
);
14906 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
14907 gimple_omp_task_set_taskwait_p (g
, true);
14908 gimplify_seq_add_stmt (pre_p
, g
);
14909 *expr_p
= NULL_TREE
;
14912 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
14913 force it into a temporary initialized in PRE_P and add firstprivate clause
14914 to ORIG_FOR_STMT. */
14917 gimplify_omp_taskloop_expr (tree type
, tree
*tp
, gimple_seq
*pre_p
,
14918 tree orig_for_stmt
)
14920 if (*tp
== NULL
|| is_gimple_constant (*tp
))
14923 *tp
= get_initialized_tmp_var (*tp
, pre_p
, NULL
, false);
14924 /* Reference to pointer conversion is considered useless,
14925 but is significant for firstprivate clause. Force it
14928 && TREE_CODE (type
) == POINTER_TYPE
14929 && TREE_CODE (TREE_TYPE (*tp
)) == REFERENCE_TYPE
)
14931 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
14932 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
, *tp
);
14933 gimplify_and_add (m
, pre_p
);
14937 tree c
= build_omp_clause (input_location
, OMP_CLAUSE_FIRSTPRIVATE
);
14938 OMP_CLAUSE_DECL (c
) = *tp
;
14939 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
14940 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
14943 /* Helper function of gimplify_omp_for, find OMP_ORDERED with
14944 null OMP_ORDERED_BODY inside of OMP_FOR's body. */
14947 find_standalone_omp_ordered (tree
*tp
, int *walk_subtrees
, void *)
14949 switch (TREE_CODE (*tp
))
14952 if (OMP_ORDERED_BODY (*tp
) == NULL_TREE
)
14958 *walk_subtrees
= 0;
14966 /* Gimplify the gross structure of an OMP_FOR statement. */
14968 static enum gimplify_status
14969 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
14971 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
14972 enum gimplify_status ret
= GS_ALL_DONE
;
14973 enum gimplify_status tret
;
14975 gimple_seq for_body
, for_pre_body
;
14977 bitmap has_decl_expr
= NULL
;
14978 enum omp_region_type ort
= ORT_WORKSHARE
;
14979 bool openacc
= TREE_CODE (*expr_p
) == OACC_LOOP
;
14981 orig_for_stmt
= for_stmt
= *expr_p
;
14983 bool loop_p
= (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_BIND
)
14985 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
14987 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
14988 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
14989 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
14990 find_combined_omp_for
, data
, NULL
);
14991 if (inner_for_stmt
== NULL_TREE
)
14993 gcc_assert (seen_error ());
14994 *expr_p
= NULL_TREE
;
14997 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
14999 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
15000 &OMP_FOR_PRE_BODY (for_stmt
));
15001 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
15003 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
15005 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
15006 &OMP_FOR_PRE_BODY (for_stmt
));
15007 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
15012 /* We have some statements or variable declarations in between
15013 the composite construct directives. Move them around the
15016 for (i
= 0; i
< 3; i
++)
15020 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
15021 data
[i
+ 1] = data
[i
];
15022 *data
[i
] = OMP_BODY (t
);
15023 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
15024 NULL_TREE
, make_node (BLOCK
));
15025 OMP_BODY (t
) = body
;
15026 append_to_statement_list_force (inner_for_stmt
,
15027 &BIND_EXPR_BODY (body
));
15029 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
15030 gcc_assert (*data
[3] == inner_for_stmt
);
15035 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
15037 && OMP_FOR_ORIG_DECLS (inner_for_stmt
)
15038 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
15040 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
15043 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
15044 /* Class iterators aren't allowed on OMP_SIMD, so the only
15045 case we need to solve is distribute parallel for. They are
15046 allowed on the loop construct, but that is already handled
15047 in gimplify_omp_loop. */
15048 gcc_assert (TREE_CODE (inner_for_stmt
) == OMP_FOR
15049 && TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
15051 tree orig_decl
= TREE_PURPOSE (orig
);
15052 tree last
= TREE_VALUE (orig
);
15054 for (pc
= &OMP_FOR_CLAUSES (inner_for_stmt
);
15055 *pc
; pc
= &OMP_CLAUSE_CHAIN (*pc
))
15056 if ((OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
15057 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LASTPRIVATE
)
15058 && OMP_CLAUSE_DECL (*pc
) == orig_decl
)
15060 if (*pc
== NULL_TREE
)
15063 for (spc
= &OMP_PARALLEL_CLAUSES (*data
[1]);
15064 *spc
; spc
= &OMP_CLAUSE_CHAIN (*spc
))
15065 if (OMP_CLAUSE_CODE (*spc
) == OMP_CLAUSE_PRIVATE
15066 && OMP_CLAUSE_DECL (*spc
) == orig_decl
)
15071 *spc
= OMP_CLAUSE_CHAIN (c
);
15072 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
15076 if (*pc
== NULL_TREE
)
15078 else if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
)
15080 /* private clause will appear only on inner_for_stmt.
15081 Change it into firstprivate, and add private clause
15083 tree c
= copy_node (*pc
);
15084 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
15085 OMP_FOR_CLAUSES (for_stmt
) = c
;
15086 OMP_CLAUSE_CODE (*pc
) = OMP_CLAUSE_FIRSTPRIVATE
;
15087 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
15091 /* lastprivate clause will appear on both inner_for_stmt
15092 and for_stmt. Add firstprivate clause to
15094 tree c
= build_omp_clause (OMP_CLAUSE_LOCATION (*pc
),
15095 OMP_CLAUSE_FIRSTPRIVATE
);
15096 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (*pc
);
15097 OMP_CLAUSE_CHAIN (c
) = *pc
;
15099 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
15101 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
15102 OMP_CLAUSE_FIRSTPRIVATE
);
15103 OMP_CLAUSE_DECL (c
) = last
;
15104 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
15105 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
15106 c
= build_omp_clause (UNKNOWN_LOCATION
,
15107 *pc
? OMP_CLAUSE_SHARED
15108 : OMP_CLAUSE_FIRSTPRIVATE
);
15109 OMP_CLAUSE_DECL (c
) = orig_decl
;
15110 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
15111 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
15113 /* Similarly, take care of C++ range for temporaries, those should
15114 be firstprivate on OMP_PARALLEL if any. */
15116 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
15117 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
15118 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
15120 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
15124 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
15125 tree v
= TREE_CHAIN (orig
);
15126 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
15127 OMP_CLAUSE_FIRSTPRIVATE
);
15128 /* First add firstprivate clause for the __for_end artificial
15130 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 1);
15131 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
15133 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
15134 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
15135 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
15136 if (TREE_VEC_ELT (v
, 0))
15138 /* And now the same for __for_range artificial decl if it
15140 c
= build_omp_clause (UNKNOWN_LOCATION
,
15141 OMP_CLAUSE_FIRSTPRIVATE
);
15142 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 0);
15143 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
15145 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
15146 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
15147 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
15152 switch (TREE_CODE (for_stmt
))
15155 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
))
15157 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
15158 OMP_CLAUSE_SCHEDULE
))
15159 error_at (EXPR_LOCATION (for_stmt
),
15160 "%qs clause may not appear on non-rectangular %qs",
15161 "schedule", lang_GNU_Fortran () ? "do" : "for");
15162 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
))
15163 error_at (EXPR_LOCATION (for_stmt
),
15164 "%qs clause may not appear on non-rectangular %qs",
15165 "ordered", lang_GNU_Fortran () ? "do" : "for");
15168 case OMP_DISTRIBUTE
:
15169 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
)
15170 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
15171 OMP_CLAUSE_DIST_SCHEDULE
))
15172 error_at (EXPR_LOCATION (for_stmt
),
15173 "%qs clause may not appear on non-rectangular %qs",
15174 "dist_schedule", "distribute");
15180 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
))
15182 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
15183 OMP_CLAUSE_GRAINSIZE
))
15184 error_at (EXPR_LOCATION (for_stmt
),
15185 "%qs clause may not appear on non-rectangular %qs",
15186 "grainsize", "taskloop");
15187 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
15188 OMP_CLAUSE_NUM_TASKS
))
15189 error_at (EXPR_LOCATION (for_stmt
),
15190 "%qs clause may not appear on non-rectangular %qs",
15191 "num_tasks", "taskloop");
15193 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
15194 ort
= ORT_UNTIED_TASKLOOP
;
15196 ort
= ORT_TASKLOOP
;
15202 gcc_unreachable ();
15205 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
15206 clause for the IV. */
15207 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
15209 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
15210 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
15211 decl
= TREE_OPERAND (t
, 0);
15212 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15213 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
15214 && OMP_CLAUSE_DECL (c
) == decl
)
15216 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
15221 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
15222 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
15223 loop_p
&& TREE_CODE (for_stmt
) != OMP_SIMD
15224 ? OMP_LOOP
: TREE_CODE (for_stmt
));
15226 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
15227 gimplify_omp_ctxp
->distribute
= true;
15229 /* Handle OMP_FOR_INIT. */
15230 for_pre_body
= NULL
;
15231 if ((ort
== ORT_SIMD
15232 || (inner_for_stmt
&& TREE_CODE (inner_for_stmt
) == OMP_SIMD
))
15233 && OMP_FOR_PRE_BODY (for_stmt
))
15235 has_decl_expr
= BITMAP_ALLOC (NULL
);
15236 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
15237 && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
))))
15239 t
= OMP_FOR_PRE_BODY (for_stmt
);
15240 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
15242 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
15244 tree_stmt_iterator si
;
15245 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
15249 if (TREE_CODE (t
) == DECL_EXPR
15250 && VAR_P (DECL_EXPR_DECL (t
)))
15251 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
15255 if (OMP_FOR_PRE_BODY (for_stmt
))
15257 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
15258 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
15261 struct gimplify_omp_ctx ctx
;
15262 memset (&ctx
, 0, sizeof (ctx
));
15263 ctx
.region_type
= ORT_NONE
;
15264 gimplify_omp_ctxp
= &ctx
;
15265 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
15266 gimplify_omp_ctxp
= NULL
;
15269 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
15271 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
15272 for_stmt
= inner_for_stmt
;
15274 /* For taskloop, need to gimplify the start, end and step before the
15275 taskloop, outside of the taskloop omp context. */
15276 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
15278 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
15280 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
15281 gimple_seq
*for_pre_p
= (gimple_seq_empty_p (for_pre_body
)
15282 ? pre_p
: &for_pre_body
);
15283 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
15284 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
15286 tree v
= TREE_OPERAND (t
, 1);
15287 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
15288 for_pre_p
, orig_for_stmt
);
15289 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
15290 for_pre_p
, orig_for_stmt
);
15293 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
15296 /* Handle OMP_FOR_COND. */
15297 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
15298 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
15300 tree v
= TREE_OPERAND (t
, 1);
15301 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
15302 for_pre_p
, orig_for_stmt
);
15303 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
15304 for_pre_p
, orig_for_stmt
);
15307 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
15310 /* Handle OMP_FOR_INCR. */
15311 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
15312 if (TREE_CODE (t
) == MODIFY_EXPR
)
15314 decl
= TREE_OPERAND (t
, 0);
15315 t
= TREE_OPERAND (t
, 1);
15316 tree
*tp
= &TREE_OPERAND (t
, 1);
15317 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
15318 tp
= &TREE_OPERAND (t
, 0);
15320 gimplify_omp_taskloop_expr (NULL_TREE
, tp
, for_pre_p
,
15325 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
15329 if (orig_for_stmt
!= for_stmt
)
15330 gimplify_omp_ctxp
->combined_loop
= true;
15333 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
15334 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
15335 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
15336 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
15338 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
15339 bool is_doacross
= false;
15340 if (c
&& walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt
),
15341 find_standalone_omp_ordered
, NULL
))
15343 OMP_CLAUSE_ORDERED_DOACROSS (c
) = 1;
15344 is_doacross
= true;
15345 int len
= TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
));
15346 gimplify_omp_ctxp
->loop_iter_var
.create (len
* 2);
15347 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
15348 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LINEAR
)
15350 error_at (OMP_CLAUSE_LOCATION (*pc
),
15351 "%<linear%> clause may not be specified together "
15352 "with %<ordered%> clause if stand-alone %<ordered%> "
15353 "construct is nested in it");
15354 *pc
= OMP_CLAUSE_CHAIN (*pc
);
15357 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15359 int collapse
= 1, tile
= 0;
15360 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
15362 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
15363 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
15365 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
15366 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ALLOCATE
);
15367 hash_set
<tree
> *allocate_uids
= NULL
;
15370 allocate_uids
= new hash_set
<tree
>;
15371 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
15372 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
)
15373 allocate_uids
->add (OMP_CLAUSE_DECL (c
));
15375 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
15377 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
15378 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
15379 decl
= TREE_OPERAND (t
, 0);
15380 gcc_assert (DECL_P (decl
));
15381 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
15382 || POINTER_TYPE_P (TREE_TYPE (decl
)));
15385 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
15387 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
15388 if (TREE_CODE (orig_decl
) == TREE_LIST
)
15390 orig_decl
= TREE_PURPOSE (orig_decl
);
15394 gimplify_omp_ctxp
->loop_iter_var
.quick_push (orig_decl
);
15397 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
15398 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
15401 if (for_stmt
== orig_for_stmt
)
15403 tree orig_decl
= decl
;
15404 if (OMP_FOR_ORIG_DECLS (for_stmt
))
15406 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
15407 if (TREE_CODE (orig_decl
) == TREE_LIST
)
15409 orig_decl
= TREE_PURPOSE (orig_decl
);
15414 if (is_global_var (orig_decl
) && DECL_THREAD_LOCAL_P (orig_decl
))
15415 error_at (EXPR_LOCATION (for_stmt
),
15416 "threadprivate iteration variable %qD", orig_decl
);
15419 /* Make sure the iteration variable is private. */
15420 tree c
= NULL_TREE
;
15421 tree c2
= NULL_TREE
;
15422 if (orig_for_stmt
!= for_stmt
)
15424 /* Preserve this information until we gimplify the inner simd. */
15426 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
15427 TREE_PRIVATE (t
) = 1;
15429 else if (ort
== ORT_SIMD
)
15431 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
15432 (splay_tree_key
) decl
);
15433 omp_is_private (gimplify_omp_ctxp
, decl
,
15434 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
15436 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
15438 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
15439 if (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
)
15440 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
15441 OMP_CLAUSE_LASTPRIVATE
);
15442 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
15443 OMP_CLAUSE_LASTPRIVATE
))
15444 if (OMP_CLAUSE_DECL (c3
) == decl
)
15446 warning_at (OMP_CLAUSE_LOCATION (c3
), OPT_Wopenmp
,
15447 "conditional %<lastprivate%> on loop "
15448 "iterator %qD ignored", decl
);
15449 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
15450 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
15453 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1 && !loop_p
)
15455 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
15456 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
15457 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
15459 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
15460 || TREE_PRIVATE (t
))
15462 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
15463 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
15465 struct gimplify_omp_ctx
*outer
15466 = gimplify_omp_ctxp
->outer_context
;
15467 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
15469 if (outer
->region_type
== ORT_WORKSHARE
15470 && outer
->combined_loop
)
15472 n
= splay_tree_lookup (outer
->variables
,
15473 (splay_tree_key
)decl
);
15474 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
15476 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
15477 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
15481 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
15483 && octx
->region_type
== ORT_COMBINED_PARALLEL
15484 && octx
->outer_context
15485 && (octx
->outer_context
->region_type
15487 && octx
->outer_context
->combined_loop
)
15489 octx
= octx
->outer_context
;
15490 n
= splay_tree_lookup (octx
->variables
,
15491 (splay_tree_key
)decl
);
15492 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
15494 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
15495 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
15502 OMP_CLAUSE_DECL (c
) = decl
;
15503 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
15504 OMP_FOR_CLAUSES (for_stmt
) = c
;
15505 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
15506 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
15507 omp_lastprivate_for_combined_outer_constructs (outer
, decl
,
15514 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
15515 if (TREE_PRIVATE (t
))
15516 lastprivate
= false;
15517 if (loop_p
&& OMP_FOR_ORIG_DECLS (for_stmt
))
15519 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
15520 if (TREE_CODE (elt
) == TREE_LIST
&& TREE_PURPOSE (elt
))
15521 lastprivate
= false;
15524 struct gimplify_omp_ctx
*outer
15525 = gimplify_omp_ctxp
->outer_context
;
15526 if (outer
&& lastprivate
)
15527 omp_lastprivate_for_combined_outer_constructs (outer
, decl
,
15530 c
= build_omp_clause (input_location
,
15531 lastprivate
? OMP_CLAUSE_LASTPRIVATE
15532 : OMP_CLAUSE_PRIVATE
);
15533 OMP_CLAUSE_DECL (c
) = decl
;
15534 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
15535 OMP_FOR_CLAUSES (for_stmt
) = c
;
15536 omp_add_variable (gimplify_omp_ctxp
, decl
,
15537 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
15538 | GOVD_EXPLICIT
| GOVD_SEEN
);
15542 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
15544 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
15545 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
15546 (splay_tree_key
) decl
);
15547 if (n
&& (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
))
15548 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
15549 OMP_CLAUSE_LASTPRIVATE
);
15550 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
15551 OMP_CLAUSE_LASTPRIVATE
))
15552 if (OMP_CLAUSE_DECL (c3
) == decl
)
15554 warning_at (OMP_CLAUSE_LOCATION (c3
), OPT_Wopenmp
,
15555 "conditional %<lastprivate%> on loop "
15556 "iterator %qD ignored", decl
);
15557 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
15558 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
15562 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
15564 /* If DECL is not a gimple register, create a temporary variable to act
15565 as an iteration counter. This is valid, since DECL cannot be
15566 modified in the body of the loop. Similarly for any iteration vars
15567 in simd with collapse > 1 where the iterator vars must be
15568 lastprivate. And similarly for vars mentioned in allocate clauses. */
15569 if (orig_for_stmt
!= for_stmt
)
15571 else if (!is_gimple_reg (decl
)
15572 || (ort
== ORT_SIMD
15573 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1)
15574 || (allocate_uids
&& allocate_uids
->contains (decl
)))
15576 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
15577 /* Make sure omp_add_variable is not called on it prematurely.
15578 We call it ourselves a few lines later. */
15579 gimplify_omp_ctxp
= NULL
;
15580 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
15581 gimplify_omp_ctxp
= ctx
;
15582 TREE_OPERAND (t
, 0) = var
;
15584 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
15586 if (ort
== ORT_SIMD
15587 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
15589 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
15590 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
15591 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
15592 OMP_CLAUSE_DECL (c2
) = var
;
15593 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
15594 OMP_FOR_CLAUSES (for_stmt
) = c2
;
15595 omp_add_variable (gimplify_omp_ctxp
, var
,
15596 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
15597 if (c
== NULL_TREE
)
15604 omp_add_variable (gimplify_omp_ctxp
, var
,
15605 GOVD_PRIVATE
| GOVD_SEEN
);
15610 gimplify_omp_ctxp
->in_for_exprs
= true;
15611 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
15613 tree lb
= TREE_OPERAND (t
, 1);
15614 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 1), &for_pre_body
, NULL
,
15615 is_gimple_val
, fb_rvalue
, false);
15616 ret
= MIN (ret
, tret
);
15617 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 2), &for_pre_body
, NULL
,
15618 is_gimple_val
, fb_rvalue
, false);
15621 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
15622 is_gimple_val
, fb_rvalue
, false);
15623 gimplify_omp_ctxp
->in_for_exprs
= false;
15624 ret
= MIN (ret
, tret
);
15625 if (ret
== GS_ERROR
)
15628 /* Handle OMP_FOR_COND. */
15629 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
15630 gcc_assert (COMPARISON_CLASS_P (t
));
15631 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
15633 gimplify_omp_ctxp
->in_for_exprs
= true;
15634 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
15636 tree ub
= TREE_OPERAND (t
, 1);
15637 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 1), &for_pre_body
, NULL
,
15638 is_gimple_val
, fb_rvalue
, false);
15639 ret
= MIN (ret
, tret
);
15640 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 2), &for_pre_body
, NULL
,
15641 is_gimple_val
, fb_rvalue
, false);
15644 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
15645 is_gimple_val
, fb_rvalue
, false);
15646 gimplify_omp_ctxp
->in_for_exprs
= false;
15647 ret
= MIN (ret
, tret
);
15649 /* Handle OMP_FOR_INCR. */
15650 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
15651 switch (TREE_CODE (t
))
15653 case PREINCREMENT_EXPR
:
15654 case POSTINCREMENT_EXPR
:
15656 tree decl
= TREE_OPERAND (t
, 0);
15657 /* c_omp_for_incr_canonicalize_ptr() should have been
15658 called to massage things appropriately. */
15659 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
15661 if (orig_for_stmt
!= for_stmt
)
15663 t
= build_int_cst (TREE_TYPE (decl
), 1);
15665 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
15666 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
15667 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
15668 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
15672 case PREDECREMENT_EXPR
:
15673 case POSTDECREMENT_EXPR
:
15674 /* c_omp_for_incr_canonicalize_ptr() should have been
15675 called to massage things appropriately. */
15676 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
15677 if (orig_for_stmt
!= for_stmt
)
15679 t
= build_int_cst (TREE_TYPE (decl
), -1);
15681 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
15682 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
15683 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
15684 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
15688 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
15689 TREE_OPERAND (t
, 0) = var
;
15691 t
= TREE_OPERAND (t
, 1);
15692 switch (TREE_CODE (t
))
15695 if (TREE_OPERAND (t
, 1) == decl
)
15697 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
15698 TREE_OPERAND (t
, 0) = var
;
15704 case POINTER_PLUS_EXPR
:
15705 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
15706 TREE_OPERAND (t
, 0) = var
;
15709 gcc_unreachable ();
15712 gimplify_omp_ctxp
->in_for_exprs
= true;
15713 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
15714 is_gimple_val
, fb_rvalue
, false);
15715 ret
= MIN (ret
, tret
);
15718 tree step
= TREE_OPERAND (t
, 1);
15719 tree stept
= TREE_TYPE (decl
);
15720 if (POINTER_TYPE_P (stept
))
15722 step
= fold_convert (stept
, step
);
15723 if (TREE_CODE (t
) == MINUS_EXPR
)
15724 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
15725 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
15726 if (step
!= TREE_OPERAND (t
, 1))
15728 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
15729 &for_pre_body
, NULL
,
15730 is_gimple_val
, fb_rvalue
, false);
15731 ret
= MIN (ret
, tret
);
15734 gimplify_omp_ctxp
->in_for_exprs
= false;
15738 gcc_unreachable ();
15744 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
15747 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
15749 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15750 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
15751 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
15752 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
15753 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
15754 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
15755 && OMP_CLAUSE_DECL (c
) == decl
)
15757 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
15761 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
15762 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
15763 gcc_assert (TREE_OPERAND (t
, 0) == var
);
15764 t
= TREE_OPERAND (t
, 1);
15765 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
15766 || TREE_CODE (t
) == MINUS_EXPR
15767 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
15768 gcc_assert (TREE_OPERAND (t
, 0) == var
);
15769 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
15770 is_doacross
? var
: decl
,
15771 TREE_OPERAND (t
, 1));
15774 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
15775 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
15777 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
15778 push_gimplify_context ();
15779 gimplify_assign (decl
, t
, seq
);
15780 gimple
*bind
= NULL
;
15781 if (gimplify_ctxp
->temps
)
15783 bind
= gimple_build_bind (NULL_TREE
, *seq
, NULL_TREE
);
15785 gimplify_seq_add_stmt (seq
, bind
);
15787 pop_gimplify_context (bind
);
15790 if (OMP_FOR_NON_RECTANGULAR (for_stmt
) && var
!= decl
)
15791 for (int j
= i
+ 1; j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
15793 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
15794 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
15795 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
15796 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
15797 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
15798 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
15799 gcc_assert (COMPARISON_CLASS_P (t
));
15800 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
15801 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
15802 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
15806 BITMAP_FREE (has_decl_expr
);
15807 delete allocate_uids
;
15809 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
15810 || (loop_p
&& orig_for_stmt
== for_stmt
))
15812 push_gimplify_context ();
15813 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
15815 OMP_FOR_BODY (orig_for_stmt
)
15816 = build3 (BIND_EXPR
, void_type_node
, NULL
,
15817 OMP_FOR_BODY (orig_for_stmt
), NULL
);
15818 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
15822 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
15825 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
15826 || (loop_p
&& orig_for_stmt
== for_stmt
))
15828 if (gimple_code (g
) == GIMPLE_BIND
)
15829 pop_gimplify_context (g
);
15831 pop_gimplify_context (NULL
);
15834 if (orig_for_stmt
!= for_stmt
)
15835 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
15837 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
15838 decl
= TREE_OPERAND (t
, 0);
15839 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
15840 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
15841 gimplify_omp_ctxp
= ctx
->outer_context
;
15842 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
15843 gimplify_omp_ctxp
= ctx
;
15844 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
15845 TREE_OPERAND (t
, 0) = var
;
15846 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
15847 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
15848 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
15849 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
15850 for (int j
= i
+ 1;
15851 j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
15853 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
15854 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
15855 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
15856 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
15858 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
15859 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
15861 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
15862 gcc_assert (COMPARISON_CLASS_P (t
));
15863 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
15864 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
15866 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
15867 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
15872 gimplify_adjust_omp_clauses (pre_p
, for_body
,
15873 &OMP_FOR_CLAUSES (orig_for_stmt
),
15874 TREE_CODE (orig_for_stmt
));
15877 switch (TREE_CODE (orig_for_stmt
))
15879 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
15880 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
15881 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
15882 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
15883 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
15885 gcc_unreachable ();
15887 if (loop_p
&& kind
== GF_OMP_FOR_KIND_SIMD
)
15889 gimplify_seq_add_seq (pre_p
, for_pre_body
);
15890 for_pre_body
= NULL
;
15892 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
15893 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
15895 if (orig_for_stmt
!= for_stmt
)
15896 gimple_omp_for_set_combined_p (gfor
, true);
15897 if (gimplify_omp_ctxp
15898 && (gimplify_omp_ctxp
->combined_loop
15899 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
15900 && gimplify_omp_ctxp
->outer_context
15901 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
15903 gimple_omp_for_set_combined_into_p (gfor
, true);
15904 if (gimplify_omp_ctxp
->combined_loop
)
15905 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
15907 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
15910 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
15912 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
15913 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
15914 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
15915 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
15916 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
15917 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
15918 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
15919 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
15922 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
15923 constructs with GIMPLE_OMP_TASK sandwiched in between them.
15924 The outer taskloop stands for computing the number of iterations,
15925 counts for collapsed loops and holding taskloop specific clauses.
15926 The task construct stands for the effect of data sharing on the
15927 explicit task it creates and the inner taskloop stands for expansion
15928 of the static loop inside of the explicit task construct. */
15929 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
15931 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
15932 tree task_clauses
= NULL_TREE
;
15933 tree c
= *gfor_clauses_ptr
;
15934 tree
*gtask_clauses_ptr
= &task_clauses
;
15935 tree outer_for_clauses
= NULL_TREE
;
15936 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
15937 bitmap lastprivate_uids
= NULL
;
15938 if (omp_find_clause (c
, OMP_CLAUSE_ALLOCATE
))
15940 c
= omp_find_clause (c
, OMP_CLAUSE_LASTPRIVATE
);
15943 lastprivate_uids
= BITMAP_ALLOC (NULL
);
15944 for (; c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
15945 OMP_CLAUSE_LASTPRIVATE
))
15946 bitmap_set_bit (lastprivate_uids
,
15947 DECL_UID (OMP_CLAUSE_DECL (c
)));
15949 c
= *gfor_clauses_ptr
;
15951 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
15952 switch (OMP_CLAUSE_CODE (c
))
15954 /* These clauses are allowed on task, move them there. */
15955 case OMP_CLAUSE_SHARED
:
15956 case OMP_CLAUSE_FIRSTPRIVATE
:
15957 case OMP_CLAUSE_DEFAULT
:
15958 case OMP_CLAUSE_IF
:
15959 case OMP_CLAUSE_UNTIED
:
15960 case OMP_CLAUSE_FINAL
:
15961 case OMP_CLAUSE_MERGEABLE
:
15962 case OMP_CLAUSE_PRIORITY
:
15963 case OMP_CLAUSE_REDUCTION
:
15964 case OMP_CLAUSE_IN_REDUCTION
:
15965 *gtask_clauses_ptr
= c
;
15966 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
15968 case OMP_CLAUSE_PRIVATE
:
15969 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
15971 /* We want private on outer for and firstprivate
15974 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
15975 OMP_CLAUSE_FIRSTPRIVATE
);
15976 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
15977 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
15979 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
15980 *gforo_clauses_ptr
= c
;
15981 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
15985 *gtask_clauses_ptr
= c
;
15986 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
15989 /* These clauses go into outer taskloop clauses. */
15990 case OMP_CLAUSE_GRAINSIZE
:
15991 case OMP_CLAUSE_NUM_TASKS
:
15992 case OMP_CLAUSE_NOGROUP
:
15993 *gforo_clauses_ptr
= c
;
15994 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
15996 /* Collapse clause we duplicate on both taskloops. */
15997 case OMP_CLAUSE_COLLAPSE
:
15998 *gfor_clauses_ptr
= c
;
15999 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
16000 *gforo_clauses_ptr
= copy_node (c
);
16001 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
16003 /* For lastprivate, keep the clause on inner taskloop, and add
16004 a shared clause on task. If the same decl is also firstprivate,
16005 add also firstprivate clause on the inner taskloop. */
16006 case OMP_CLAUSE_LASTPRIVATE
:
16007 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
16009 /* For taskloop C++ lastprivate IVs, we want:
16010 1) private on outer taskloop
16011 2) firstprivate and shared on task
16012 3) lastprivate on inner taskloop */
16014 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
16015 OMP_CLAUSE_FIRSTPRIVATE
);
16016 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
16017 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
16019 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
16020 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
16021 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
16022 OMP_CLAUSE_PRIVATE
);
16023 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
16024 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
16025 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
16026 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
16028 *gfor_clauses_ptr
= c
;
16029 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
16031 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
16032 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
16033 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
16034 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
16036 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
16038 /* Allocate clause we duplicate on task and inner taskloop
16039 if the decl is lastprivate, otherwise just put on task. */
16040 case OMP_CLAUSE_ALLOCATE
:
16041 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
16042 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
16044 /* Additionally, put firstprivate clause on task
16045 for the allocator if it is not constant. */
16047 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
16048 OMP_CLAUSE_FIRSTPRIVATE
);
16049 OMP_CLAUSE_DECL (*gtask_clauses_ptr
)
16050 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
16051 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
16053 if (lastprivate_uids
16054 && bitmap_bit_p (lastprivate_uids
,
16055 DECL_UID (OMP_CLAUSE_DECL (c
))))
16057 *gfor_clauses_ptr
= c
;
16058 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
16059 *gtask_clauses_ptr
= copy_node (c
);
16060 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
16064 *gtask_clauses_ptr
= c
;
16065 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
16069 gcc_unreachable ();
16071 *gfor_clauses_ptr
= NULL_TREE
;
16072 *gtask_clauses_ptr
= NULL_TREE
;
16073 *gforo_clauses_ptr
= NULL_TREE
;
16074 BITMAP_FREE (lastprivate_uids
);
16075 gimple_set_location (gfor
, input_location
);
16076 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
16077 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
16078 NULL_TREE
, NULL_TREE
, NULL_TREE
);
16079 gimple_set_location (g
, input_location
);
16080 gimple_omp_task_set_taskloop_p (g
, true);
16081 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
16083 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
16084 gimple_omp_for_collapse (gfor
),
16085 gimple_omp_for_pre_body (gfor
));
16086 gimple_omp_for_set_pre_body (gfor
, NULL
);
16087 gimple_omp_for_set_combined_p (gforo
, true);
16088 gimple_omp_for_set_combined_into_p (gfor
, true);
16089 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
16091 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
16092 tree v
= create_tmp_var (type
);
16093 gimple_omp_for_set_index (gforo
, i
, v
);
16094 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
16095 gimple_omp_for_set_initial (gforo
, i
, t
);
16096 gimple_omp_for_set_cond (gforo
, i
,
16097 gimple_omp_for_cond (gfor
, i
));
16098 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
16099 gimple_omp_for_set_final (gforo
, i
, t
);
16100 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
16101 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
16102 TREE_OPERAND (t
, 0) = v
;
16103 gimple_omp_for_set_incr (gforo
, i
, t
);
16104 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
16105 OMP_CLAUSE_DECL (t
) = v
;
16106 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
16107 gimple_omp_for_set_clauses (gforo
, t
);
16108 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
16110 tree
*p1
= NULL
, *p2
= NULL
;
16111 t
= gimple_omp_for_initial (gforo
, i
);
16112 if (TREE_CODE (t
) == TREE_VEC
)
16113 p1
= &TREE_VEC_ELT (t
, 0);
16114 t
= gimple_omp_for_final (gforo
, i
);
16115 if (TREE_CODE (t
) == TREE_VEC
)
16118 p2
= &TREE_VEC_ELT (t
, 0);
16120 p1
= &TREE_VEC_ELT (t
, 0);
16125 for (j
= 0; j
< i
; j
++)
16126 if (*p1
== gimple_omp_for_index (gfor
, j
))
16128 *p1
= gimple_omp_for_index (gforo
, j
);
16133 gcc_assert (j
< i
);
16137 gimplify_seq_add_stmt (pre_p
, gforo
);
16140 gimplify_seq_add_stmt (pre_p
, gfor
);
16142 if (TREE_CODE (orig_for_stmt
) == OMP_FOR
)
16144 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
16145 unsigned lastprivate_conditional
= 0;
16147 && (ctx
->region_type
== ORT_TARGET_DATA
16148 || ctx
->region_type
== ORT_TASKGROUP
))
16149 ctx
= ctx
->outer_context
;
16150 if (ctx
&& (ctx
->region_type
& ORT_PARALLEL
) != 0)
16151 for (tree c
= gimple_omp_for_clauses (gfor
);
16152 c
; c
= OMP_CLAUSE_CHAIN (c
))
16153 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
16154 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
16155 ++lastprivate_conditional
;
16156 if (lastprivate_conditional
)
16158 struct omp_for_data fd
;
16159 omp_extract_for_data (gfor
, &fd
, NULL
);
16160 tree type
= build_array_type_nelts (unsigned_type_for (fd
.iter_type
),
16161 lastprivate_conditional
);
16162 tree var
= create_tmp_var_raw (type
);
16163 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
16164 OMP_CLAUSE_DECL (c
) = var
;
16165 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
16166 gimple_omp_for_set_clauses (gfor
, c
);
16167 omp_add_variable (ctx
, var
, GOVD_CONDTEMP
| GOVD_SEEN
);
16170 else if (TREE_CODE (orig_for_stmt
) == OMP_SIMD
)
16172 unsigned lastprivate_conditional
= 0;
16173 for (tree c
= gimple_omp_for_clauses (gfor
); c
; c
= OMP_CLAUSE_CHAIN (c
))
16174 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
16175 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
16176 ++lastprivate_conditional
;
16177 if (lastprivate_conditional
)
16179 struct omp_for_data fd
;
16180 omp_extract_for_data (gfor
, &fd
, NULL
);
16181 tree type
= unsigned_type_for (fd
.iter_type
);
16182 while (lastprivate_conditional
--)
16184 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
16185 OMP_CLAUSE__CONDTEMP_
);
16186 OMP_CLAUSE_DECL (c
) = create_tmp_var (type
);
16187 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
16188 gimple_omp_for_set_clauses (gfor
, c
);
16193 if (ret
!= GS_ALL_DONE
)
16195 *expr_p
= NULL_TREE
;
16196 return GS_ALL_DONE
;
16199 /* Helper for gimplify_omp_loop, called through walk_tree. */
16202 note_no_context_vars (tree
*tp
, int *, void *data
)
16205 && DECL_CONTEXT (*tp
) == NULL_TREE
16206 && !is_global_var (*tp
))
16208 vec
<tree
> *d
= (vec
<tree
> *) data
;
16209 d
->safe_push (*tp
);
16210 DECL_CONTEXT (*tp
) = current_function_decl
;
16215 /* Gimplify the gross structure of an OMP_LOOP statement. */
16217 static enum gimplify_status
16218 gimplify_omp_loop (tree
*expr_p
, gimple_seq
*pre_p
)
16220 tree for_stmt
= *expr_p
;
16221 tree clauses
= OMP_FOR_CLAUSES (for_stmt
);
16222 struct gimplify_omp_ctx
*octx
= gimplify_omp_ctxp
;
16223 enum omp_clause_bind_kind kind
= OMP_CLAUSE_BIND_THREAD
;
16226 /* If order is not present, the behavior is as if order(concurrent)
16228 tree order
= omp_find_clause (clauses
, OMP_CLAUSE_ORDER
);
16229 if (order
== NULL_TREE
)
16231 order
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_ORDER
);
16232 OMP_CLAUSE_CHAIN (order
) = clauses
;
16233 OMP_FOR_CLAUSES (for_stmt
) = clauses
= order
;
16236 tree bind
= omp_find_clause (clauses
, OMP_CLAUSE_BIND
);
16237 if (bind
== NULL_TREE
)
16239 if (!flag_openmp
) /* flag_openmp_simd */
16241 else if (octx
&& (octx
->region_type
& ORT_TEAMS
) != 0)
16242 kind
= OMP_CLAUSE_BIND_TEAMS
;
16243 else if (octx
&& (octx
->region_type
& ORT_PARALLEL
) != 0)
16244 kind
= OMP_CLAUSE_BIND_PARALLEL
;
16247 for (; octx
; octx
= octx
->outer_context
)
16249 if ((octx
->region_type
& ORT_ACC
) != 0
16250 || octx
->region_type
== ORT_NONE
16251 || octx
->region_type
== ORT_IMPLICIT_TARGET
)
16255 if (octx
== NULL
&& !in_omp_construct
)
16256 error_at (EXPR_LOCATION (for_stmt
),
16257 "%<bind%> clause not specified on a %<loop%> "
16258 "construct not nested inside another OpenMP construct");
16260 bind
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_BIND
);
16261 OMP_CLAUSE_CHAIN (bind
) = clauses
;
16262 OMP_CLAUSE_BIND_KIND (bind
) = kind
;
16263 OMP_FOR_CLAUSES (for_stmt
) = bind
;
16266 switch (OMP_CLAUSE_BIND_KIND (bind
))
16268 case OMP_CLAUSE_BIND_THREAD
:
16270 case OMP_CLAUSE_BIND_PARALLEL
:
16271 if (!flag_openmp
) /* flag_openmp_simd */
16273 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
16276 for (; octx
; octx
= octx
->outer_context
)
16277 if (octx
->region_type
== ORT_SIMD
16278 && omp_find_clause (octx
->clauses
, OMP_CLAUSE_BIND
) == NULL_TREE
)
16280 error_at (EXPR_LOCATION (for_stmt
),
16281 "%<bind(parallel)%> on a %<loop%> construct nested "
16282 "inside %<simd%> construct");
16283 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
16286 kind
= OMP_CLAUSE_BIND_PARALLEL
;
16288 case OMP_CLAUSE_BIND_TEAMS
:
16289 if (!flag_openmp
) /* flag_openmp_simd */
16291 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
16295 && octx
->region_type
!= ORT_IMPLICIT_TARGET
16296 && octx
->region_type
!= ORT_NONE
16297 && (octx
->region_type
& ORT_TEAMS
) == 0)
16298 || in_omp_construct
)
16300 error_at (EXPR_LOCATION (for_stmt
),
16301 "%<bind(teams)%> on a %<loop%> region not strictly "
16302 "nested inside of a %<teams%> region");
16303 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
16306 kind
= OMP_CLAUSE_BIND_TEAMS
;
16309 gcc_unreachable ();
16312 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
16313 switch (OMP_CLAUSE_CODE (*pc
))
16315 case OMP_CLAUSE_REDUCTION
:
16316 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc
))
16318 error_at (OMP_CLAUSE_LOCATION (*pc
),
16319 "%<inscan%> %<reduction%> clause on "
16320 "%qs construct", "loop");
16321 OMP_CLAUSE_REDUCTION_INSCAN (*pc
) = 0;
16323 if (OMP_CLAUSE_REDUCTION_TASK (*pc
))
16325 error_at (OMP_CLAUSE_LOCATION (*pc
),
16326 "invalid %<task%> reduction modifier on construct "
16327 "other than %<parallel%>, %qs or %<sections%>",
16328 lang_GNU_Fortran () ? "do" : "for");
16329 OMP_CLAUSE_REDUCTION_TASK (*pc
) = 0;
16331 pc
= &OMP_CLAUSE_CHAIN (*pc
);
16333 case OMP_CLAUSE_LASTPRIVATE
:
16334 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
16336 tree t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
16337 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
16338 if (OMP_CLAUSE_DECL (*pc
) == TREE_OPERAND (t
, 0))
16340 if (OMP_FOR_ORIG_DECLS (for_stmt
)
16341 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
16343 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
16346 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
16347 if (OMP_CLAUSE_DECL (*pc
) == TREE_PURPOSE (orig
))
16351 if (i
== TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)))
16353 error_at (OMP_CLAUSE_LOCATION (*pc
),
16354 "%<lastprivate%> clause on a %<loop%> construct refers "
16355 "to a variable %qD which is not the loop iterator",
16356 OMP_CLAUSE_DECL (*pc
));
16357 *pc
= OMP_CLAUSE_CHAIN (*pc
);
16360 pc
= &OMP_CLAUSE_CHAIN (*pc
);
16363 pc
= &OMP_CLAUSE_CHAIN (*pc
);
16367 TREE_SET_CODE (for_stmt
, OMP_SIMD
);
16372 case OMP_CLAUSE_BIND_THREAD
: last
= 0; break;
16373 case OMP_CLAUSE_BIND_PARALLEL
: last
= 1; break;
16374 case OMP_CLAUSE_BIND_TEAMS
: last
= 2; break;
16376 for (int pass
= 1; pass
<= last
; pass
++)
16380 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
,
16381 make_node (BLOCK
));
16382 append_to_statement_list (*expr_p
, &BIND_EXPR_BODY (bind
));
16383 *expr_p
= make_node (OMP_PARALLEL
);
16384 TREE_TYPE (*expr_p
) = void_type_node
;
16385 OMP_PARALLEL_BODY (*expr_p
) = bind
;
16386 OMP_PARALLEL_COMBINED (*expr_p
) = 1;
16387 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (for_stmt
));
16388 tree
*pc
= &OMP_PARALLEL_CLAUSES (*expr_p
);
16389 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
16390 if (OMP_FOR_ORIG_DECLS (for_stmt
)
16391 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
))
16394 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
16395 if (TREE_PURPOSE (elt
) && TREE_VALUE (elt
))
16397 *pc
= build_omp_clause (UNKNOWN_LOCATION
,
16398 OMP_CLAUSE_FIRSTPRIVATE
);
16399 OMP_CLAUSE_DECL (*pc
) = TREE_VALUE (elt
);
16400 pc
= &OMP_CLAUSE_CHAIN (*pc
);
16404 tree t
= make_node (pass
== 2 ? OMP_DISTRIBUTE
: OMP_FOR
);
16405 tree
*pc
= &OMP_FOR_CLAUSES (t
);
16406 TREE_TYPE (t
) = void_type_node
;
16407 OMP_FOR_BODY (t
) = *expr_p
;
16408 SET_EXPR_LOCATION (t
, EXPR_LOCATION (for_stmt
));
16409 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
16410 switch (OMP_CLAUSE_CODE (c
))
16412 case OMP_CLAUSE_BIND
:
16413 case OMP_CLAUSE_ORDER
:
16414 case OMP_CLAUSE_COLLAPSE
:
16415 *pc
= copy_node (c
);
16416 pc
= &OMP_CLAUSE_CHAIN (*pc
);
16418 case OMP_CLAUSE_PRIVATE
:
16419 case OMP_CLAUSE_FIRSTPRIVATE
:
16420 /* Only needed on innermost. */
16422 case OMP_CLAUSE_LASTPRIVATE
:
16423 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
) && pass
!= last
)
16425 *pc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
16426 OMP_CLAUSE_FIRSTPRIVATE
);
16427 OMP_CLAUSE_DECL (*pc
) = OMP_CLAUSE_DECL (c
);
16428 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
16429 pc
= &OMP_CLAUSE_CHAIN (*pc
);
16431 *pc
= copy_node (c
);
16432 OMP_CLAUSE_LASTPRIVATE_STMT (*pc
) = NULL_TREE
;
16433 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
16434 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
16437 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc
) = 1;
16439 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
16440 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc
) = 0;
16442 pc
= &OMP_CLAUSE_CHAIN (*pc
);
16444 case OMP_CLAUSE_REDUCTION
:
16445 *pc
= copy_node (c
);
16446 OMP_CLAUSE_DECL (*pc
) = unshare_expr (OMP_CLAUSE_DECL (c
));
16447 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
16448 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
))
16450 auto_vec
<tree
> no_context_vars
;
16451 int walk_subtrees
= 0;
16452 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
16453 &walk_subtrees
, &no_context_vars
);
16454 if (tree p
= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
))
16455 note_no_context_vars (&p
, &walk_subtrees
, &no_context_vars
);
16456 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c
),
16457 note_no_context_vars
,
16459 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c
),
16460 note_no_context_vars
,
16463 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
)
16464 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
));
16465 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
16466 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
)
16467 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
));
16469 hash_map
<tree
, tree
> decl_map
;
16470 decl_map
.put (OMP_CLAUSE_DECL (c
), OMP_CLAUSE_DECL (c
));
16471 decl_map
.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
16472 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
));
16473 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
16474 decl_map
.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
16475 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
));
16478 memset (&id
, 0, sizeof (id
));
16479 id
.src_fn
= current_function_decl
;
16480 id
.dst_fn
= current_function_decl
;
16481 id
.src_cfun
= cfun
;
16482 id
.decl_map
= &decl_map
;
16483 id
.copy_decl
= copy_decl_no_change
;
16484 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
16485 id
.transform_new_cfg
= true;
16486 id
.transform_return_to_modify
= false;
16488 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc
), copy_tree_body_r
,
16490 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc
), copy_tree_body_r
,
16493 for (tree d
: no_context_vars
)
16495 DECL_CONTEXT (d
) = NULL_TREE
;
16496 DECL_CONTEXT (*decl_map
.get (d
)) = NULL_TREE
;
16501 OMP_CLAUSE_REDUCTION_INIT (*pc
)
16502 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c
));
16503 OMP_CLAUSE_REDUCTION_MERGE (*pc
)
16504 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c
));
16506 pc
= &OMP_CLAUSE_CHAIN (*pc
);
16509 gcc_unreachable ();
16514 return gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_stmt
, fb_none
);
16518 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
16519 of OMP_TARGET's body. */
16522 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
16524 *walk_subtrees
= 0;
16525 switch (TREE_CODE (*tp
))
16530 case STATEMENT_LIST
:
16531 *walk_subtrees
= 1;
16539 /* Helper function of optimize_target_teams, determine if the expression
16540 can be computed safely before the target construct on the host. */
16543 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
16549 *walk_subtrees
= 0;
16552 switch (TREE_CODE (*tp
))
16557 *walk_subtrees
= 0;
16558 if (error_operand_p (*tp
)
16559 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
16560 || DECL_HAS_VALUE_EXPR_P (*tp
)
16561 || DECL_THREAD_LOCAL_P (*tp
)
16562 || TREE_SIDE_EFFECTS (*tp
)
16563 || TREE_THIS_VOLATILE (*tp
))
16565 if (is_global_var (*tp
)
16566 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
16567 || lookup_attribute ("omp declare target link",
16568 DECL_ATTRIBUTES (*tp
))))
16571 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
16572 && !is_global_var (*tp
)
16573 && decl_function_context (*tp
) == current_function_decl
)
16575 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
16576 (splay_tree_key
) *tp
);
16579 if (gimplify_omp_ctxp
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
16583 else if (n
->value
& GOVD_LOCAL
)
16585 else if (n
->value
& GOVD_FIRSTPRIVATE
)
16587 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
16588 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
16592 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
16596 if (TARGET_EXPR_INITIAL (*tp
)
16597 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
16599 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
16600 walk_subtrees
, NULL
);
16601 /* Allow some reasonable subset of integral arithmetics. */
16605 case TRUNC_DIV_EXPR
:
16606 case CEIL_DIV_EXPR
:
16607 case FLOOR_DIV_EXPR
:
16608 case ROUND_DIV_EXPR
:
16609 case TRUNC_MOD_EXPR
:
16610 case CEIL_MOD_EXPR
:
16611 case FLOOR_MOD_EXPR
:
16612 case ROUND_MOD_EXPR
:
16614 case EXACT_DIV_EXPR
:
16625 case NON_LVALUE_EXPR
:
16627 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
16630 /* And disallow anything else, except for comparisons. */
16632 if (COMPARISON_CLASS_P (*tp
))
16638 /* Try to determine if the num_teams and/or thread_limit expressions
16639 can have their values determined already before entering the
16641 INTEGER_CSTs trivially are,
16642 integral decls that are firstprivate (explicitly or implicitly)
16643 or explicitly map(always, to:) or map(always, tofrom:) on the target
16644 region too, and expressions involving simple arithmetics on those
16645 too, function calls are not ok, dereferencing something neither etc.
16646 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
16647 EXPR based on what we find:
16648 0 stands for clause not specified at all, use implementation default
16649 -1 stands for value that can't be determined easily before entering
16650 the target construct.
16651 -2 means that no explicit teams construct was specified
16652 If teams construct is not present at all, use 1 for num_teams
16653 and 0 for thread_limit (only one team is involved, and the thread
16654 limit is implementation defined. */
16657 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
16659 tree body
= OMP_BODY (target
);
16660 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
16661 tree num_teams_lower
= NULL_TREE
;
16662 tree num_teams_upper
= integer_zero_node
;
16663 tree thread_limit
= integer_zero_node
;
16664 location_t num_teams_loc
= EXPR_LOCATION (target
);
16665 location_t thread_limit_loc
= EXPR_LOCATION (target
);
16667 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
16669 if (teams
== NULL_TREE
)
16670 num_teams_upper
= build_int_cst (integer_type_node
, -2);
16672 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
16674 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
16676 p
= &num_teams_upper
;
16677 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
16678 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
))
16680 expr
= OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
);
16681 if (TREE_CODE (expr
) == INTEGER_CST
)
16682 num_teams_lower
= expr
;
16683 else if (walk_tree (&expr
, computable_teams_clause
,
16685 num_teams_lower
= integer_minus_one_node
;
16688 num_teams_lower
= expr
;
16689 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
16690 if (gimplify_expr (&num_teams_lower
, pre_p
, NULL
,
16691 is_gimple_val
, fb_rvalue
, false)
16694 gimplify_omp_ctxp
= target_ctx
;
16695 num_teams_lower
= integer_minus_one_node
;
16699 gimplify_omp_ctxp
= target_ctx
;
16700 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
16701 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
16707 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
16710 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
16714 expr
= OMP_CLAUSE_OPERAND (c
, 0);
16715 if (TREE_CODE (expr
) == INTEGER_CST
)
16720 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
16722 *p
= integer_minus_one_node
;
16726 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
16727 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
16730 gimplify_omp_ctxp
= target_ctx
;
16731 *p
= integer_minus_one_node
;
16734 gimplify_omp_ctxp
= target_ctx
;
16735 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
16736 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
16738 if (!omp_find_clause (OMP_TARGET_CLAUSES (target
), OMP_CLAUSE_THREAD_LIMIT
))
16740 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
16741 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
16742 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
16743 OMP_TARGET_CLAUSES (target
) = c
;
16745 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
16746 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c
) = num_teams_upper
;
16747 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
) = num_teams_lower
;
16748 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
16749 OMP_TARGET_CLAUSES (target
) = c
;
16752 /* Gimplify the gross structure of several OMP constructs. */
16755 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
16757 tree expr
= *expr_p
;
16759 gimple_seq body
= NULL
;
16760 enum omp_region_type ort
;
16762 switch (TREE_CODE (expr
))
16766 ort
= ORT_WORKSHARE
;
16769 ort
= ORT_TASKGROUP
;
16772 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
16775 ort
= ORT_ACC_KERNELS
;
16777 case OACC_PARALLEL
:
16778 ort
= ORT_ACC_PARALLEL
;
16781 ort
= ORT_ACC_SERIAL
;
16784 ort
= ORT_ACC_DATA
;
16786 case OMP_TARGET_DATA
:
16787 ort
= ORT_TARGET_DATA
;
16790 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
16791 if (gimplify_omp_ctxp
== NULL
16792 || gimplify_omp_ctxp
->region_type
== ORT_IMPLICIT_TARGET
)
16793 ort
= (enum omp_region_type
) (ort
| ORT_HOST_TEAMS
);
16795 case OACC_HOST_DATA
:
16796 ort
= ORT_ACC_HOST_DATA
;
16799 gcc_unreachable ();
16802 bool save_in_omp_construct
= in_omp_construct
;
16803 if ((ort
& ORT_ACC
) == 0)
16804 in_omp_construct
= false;
16805 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
16807 if (TREE_CODE (expr
) == OMP_TARGET
)
16808 optimize_target_teams (expr
, pre_p
);
16809 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
16810 || (ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
16812 push_gimplify_context ();
16813 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
16814 if (gimple_code (g
) == GIMPLE_BIND
)
16815 pop_gimplify_context (g
);
16817 pop_gimplify_context (NULL
);
16818 if ((ort
& ORT_TARGET_DATA
) != 0)
16820 enum built_in_function end_ix
;
16821 switch (TREE_CODE (expr
))
16824 case OACC_HOST_DATA
:
16825 end_ix
= BUILT_IN_GOACC_DATA_END
;
16827 case OMP_TARGET_DATA
:
16828 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
16831 gcc_unreachable ();
16833 tree fn
= builtin_decl_explicit (end_ix
);
16834 g
= gimple_build_call (fn
, 0);
16835 gimple_seq cleanup
= NULL
;
16836 gimple_seq_add_stmt (&cleanup
, g
);
16837 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
16839 gimple_seq_add_stmt (&body
, g
);
16843 gimplify_and_add (OMP_BODY (expr
), &body
);
16844 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
16846 in_omp_construct
= save_in_omp_construct
;
16848 switch (TREE_CODE (expr
))
16851 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
16852 OMP_CLAUSES (expr
));
16854 case OACC_HOST_DATA
:
16855 if (omp_find_clause (OMP_CLAUSES (expr
), OMP_CLAUSE_IF_PRESENT
))
16857 for (tree c
= OMP_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
16858 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
16859 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
) = 1;
16862 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
16863 OMP_CLAUSES (expr
));
16866 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
16867 OMP_CLAUSES (expr
));
16869 case OACC_PARALLEL
:
16870 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
16871 OMP_CLAUSES (expr
));
16874 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_SERIAL
,
16875 OMP_CLAUSES (expr
));
16878 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
16881 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
16884 stmt
= gimple_build_omp_scope (body
, OMP_CLAUSES (expr
));
16887 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
16888 OMP_CLAUSES (expr
));
16890 case OMP_TARGET_DATA
:
16891 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
16892 to be evaluated before the use_device_{ptr,addr} clauses if they
16893 refer to the same variables. */
16895 tree use_device_clauses
;
16896 tree
*pc
, *uc
= &use_device_clauses
;
16897 for (pc
= &OMP_CLAUSES (expr
); *pc
; )
16898 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_PTR
16899 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
16902 *pc
= OMP_CLAUSE_CHAIN (*pc
);
16903 uc
= &OMP_CLAUSE_CHAIN (*uc
);
16906 pc
= &OMP_CLAUSE_CHAIN (*pc
);
16908 *pc
= use_device_clauses
;
16909 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
16910 OMP_CLAUSES (expr
));
16914 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
16915 if ((ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
16916 gimple_omp_teams_set_host (as_a
<gomp_teams
*> (stmt
), true);
16919 gcc_unreachable ();
16922 gimplify_seq_add_stmt (pre_p
, stmt
);
16923 *expr_p
= NULL_TREE
;
16926 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
16927 target update constructs. */
16930 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
16932 tree expr
= *expr_p
;
16935 enum omp_region_type ort
= ORT_WORKSHARE
;
16937 switch (TREE_CODE (expr
))
16939 case OACC_ENTER_DATA
:
16940 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_DATA
;
16943 case OACC_EXIT_DATA
:
16944 kind
= GF_OMP_TARGET_KIND_OACC_EXIT_DATA
;
16948 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
16951 case OMP_TARGET_UPDATE
:
16952 kind
= GF_OMP_TARGET_KIND_UPDATE
;
16954 case OMP_TARGET_ENTER_DATA
:
16955 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
16957 case OMP_TARGET_EXIT_DATA
:
16958 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
16961 gcc_unreachable ();
16963 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
16964 ort
, TREE_CODE (expr
));
16965 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
16967 if (TREE_CODE (expr
) == OACC_UPDATE
16968 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
16969 OMP_CLAUSE_IF_PRESENT
))
16971 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
16973 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
16974 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
16975 switch (OMP_CLAUSE_MAP_KIND (c
))
16977 case GOMP_MAP_FORCE_TO
:
16978 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
16980 case GOMP_MAP_FORCE_FROM
:
16981 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FROM
);
16987 else if (TREE_CODE (expr
) == OACC_EXIT_DATA
16988 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
16989 OMP_CLAUSE_FINALIZE
))
16991 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
16993 bool have_clause
= false;
16994 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
16995 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
16996 switch (OMP_CLAUSE_MAP_KIND (c
))
16998 case GOMP_MAP_FROM
:
16999 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_FROM
);
17000 have_clause
= true;
17002 case GOMP_MAP_RELEASE
:
17003 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_DELETE
);
17004 have_clause
= true;
17006 case GOMP_MAP_TO_PSET
:
17007 /* Fortran arrays with descriptors must map that descriptor when
17008 doing standalone "attach" operations (in OpenACC). In that
17009 case GOMP_MAP_TO_PSET appears by itself with no preceding
17010 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
17012 case GOMP_MAP_POINTER
:
17013 /* TODO PR92929: we may see these here, but they'll always follow
17014 one of the clauses above, and will be handled by libgomp as
17015 one group, so no handling required here. */
17016 gcc_assert (have_clause
);
17018 case GOMP_MAP_DETACH
:
17019 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_DETACH
);
17020 have_clause
= false;
17022 case GOMP_MAP_STRUCT
:
17023 case GOMP_MAP_STRUCT_UNORD
:
17024 have_clause
= false;
17027 gcc_unreachable ();
17030 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
17032 gimplify_seq_add_stmt (pre_p
, stmt
);
17033 *expr_p
= NULL_TREE
;
17036 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
17037 stabilized the lhs of the atomic operation as *ADDR. Return true if
17038 EXPR is this stabilized form. */
17041 goa_lhs_expr_p (tree expr
, tree addr
)
17043 /* Also include casts to other type variants. The C front end is fond
17044 of adding these for e.g. volatile variables. This is like
17045 STRIP_TYPE_NOPS but includes the main variant lookup. */
17046 STRIP_USELESS_TYPE_CONVERSION (expr
);
17048 if (INDIRECT_REF_P (expr
))
17050 expr
= TREE_OPERAND (expr
, 0);
17051 while (expr
!= addr
17052 && (CONVERT_EXPR_P (expr
)
17053 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
17054 && TREE_CODE (expr
) == TREE_CODE (addr
)
17055 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
17057 expr
= TREE_OPERAND (expr
, 0);
17058 addr
= TREE_OPERAND (addr
, 0);
17062 return (TREE_CODE (addr
) == ADDR_EXPR
17063 && TREE_CODE (expr
) == ADDR_EXPR
17064 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
17066 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
17071 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
17072 expression does not involve the lhs, evaluate it into a temporary.
17073 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
17074 or -1 if an error was encountered. */
17077 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
17078 tree lhs_var
, tree
&target_expr
, bool rhs
, int depth
)
17080 tree expr
= *expr_p
;
17083 if (goa_lhs_expr_p (expr
, lhs_addr
))
17089 if (is_gimple_val (expr
))
17092 /* Maximum depth of lhs in expression is for the
17093 __builtin_clear_padding (...), __builtin_clear_padding (...),
17094 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
17098 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
17101 case tcc_comparison
:
17102 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
17103 lhs_var
, target_expr
, true, depth
);
17106 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
17107 lhs_var
, target_expr
, true, depth
);
17109 case tcc_expression
:
17110 switch (TREE_CODE (expr
))
17112 case TRUTH_ANDIF_EXPR
:
17113 case TRUTH_ORIF_EXPR
:
17114 case TRUTH_AND_EXPR
:
17115 case TRUTH_OR_EXPR
:
17116 case TRUTH_XOR_EXPR
:
17117 case BIT_INSERT_EXPR
:
17118 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
17119 lhs_addr
, lhs_var
, target_expr
, true,
17122 case TRUTH_NOT_EXPR
:
17123 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
17124 lhs_addr
, lhs_var
, target_expr
, true,
17128 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
, lhs_var
,
17129 target_expr
, true, depth
))
17131 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
17132 lhs_addr
, lhs_var
, target_expr
, true,
17134 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
17135 lhs_addr
, lhs_var
, target_expr
, false,
17140 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
, lhs_var
,
17141 target_expr
, true, depth
))
17143 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
17144 lhs_addr
, lhs_var
, target_expr
, false,
17147 case COMPOUND_EXPR
:
17148 /* Break out any preevaluations from cp_build_modify_expr. */
17149 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
17150 expr
= TREE_OPERAND (expr
, 1))
17152 /* Special-case __builtin_clear_padding call before
17153 __builtin_memcmp. */
17154 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
)
17156 tree fndecl
= get_callee_fndecl (TREE_OPERAND (expr
, 0));
17158 && fndecl_built_in_p (fndecl
, BUILT_IN_CLEAR_PADDING
)
17159 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
17161 || goa_stabilize_expr (&TREE_OPERAND (expr
, 0), NULL
,
17163 target_expr
, true, depth
)))
17167 saw_lhs
= goa_stabilize_expr (&TREE_OPERAND (expr
, 0),
17168 pre_p
, lhs_addr
, lhs_var
,
17169 target_expr
, true, depth
);
17170 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1),
17171 pre_p
, lhs_addr
, lhs_var
,
17172 target_expr
, rhs
, depth
);
17178 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
17181 return goa_stabilize_expr (&expr
, pre_p
, lhs_addr
, lhs_var
,
17182 target_expr
, rhs
, depth
);
17184 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
,
17185 target_expr
, rhs
, depth
);
17187 if (!goa_stabilize_expr (&TREE_OPERAND (expr
, 0), NULL
, lhs_addr
,
17188 lhs_var
, target_expr
, true, depth
))
17190 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
17191 lhs_addr
, lhs_var
, target_expr
, true,
17193 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
17194 lhs_addr
, lhs_var
, target_expr
, true,
17196 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 2), pre_p
,
17197 lhs_addr
, lhs_var
, target_expr
, true,
17201 if (TARGET_EXPR_INITIAL (expr
))
17203 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
,
17204 lhs_var
, target_expr
, true,
17207 if (expr
== target_expr
)
17211 saw_lhs
= goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr
),
17212 pre_p
, lhs_addr
, lhs_var
,
17213 target_expr
, true, depth
);
17214 if (saw_lhs
&& target_expr
== NULL_TREE
&& pre_p
)
17215 target_expr
= expr
;
17223 case tcc_reference
:
17224 if (TREE_CODE (expr
) == BIT_FIELD_REF
17225 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
17226 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
17227 lhs_addr
, lhs_var
, target_expr
, true,
17231 if (TREE_CODE (expr
) == CALL_EXPR
)
17233 if (tree fndecl
= get_callee_fndecl (expr
))
17234 if (fndecl_built_in_p (fndecl
, BUILT_IN_CLEAR_PADDING
,
17237 int nargs
= call_expr_nargs (expr
);
17238 for (int i
= 0; i
< nargs
; i
++)
17239 saw_lhs
|= goa_stabilize_expr (&CALL_EXPR_ARG (expr
, i
),
17240 pre_p
, lhs_addr
, lhs_var
,
17241 target_expr
, true, depth
);
17250 if (saw_lhs
== 0 && pre_p
)
17252 enum gimplify_status gs
;
17253 if (TREE_CODE (expr
) == CALL_EXPR
&& VOID_TYPE_P (TREE_TYPE (expr
)))
17255 gimplify_stmt (&expr
, pre_p
);
17259 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
17261 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
);
17262 if (gs
!= GS_ALL_DONE
)
17269 /* Gimplify an OMP_ATOMIC statement. */
17271 static enum gimplify_status
17272 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
17274 tree addr
= TREE_OPERAND (*expr_p
, 0);
17275 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
17276 ? NULL
: TREE_OPERAND (*expr_p
, 1);
17277 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
17279 gomp_atomic_load
*loadstmt
;
17280 gomp_atomic_store
*storestmt
;
17281 tree target_expr
= NULL_TREE
;
17283 tmp_load
= create_tmp_reg (type
);
17285 && goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
, target_expr
,
17289 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
17293 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
,
17294 OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
17295 gimplify_seq_add_stmt (pre_p
, loadstmt
);
17298 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
17299 representatives. Use BIT_FIELD_REF on the lhs instead. */
17301 if (TREE_CODE (rhs
) == COND_EXPR
)
17302 rhsarg
= TREE_OPERAND (rhs
, 1);
17303 if (TREE_CODE (rhsarg
) == BIT_INSERT_EXPR
17304 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load
)))
17306 tree bitpos
= TREE_OPERAND (rhsarg
, 2);
17307 tree op1
= TREE_OPERAND (rhsarg
, 1);
17309 tree tmp_store
= tmp_load
;
17310 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_CAPTURE_OLD
)
17311 tmp_store
= get_initialized_tmp_var (tmp_load
, pre_p
);
17312 if (INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
17313 bitsize
= bitsize_int (TYPE_PRECISION (TREE_TYPE (op1
)));
17315 bitsize
= TYPE_SIZE (TREE_TYPE (op1
));
17316 gcc_assert (TREE_OPERAND (rhsarg
, 0) == tmp_load
);
17317 tree t
= build2_loc (EXPR_LOCATION (rhsarg
),
17318 MODIFY_EXPR
, void_type_node
,
17319 build3_loc (EXPR_LOCATION (rhsarg
),
17320 BIT_FIELD_REF
, TREE_TYPE (op1
),
17321 tmp_store
, bitsize
, bitpos
), op1
);
17322 if (TREE_CODE (rhs
) == COND_EXPR
)
17323 t
= build3_loc (EXPR_LOCATION (rhs
), COND_EXPR
, void_type_node
,
17324 TREE_OPERAND (rhs
, 0), t
, void_node
);
17325 gimplify_and_add (t
, pre_p
);
17328 bool save_allow_rhs_cond_expr
= gimplify_ctxp
->allow_rhs_cond_expr
;
17329 if (TREE_CODE (rhs
) == COND_EXPR
)
17330 gimplify_ctxp
->allow_rhs_cond_expr
= true;
17331 enum gimplify_status gs
= gimplify_expr (&rhs
, pre_p
, NULL
,
17332 is_gimple_val
, fb_rvalue
);
17333 gimplify_ctxp
->allow_rhs_cond_expr
= save_allow_rhs_cond_expr
;
17334 if (gs
!= GS_ALL_DONE
)
17338 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
17341 = gimple_build_omp_atomic_store (rhs
, OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
17342 if (TREE_CODE (*expr_p
) != OMP_ATOMIC_READ
&& OMP_ATOMIC_WEAK (*expr_p
))
17344 gimple_omp_atomic_set_weak (loadstmt
);
17345 gimple_omp_atomic_set_weak (storestmt
);
17347 gimplify_seq_add_stmt (pre_p
, storestmt
);
17348 switch (TREE_CODE (*expr_p
))
17350 case OMP_ATOMIC_READ
:
17351 case OMP_ATOMIC_CAPTURE_OLD
:
17352 *expr_p
= tmp_load
;
17353 gimple_omp_atomic_set_need_value (loadstmt
);
17355 case OMP_ATOMIC_CAPTURE_NEW
:
17357 gimple_omp_atomic_set_need_value (storestmt
);
17364 return GS_ALL_DONE
;
17367 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
17368 body, and adding some EH bits. */
17370 static enum gimplify_status
17371 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
17373 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
17375 gtransaction
*trans_stmt
;
17376 gimple_seq body
= NULL
;
17379 /* Wrap the transaction body in a BIND_EXPR so we have a context
17380 where to put decls for OMP. */
17381 if (TREE_CODE (tbody
) != BIND_EXPR
)
17383 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
17384 TREE_SIDE_EFFECTS (bind
) = 1;
17385 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
17386 TRANSACTION_EXPR_BODY (expr
) = bind
;
17389 push_gimplify_context ();
17390 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
17392 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
17393 pop_gimplify_context (body_stmt
);
17395 trans_stmt
= gimple_build_transaction (body
);
17396 if (TRANSACTION_EXPR_OUTER (expr
))
17397 subcode
= GTMA_IS_OUTER
;
17398 else if (TRANSACTION_EXPR_RELAXED (expr
))
17399 subcode
= GTMA_IS_RELAXED
;
17400 gimple_transaction_set_subcode (trans_stmt
, subcode
);
17402 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
17410 *expr_p
= NULL_TREE
;
17411 return GS_ALL_DONE
;
17414 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
17415 is the OMP_BODY of the original EXPR (which has already been
17416 gimplified so it's not present in the EXPR).
17418 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
17421 gimplify_omp_ordered (tree expr
, gimple_seq body
)
17426 tree source_c
= NULL_TREE
;
17427 tree sink_c
= NULL_TREE
;
17429 if (gimplify_omp_ctxp
)
17431 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
17432 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
17433 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ())
17435 error_at (OMP_CLAUSE_LOCATION (c
),
17436 "%<ordered%> construct with %qs clause must be "
17437 "closely nested inside a loop with %<ordered%> clause",
17438 OMP_CLAUSE_DOACROSS_DEPEND (c
) ? "depend" : "doacross");
17441 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
17442 && OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SINK
)
17446 if (OMP_CLAUSE_DECL (c
) == NULL_TREE
)
17447 continue; /* omp_cur_iteration - 1 */
17448 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
17449 decls
&& TREE_CODE (decls
) == TREE_LIST
;
17450 decls
= TREE_CHAIN (decls
), ++i
)
17451 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
17453 else if (TREE_VALUE (decls
)
17454 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
17456 error_at (OMP_CLAUSE_LOCATION (c
),
17457 "variable %qE is not an iteration "
17458 "of outermost loop %d, expected %qE",
17459 TREE_VALUE (decls
), i
+ 1,
17460 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
17466 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
17467 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
17469 error_at (OMP_CLAUSE_LOCATION (c
),
17470 "number of variables in %qs clause with "
17471 "%<sink%> modifier does not match number of "
17472 "iteration variables",
17473 OMP_CLAUSE_DOACROSS_DEPEND (c
)
17474 ? "depend" : "doacross");
17478 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
17479 && OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SOURCE
)
17483 error_at (OMP_CLAUSE_LOCATION (c
),
17484 "more than one %qs clause with %<source%> "
17485 "modifier on an %<ordered%> construct",
17486 OMP_CLAUSE_DOACROSS_DEPEND (source_c
)
17487 ? "depend" : "doacross");
17494 if (source_c
&& sink_c
)
17496 error_at (OMP_CLAUSE_LOCATION (source_c
),
17497 "%qs clause with %<source%> modifier specified "
17498 "together with %qs clauses with %<sink%> modifier "
17499 "on the same construct",
17500 OMP_CLAUSE_DOACROSS_DEPEND (source_c
) ? "depend" : "doacross",
17501 OMP_CLAUSE_DOACROSS_DEPEND (sink_c
) ? "depend" : "doacross");
17506 return gimple_build_nop ();
17507 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
17510 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
17511 expression produces a value to be used as an operand inside a GIMPLE
17512 statement, the value will be stored back in *EXPR_P. This value will
17513 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
17514 an SSA_NAME. The corresponding sequence of GIMPLE statements is
17515 emitted in PRE_P and POST_P.
17517 Additionally, this process may overwrite parts of the input
17518 expression during gimplification. Ideally, it should be
17519 possible to do non-destructive gimplification.
17521 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
17522 the expression needs to evaluate to a value to be used as
17523 an operand in a GIMPLE statement, this value will be stored in
17524 *EXPR_P on exit. This happens when the caller specifies one
17525 of fb_lvalue or fb_rvalue fallback flags.
17527 PRE_P will contain the sequence of GIMPLE statements corresponding
17528 to the evaluation of EXPR and all the side-effects that must
17529 be executed before the main expression. On exit, the last
17530 statement of PRE_P is the core statement being gimplified. For
17531 instance, when gimplifying 'if (++a)' the last statement in
17532 PRE_P will be 'if (t.1)' where t.1 is the result of
17533 pre-incrementing 'a'.
17535 POST_P will contain the sequence of GIMPLE statements corresponding
17536 to the evaluation of all the side-effects that must be executed
17537 after the main expression. If this is NULL, the post
17538 side-effects are stored at the end of PRE_P.
17540 The reason why the output is split in two is to handle post
17541 side-effects explicitly. In some cases, an expression may have
17542 inner and outer post side-effects which need to be emitted in
17543 an order different from the one given by the recursive
17544 traversal. For instance, for the expression (*p--)++ the post
17545 side-effects of '--' must actually occur *after* the post
17546 side-effects of '++'. However, gimplification will first visit
17547 the inner expression, so if a separate POST sequence was not
17548 used, the resulting sequence would be:
17555 However, the post-decrement operation in line #2 must not be
17556 evaluated until after the store to *p at line #4, so the
17557 correct sequence should be:
17564 So, by specifying a separate post queue, it is possible
17565 to emit the post side-effects in the correct order.
17566 If POST_P is NULL, an internal queue will be used. Before
17567 returning to the caller, the sequence POST_P is appended to
17568 the main output sequence PRE_P.
17570 GIMPLE_TEST_F points to a function that takes a tree T and
17571 returns nonzero if T is in the GIMPLE form requested by the
17572 caller. The GIMPLE predicates are in gimple.cc.
17574 FALLBACK tells the function what sort of a temporary we want if
17575 gimplification cannot produce an expression that complies with
17578 fb_none means that no temporary should be generated
17579 fb_rvalue means that an rvalue is OK to generate
17580 fb_lvalue means that an lvalue is OK to generate
17581 fb_either means that either is OK, but an lvalue is preferable.
17582 fb_mayfail means that gimplification may fail (in which case
17583 GS_ERROR will be returned)
17585 The return value is either GS_ERROR or GS_ALL_DONE, since this
17586 function iterates until EXPR is completely gimplified or an error
17589 enum gimplify_status
17590 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
17591 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
17594 gimple_seq internal_pre
= NULL
;
17595 gimple_seq internal_post
= NULL
;
17598 location_t saved_location
;
17599 enum gimplify_status ret
;
17600 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
17603 save_expr
= *expr_p
;
17604 if (save_expr
== NULL_TREE
)
17605 return GS_ALL_DONE
;
17607 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
17608 is_statement
= gimple_test_f
== is_gimple_stmt
;
17610 gcc_assert (pre_p
);
17612 /* Consistency checks. */
17613 if (gimple_test_f
== is_gimple_reg
)
17614 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
17615 else if (gimple_test_f
== is_gimple_val
17616 || gimple_test_f
== is_gimple_call_addr
17617 || gimple_test_f
== is_gimple_condexpr_for_cond
17618 || gimple_test_f
== is_gimple_mem_rhs
17619 || gimple_test_f
== is_gimple_mem_rhs_or_call
17620 || gimple_test_f
== is_gimple_reg_rhs
17621 || gimple_test_f
== is_gimple_reg_rhs_or_call
17622 || gimple_test_f
== is_gimple_asm_val
17623 || gimple_test_f
== is_gimple_mem_ref_addr
)
17624 gcc_assert (fallback
& fb_rvalue
);
17625 else if (gimple_test_f
== is_gimple_min_lval
17626 || gimple_test_f
== is_gimple_lvalue
)
17627 gcc_assert (fallback
& fb_lvalue
);
17628 else if (gimple_test_f
== is_gimple_addressable
)
17629 gcc_assert (fallback
& fb_either
);
17630 else if (gimple_test_f
== is_gimple_stmt
)
17631 gcc_assert (fallback
== fb_none
);
17634 /* We should have recognized the GIMPLE_TEST_F predicate to
17635 know what kind of fallback to use in case a temporary is
17636 needed to hold the value or address of *EXPR_P. */
17637 gcc_unreachable ();
17640 /* We used to check the predicate here and return immediately if it
17641 succeeds. This is wrong; the design is for gimplification to be
17642 idempotent, and for the predicates to only test for valid forms, not
17643 whether they are fully simplified. */
17645 pre_p
= &internal_pre
;
17647 if (post_p
== NULL
)
17648 post_p
= &internal_post
;
17650 /* Remember the last statements added to PRE_P and POST_P. Every
17651 new statement added by the gimplification helpers needs to be
17652 annotated with location information. To centralize the
17653 responsibility, we remember the last statement that had been
17654 added to both queues before gimplifying *EXPR_P. If
17655 gimplification produces new statements in PRE_P and POST_P, those
17656 statements will be annotated with the same location information
17658 pre_last_gsi
= gsi_last (*pre_p
);
17659 post_last_gsi
= gsi_last (*post_p
);
17661 saved_location
= input_location
;
17662 if (save_expr
!= error_mark_node
17663 && EXPR_HAS_LOCATION (*expr_p
))
17664 input_location
= EXPR_LOCATION (*expr_p
);
17666 /* Loop over the specific gimplifiers until the toplevel node
17667 remains the same. */
17670 /* Strip away as many useless type conversions as possible
17671 at the toplevel. */
17672 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
17674 /* Remember the expr. */
17675 save_expr
= *expr_p
;
17677 /* Die, die, die, my darling. */
17678 if (error_operand_p (save_expr
))
17684 /* Do any language-specific gimplification. */
17685 ret
= ((enum gimplify_status
)
17686 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
17689 if (*expr_p
== NULL_TREE
)
17691 if (*expr_p
!= save_expr
)
17694 else if (ret
!= GS_UNHANDLED
)
17697 /* Make sure that all the cases set 'ret' appropriately. */
17698 ret
= GS_UNHANDLED
;
17699 switch (TREE_CODE (*expr_p
))
17701 /* First deal with the special cases. */
17703 case POSTINCREMENT_EXPR
:
17704 case POSTDECREMENT_EXPR
:
17705 case PREINCREMENT_EXPR
:
17706 case PREDECREMENT_EXPR
:
17707 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
17708 fallback
!= fb_none
,
17709 TREE_TYPE (*expr_p
));
17712 case VIEW_CONVERT_EXPR
:
17713 if ((fallback
& fb_rvalue
)
17714 && is_gimple_reg_type (TREE_TYPE (*expr_p
))
17715 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
17717 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
17718 post_p
, is_gimple_val
, fb_rvalue
);
17719 recalculate_side_effects (*expr_p
);
17725 case ARRAY_RANGE_REF
:
17726 case REALPART_EXPR
:
17727 case IMAGPART_EXPR
:
17728 case COMPONENT_REF
:
17729 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
17730 fallback
? fallback
: fb_rvalue
);
17734 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
17736 /* C99 code may assign to an array in a structure value of a
17737 conditional expression, and this has undefined behavior
17738 only on execution, so create a temporary if an lvalue is
17740 if (fallback
== fb_lvalue
)
17742 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
17743 mark_addressable (*expr_p
);
17749 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
17751 /* C99 code may assign to an array in a structure returned
17752 from a function, and this has undefined behavior only on
17753 execution, so create a temporary if an lvalue is
17755 if (fallback
== fb_lvalue
)
17757 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
17758 mark_addressable (*expr_p
);
17764 gcc_unreachable ();
17766 case OMP_ARRAY_SECTION
:
17767 gcc_unreachable ();
17769 case COMPOUND_EXPR
:
17770 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
17773 case COMPOUND_LITERAL_EXPR
:
17774 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
17775 gimple_test_f
, fallback
);
17780 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
17781 fallback
!= fb_none
);
17784 case TRUTH_ANDIF_EXPR
:
17785 case TRUTH_ORIF_EXPR
:
17787 /* Preserve the original type of the expression and the
17788 source location of the outer expression. */
17789 tree org_type
= TREE_TYPE (*expr_p
);
17790 *expr_p
= gimple_boolify (*expr_p
);
17791 *expr_p
= build3_loc (input_location
, COND_EXPR
,
17795 org_type
, boolean_true_node
),
17798 org_type
, boolean_false_node
));
17803 case TRUTH_NOT_EXPR
:
17805 tree type
= TREE_TYPE (*expr_p
);
17806 /* The parsers are careful to generate TRUTH_NOT_EXPR
17807 only with operands that are always zero or one.
17808 We do not fold here but handle the only interesting case
17809 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
17810 *expr_p
= gimple_boolify (*expr_p
);
17811 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
17812 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
17813 TREE_TYPE (*expr_p
),
17814 TREE_OPERAND (*expr_p
, 0));
17816 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
17817 TREE_TYPE (*expr_p
),
17818 TREE_OPERAND (*expr_p
, 0),
17819 build_int_cst (TREE_TYPE (*expr_p
), 1));
17820 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
17821 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
17827 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
17830 case ANNOTATE_EXPR
:
17832 tree cond
= TREE_OPERAND (*expr_p
, 0);
17833 tree kind
= TREE_OPERAND (*expr_p
, 1);
17834 tree data
= TREE_OPERAND (*expr_p
, 2);
17835 tree type
= TREE_TYPE (cond
);
17836 if (!INTEGRAL_TYPE_P (type
))
17842 tree tmp
= create_tmp_var (type
);
17843 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
17845 = gimple_build_call_internal (IFN_ANNOTATE
, 3, cond
, kind
, data
);
17846 gimple_call_set_lhs (call
, tmp
);
17847 gimplify_seq_add_stmt (pre_p
, call
);
17854 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
17858 if (IS_EMPTY_STMT (*expr_p
))
17864 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
17865 || fallback
== fb_none
)
17867 /* Just strip a conversion to void (or in void context) and
17869 *expr_p
= TREE_OPERAND (*expr_p
, 0);
17874 ret
= gimplify_conversion (expr_p
);
17875 if (ret
== GS_ERROR
)
17877 if (*expr_p
!= save_expr
)
17881 case FIX_TRUNC_EXPR
:
17882 /* unary_expr: ... | '(' cast ')' val | ... */
17883 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
17884 is_gimple_val
, fb_rvalue
);
17885 recalculate_side_effects (*expr_p
);
17890 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
17891 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
17892 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
17894 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
17895 if (*expr_p
!= save_expr
)
17901 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
17902 is_gimple_reg
, fb_rvalue
);
17903 if (ret
== GS_ERROR
)
17906 recalculate_side_effects (*expr_p
);
17907 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
17908 TREE_TYPE (*expr_p
),
17909 TREE_OPERAND (*expr_p
, 0),
17910 build_int_cst (saved_ptr_type
, 0));
17911 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
17912 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
17917 /* We arrive here through the various re-gimplifcation paths. */
17919 /* First try re-folding the whole thing. */
17920 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
17921 TREE_OPERAND (*expr_p
, 0),
17922 TREE_OPERAND (*expr_p
, 1));
17925 REF_REVERSE_STORAGE_ORDER (tmp
)
17926 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
17928 recalculate_side_effects (*expr_p
);
17932 /* Avoid re-gimplifying the address operand if it is already
17933 in suitable form. Re-gimplifying would mark the address
17934 operand addressable. Always gimplify when not in SSA form
17935 as we still may have to gimplify decls with value-exprs. */
17936 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
17937 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
17939 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
17940 is_gimple_mem_ref_addr
, fb_rvalue
);
17941 if (ret
== GS_ERROR
)
17944 recalculate_side_effects (*expr_p
);
17948 /* Constants need not be gimplified. */
17955 /* Drop the overflow flag on constants, we do not want
17956 that in the GIMPLE IL. */
17957 if (TREE_OVERFLOW_P (*expr_p
))
17958 *expr_p
= drop_tree_overflow (*expr_p
);
17963 /* If we require an lvalue, such as for ADDR_EXPR, retain the
17964 CONST_DECL node. Otherwise the decl is replaceable by its
17966 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
17967 if (fallback
& fb_lvalue
)
17971 *expr_p
= DECL_INITIAL (*expr_p
);
17977 ret
= gimplify_decl_expr (expr_p
, pre_p
);
17981 ret
= gimplify_bind_expr (expr_p
, pre_p
);
17985 ret
= gimplify_loop_expr (expr_p
, pre_p
);
17989 ret
= gimplify_switch_expr (expr_p
, pre_p
);
17993 ret
= gimplify_exit_expr (expr_p
);
17997 /* If the target is not LABEL, then it is a computed jump
17998 and the target needs to be gimplified. */
17999 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
18001 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
18002 NULL
, is_gimple_val
, fb_rvalue
);
18003 if (ret
== GS_ERROR
)
18006 gimplify_seq_add_stmt (pre_p
,
18007 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
18012 gimplify_seq_add_stmt (pre_p
,
18013 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
18014 PREDICT_EXPR_OUTCOME (*expr_p
)));
18019 ret
= gimplify_label_expr (expr_p
, pre_p
);
18020 label
= LABEL_EXPR_LABEL (*expr_p
);
18021 gcc_assert (decl_function_context (label
) == current_function_decl
);
18023 /* If the label is used in a goto statement, or address of the label
18024 is taken, we need to unpoison all variables that were seen so far.
18025 Doing so would prevent us from reporting a false positives. */
18026 if (asan_poisoned_variables
18027 && asan_used_labels
!= NULL
18028 && asan_used_labels
->contains (label
)
18029 && !gimplify_omp_ctxp
)
18030 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
18033 case CASE_LABEL_EXPR
:
18034 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
18036 if (gimplify_ctxp
->live_switch_vars
)
18037 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
18042 ret
= gimplify_return_expr (*expr_p
, pre_p
);
18046 /* Don't reduce this in place; let gimplify_init_constructor work its
18047 magic. Buf if we're just elaborating this for side effects, just
18048 gimplify any element that has side-effects. */
18049 if (fallback
== fb_none
)
18051 unsigned HOST_WIDE_INT ix
;
18053 tree temp
= NULL_TREE
;
18054 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
18055 if (TREE_SIDE_EFFECTS (val
))
18056 append_to_statement_list (val
, &temp
);
18059 ret
= temp
? GS_OK
: GS_ALL_DONE
;
18061 /* C99 code may assign to an array in a constructed
18062 structure or union, and this has undefined behavior only
18063 on execution, so create a temporary if an lvalue is
18065 else if (fallback
== fb_lvalue
)
18067 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
18068 mark_addressable (*expr_p
);
18075 /* The following are special cases that are not handled by the
18076 original GIMPLE grammar. */
18078 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
18081 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
18084 case BIT_FIELD_REF
:
18085 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
18086 post_p
, is_gimple_lvalue
, fb_either
);
18087 recalculate_side_effects (*expr_p
);
18090 case TARGET_MEM_REF
:
18092 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
18094 if (TMR_BASE (*expr_p
))
18095 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
18096 post_p
, is_gimple_mem_ref_addr
, fb_either
);
18097 if (TMR_INDEX (*expr_p
))
18098 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
18099 post_p
, is_gimple_val
, fb_rvalue
);
18100 if (TMR_INDEX2 (*expr_p
))
18101 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
18102 post_p
, is_gimple_val
, fb_rvalue
);
18103 /* TMR_STEP and TMR_OFFSET are always integer constants. */
18104 ret
= MIN (r0
, r1
);
18108 case NON_LVALUE_EXPR
:
18109 /* This should have been stripped above. */
18110 gcc_unreachable ();
18113 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
18116 case TRY_FINALLY_EXPR
:
18117 case TRY_CATCH_EXPR
:
18119 gimple_seq eval
, cleanup
;
18122 /* Calls to destructors are generated automatically in FINALLY/CATCH
18123 block. They should have location as UNKNOWN_LOCATION. However,
18124 gimplify_call_expr will reset these call stmts to input_location
18125 if it finds stmt's location is unknown. To prevent resetting for
18126 destructors, we set the input_location to unknown.
18127 Note that this only affects the destructor calls in FINALLY/CATCH
18128 block, and will automatically reset to its original value by the
18129 end of gimplify_expr. */
18130 input_location
= UNKNOWN_LOCATION
;
18131 eval
= cleanup
= NULL
;
18132 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
18133 if (TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
18134 && TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == EH_ELSE_EXPR
)
18136 gimple_seq n
= NULL
, e
= NULL
;
18137 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
18139 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
18141 if (!gimple_seq_empty_p (n
) && !gimple_seq_empty_p (e
))
18143 geh_else
*stmt
= gimple_build_eh_else (n
, e
);
18144 gimple_seq_add_stmt (&cleanup
, stmt
);
18148 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
18149 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
18150 if (gimple_seq_empty_p (cleanup
))
18152 gimple_seq_add_seq (pre_p
, eval
);
18156 try_
= gimple_build_try (eval
, cleanup
,
18157 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
18158 ? GIMPLE_TRY_FINALLY
18159 : GIMPLE_TRY_CATCH
);
18160 if (EXPR_HAS_LOCATION (save_expr
))
18161 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
18162 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
18163 gimple_set_location (try_
, saved_location
);
18164 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
18165 gimple_try_set_catch_is_cleanup (try_
,
18166 TRY_CATCH_IS_CLEANUP (*expr_p
));
18167 gimplify_seq_add_stmt (pre_p
, try_
);
18172 case CLEANUP_POINT_EXPR
:
18173 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
18177 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
18183 gimple_seq handler
= NULL
;
18184 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
18185 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
18186 gimplify_seq_add_stmt (pre_p
, c
);
18191 case EH_FILTER_EXPR
:
18194 gimple_seq failure
= NULL
;
18196 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
18197 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
18198 copy_warning (ehf
, *expr_p
);
18199 gimplify_seq_add_stmt (pre_p
, ehf
);
18206 enum gimplify_status r0
, r1
;
18207 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
18208 post_p
, is_gimple_val
, fb_rvalue
);
18209 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
18210 post_p
, is_gimple_val
, fb_rvalue
);
18211 TREE_SIDE_EFFECTS (*expr_p
) = 0;
18212 ret
= MIN (r0
, r1
);
18217 /* We get here when taking the address of a label. We mark
18218 the label as "forced"; meaning it can never be removed and
18219 it is a potential target for any computed goto. */
18220 FORCED_LABEL (*expr_p
) = 1;
18224 case STATEMENT_LIST
:
18225 ret
= gimplify_statement_list (expr_p
, pre_p
);
18228 case WITH_SIZE_EXPR
:
18230 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
18231 post_p
== &internal_post
? NULL
: post_p
,
18232 gimple_test_f
, fallback
);
18233 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
18234 is_gimple_val
, fb_rvalue
);
18241 ret
= gimplify_var_or_parm_decl (expr_p
);
18245 /* When within an OMP context, notice uses of variables. */
18246 if (gimplify_omp_ctxp
)
18247 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
18251 case DEBUG_EXPR_DECL
:
18252 gcc_unreachable ();
18254 case DEBUG_BEGIN_STMT
:
18255 gimplify_seq_add_stmt (pre_p
,
18256 gimple_build_debug_begin_stmt
18257 (TREE_BLOCK (*expr_p
),
18258 EXPR_LOCATION (*expr_p
)));
18264 /* Allow callbacks into the gimplifier during optimization. */
18269 gimplify_omp_parallel (expr_p
, pre_p
);
18274 gimplify_omp_task (expr_p
, pre_p
);
18280 /* Temporarily disable into_ssa, as scan_omp_simd
18281 which calls copy_gimple_seq_and_replace_locals can't deal
18282 with SSA_NAMEs defined outside of the body properly. */
18283 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
18284 gimplify_ctxp
->into_ssa
= false;
18285 ret
= gimplify_omp_for (expr_p
, pre_p
);
18286 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
18291 case OMP_DISTRIBUTE
:
18294 ret
= gimplify_omp_for (expr_p
, pre_p
);
18298 ret
= gimplify_omp_loop (expr_p
, pre_p
);
18302 gimplify_oacc_cache (expr_p
, pre_p
);
18307 gimplify_oacc_declare (expr_p
, pre_p
);
18311 case OACC_HOST_DATA
:
18314 case OACC_PARALLEL
:
18320 case OMP_TARGET_DATA
:
18322 gimplify_omp_workshare (expr_p
, pre_p
);
18326 case OACC_ENTER_DATA
:
18327 case OACC_EXIT_DATA
:
18329 case OMP_TARGET_UPDATE
:
18330 case OMP_TARGET_ENTER_DATA
:
18331 case OMP_TARGET_EXIT_DATA
:
18332 gimplify_omp_target_update (expr_p
, pre_p
);
18337 case OMP_STRUCTURED_BLOCK
:
18344 gimple_seq body
= NULL
;
18346 bool saved_in_omp_construct
= in_omp_construct
;
18348 in_omp_construct
= true;
18349 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
18350 in_omp_construct
= saved_in_omp_construct
;
18351 switch (TREE_CODE (*expr_p
))
18354 g
= gimple_build_omp_section (body
);
18356 case OMP_STRUCTURED_BLOCK
:
18357 g
= gimple_build_omp_structured_block (body
);
18360 g
= gimple_build_omp_master (body
);
18363 g
= gimplify_omp_ordered (*expr_p
, body
);
18364 if (OMP_BODY (*expr_p
) == NULL_TREE
18365 && gimple_code (g
) == GIMPLE_OMP_ORDERED
)
18366 gimple_omp_ordered_standalone (g
);
18369 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p
),
18370 pre_p
, ORT_WORKSHARE
, OMP_MASKED
);
18371 gimplify_adjust_omp_clauses (pre_p
, body
,
18372 &OMP_MASKED_CLAUSES (*expr_p
),
18374 g
= gimple_build_omp_masked (body
,
18375 OMP_MASKED_CLAUSES (*expr_p
));
18378 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
18379 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
18380 gimplify_adjust_omp_clauses (pre_p
, body
,
18381 &OMP_CRITICAL_CLAUSES (*expr_p
),
18383 g
= gimple_build_omp_critical (body
,
18384 OMP_CRITICAL_NAME (*expr_p
),
18385 OMP_CRITICAL_CLAUSES (*expr_p
));
18388 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p
),
18389 pre_p
, ORT_WORKSHARE
, OMP_SCAN
);
18390 gimplify_adjust_omp_clauses (pre_p
, body
,
18391 &OMP_SCAN_CLAUSES (*expr_p
),
18393 g
= gimple_build_omp_scan (body
, OMP_SCAN_CLAUSES (*expr_p
));
18396 gcc_unreachable ();
18398 gimplify_seq_add_stmt (pre_p
, g
);
18403 case OMP_TASKGROUP
:
18405 gimple_seq body
= NULL
;
18407 tree
*pclauses
= &OMP_TASKGROUP_CLAUSES (*expr_p
);
18408 bool saved_in_omp_construct
= in_omp_construct
;
18409 gimplify_scan_omp_clauses (pclauses
, pre_p
, ORT_TASKGROUP
,
18411 gimplify_adjust_omp_clauses (pre_p
, NULL
, pclauses
, OMP_TASKGROUP
);
18413 in_omp_construct
= true;
18414 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
18415 in_omp_construct
= saved_in_omp_construct
;
18416 gimple_seq cleanup
= NULL
;
18417 tree fn
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
18418 gimple
*g
= gimple_build_call (fn
, 0);
18419 gimple_seq_add_stmt (&cleanup
, g
);
18420 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
18422 gimple_seq_add_stmt (&body
, g
);
18423 g
= gimple_build_omp_taskgroup (body
, *pclauses
);
18424 gimplify_seq_add_stmt (pre_p
, g
);
18430 case OMP_ATOMIC_READ
:
18431 case OMP_ATOMIC_CAPTURE_OLD
:
18432 case OMP_ATOMIC_CAPTURE_NEW
:
18433 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
18436 case TRANSACTION_EXPR
:
18437 ret
= gimplify_transaction (expr_p
, pre_p
);
18440 case TRUTH_AND_EXPR
:
18441 case TRUTH_OR_EXPR
:
18442 case TRUTH_XOR_EXPR
:
18444 tree orig_type
= TREE_TYPE (*expr_p
);
18445 tree new_type
, xop0
, xop1
;
18446 *expr_p
= gimple_boolify (*expr_p
);
18447 new_type
= TREE_TYPE (*expr_p
);
18448 if (!useless_type_conversion_p (orig_type
, new_type
))
18450 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
18455 /* Boolified binary truth expressions are semantically equivalent
18456 to bitwise binary expressions. Canonicalize them to the
18457 bitwise variant. */
18458 switch (TREE_CODE (*expr_p
))
18460 case TRUTH_AND_EXPR
:
18461 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
18463 case TRUTH_OR_EXPR
:
18464 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
18466 case TRUTH_XOR_EXPR
:
18467 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
18472 /* Now make sure that operands have compatible type to
18473 expression's new_type. */
18474 xop0
= TREE_OPERAND (*expr_p
, 0);
18475 xop1
= TREE_OPERAND (*expr_p
, 1);
18476 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
18477 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
18480 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
18481 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
18484 /* Continue classified as tcc_binary. */
18488 case VEC_COND_EXPR
:
18491 case VEC_PERM_EXPR
:
18492 /* Classified as tcc_expression. */
18495 case BIT_INSERT_EXPR
:
18496 /* Argument 3 is a constant. */
18499 case POINTER_PLUS_EXPR
:
18501 enum gimplify_status r0
, r1
;
18502 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
18503 post_p
, is_gimple_val
, fb_rvalue
);
18504 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
18505 post_p
, is_gimple_val
, fb_rvalue
);
18506 recalculate_side_effects (*expr_p
);
18507 ret
= MIN (r0
, r1
);
18512 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
18514 case tcc_comparison
:
18515 /* Handle comparison of objects of non scalar mode aggregates
18516 with a call to memcmp. It would be nice to only have to do
18517 this for variable-sized objects, but then we'd have to allow
18518 the same nest of reference nodes we allow for MODIFY_EXPR and
18519 that's too complex.
18521 Compare scalar mode aggregates as scalar mode values. Using
18522 memcmp for them would be very inefficient at best, and is
18523 plain wrong if bitfields are involved. */
18524 if (error_operand_p (TREE_OPERAND (*expr_p
, 1)))
18528 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
18530 /* Vector comparisons need no boolification. */
18531 if (TREE_CODE (type
) == VECTOR_TYPE
)
18533 else if (!AGGREGATE_TYPE_P (type
))
18535 tree org_type
= TREE_TYPE (*expr_p
);
18536 *expr_p
= gimple_boolify (*expr_p
);
18537 if (!useless_type_conversion_p (org_type
,
18538 TREE_TYPE (*expr_p
)))
18540 *expr_p
= fold_convert_loc (input_location
,
18541 org_type
, *expr_p
);
18547 else if (TYPE_MODE (type
) != BLKmode
)
18548 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
18550 ret
= gimplify_variable_sized_compare (expr_p
);
18554 /* If *EXPR_P does not need to be special-cased, handle it
18555 according to its class. */
18557 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
18558 post_p
, is_gimple_val
, fb_rvalue
);
18564 enum gimplify_status r0
, r1
;
18566 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
18567 post_p
, is_gimple_val
, fb_rvalue
);
18568 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
18569 post_p
, is_gimple_val
, fb_rvalue
);
18571 ret
= MIN (r0
, r1
);
18577 enum gimplify_status r0
, r1
, r2
;
18579 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
18580 post_p
, is_gimple_val
, fb_rvalue
);
18581 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
18582 post_p
, is_gimple_val
, fb_rvalue
);
18583 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
18584 post_p
, is_gimple_val
, fb_rvalue
);
18586 ret
= MIN (MIN (r0
, r1
), r2
);
18590 case tcc_declaration
:
18593 goto dont_recalculate
;
18596 gcc_unreachable ();
18599 recalculate_side_effects (*expr_p
);
18605 gcc_assert (*expr_p
|| ret
!= GS_OK
);
18607 while (ret
== GS_OK
);
18609 /* If we encountered an error_mark somewhere nested inside, either
18610 stub out the statement or propagate the error back out. */
18611 if (ret
== GS_ERROR
)
18618 /* This was only valid as a return value from the langhook, which
18619 we handled. Make sure it doesn't escape from any other context. */
18620 gcc_assert (ret
!= GS_UNHANDLED
);
18622 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
18624 /* We aren't looking for a value, and we don't have a valid
18625 statement. If it doesn't have side-effects, throw it away.
18626 We can also get here with code such as "*&&L;", where L is
18627 a LABEL_DECL that is marked as FORCED_LABEL. */
18628 if (TREE_CODE (*expr_p
) == LABEL_DECL
18629 || !TREE_SIDE_EFFECTS (*expr_p
))
18631 else if (!TREE_THIS_VOLATILE (*expr_p
))
18633 /* This is probably a _REF that contains something nested that
18634 has side effects. Recurse through the operands to find it. */
18635 enum tree_code code
= TREE_CODE (*expr_p
);
18639 case COMPONENT_REF
:
18640 case REALPART_EXPR
:
18641 case IMAGPART_EXPR
:
18642 case VIEW_CONVERT_EXPR
:
18643 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
18644 gimple_test_f
, fallback
);
18648 case ARRAY_RANGE_REF
:
18649 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
18650 gimple_test_f
, fallback
);
18651 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
18652 gimple_test_f
, fallback
);
18656 /* Anything else with side-effects must be converted to
18657 a valid statement before we get here. */
18658 gcc_unreachable ();
18663 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
18664 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
18665 && !is_empty_type (TREE_TYPE (*expr_p
)))
18667 /* Historically, the compiler has treated a bare reference
18668 to a non-BLKmode volatile lvalue as forcing a load. */
18669 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
18671 /* Normally, we do not want to create a temporary for a
18672 TREE_ADDRESSABLE type because such a type should not be
18673 copied by bitwise-assignment. However, we make an
18674 exception here, as all we are doing here is ensuring that
18675 we read the bytes that make up the type. We use
18676 create_tmp_var_raw because create_tmp_var will abort when
18677 given a TREE_ADDRESSABLE type. */
18678 tree tmp
= create_tmp_var_raw (type
, "vol");
18679 gimple_add_tmp_var (tmp
);
18680 gimplify_assign (tmp
, *expr_p
, pre_p
);
18684 /* We can't do anything useful with a volatile reference to
18685 an incomplete type, so just throw it away. Likewise for
18686 a BLKmode type, since any implicit inner load should
18687 already have been turned into an explicit one by the
18688 gimplification process. */
18692 /* If we are gimplifying at the statement level, we're done. Tack
18693 everything together and return. */
18694 if (fallback
== fb_none
|| is_statement
)
18696 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
18697 it out for GC to reclaim it. */
18698 *expr_p
= NULL_TREE
;
18700 if (!gimple_seq_empty_p (internal_pre
)
18701 || !gimple_seq_empty_p (internal_post
))
18703 gimplify_seq_add_seq (&internal_pre
, internal_post
);
18704 gimplify_seq_add_seq (pre_p
, internal_pre
);
18707 /* The result of gimplifying *EXPR_P is going to be the last few
18708 statements in *PRE_P and *POST_P. Add location information
18709 to all the statements that were added by the gimplification
18711 if (!gimple_seq_empty_p (*pre_p
))
18712 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
18714 if (!gimple_seq_empty_p (*post_p
))
18715 annotate_all_with_location_after (*post_p
, post_last_gsi
,
18721 #ifdef ENABLE_GIMPLE_CHECKING
18724 enum tree_code code
= TREE_CODE (*expr_p
);
18725 /* These expressions should already be in gimple IR form. */
18726 gcc_assert (code
!= MODIFY_EXPR
18727 && code
!= ASM_EXPR
18728 && code
!= BIND_EXPR
18729 && code
!= CATCH_EXPR
18730 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
18731 && code
!= EH_FILTER_EXPR
18732 && code
!= GOTO_EXPR
18733 && code
!= LABEL_EXPR
18734 && code
!= LOOP_EXPR
18735 && code
!= SWITCH_EXPR
18736 && code
!= TRY_FINALLY_EXPR
18737 && code
!= EH_ELSE_EXPR
18738 && code
!= OACC_PARALLEL
18739 && code
!= OACC_KERNELS
18740 && code
!= OACC_SERIAL
18741 && code
!= OACC_DATA
18742 && code
!= OACC_HOST_DATA
18743 && code
!= OACC_DECLARE
18744 && code
!= OACC_UPDATE
18745 && code
!= OACC_ENTER_DATA
18746 && code
!= OACC_EXIT_DATA
18747 && code
!= OACC_CACHE
18748 && code
!= OMP_CRITICAL
18750 && code
!= OACC_LOOP
18751 && code
!= OMP_MASTER
18752 && code
!= OMP_MASKED
18753 && code
!= OMP_TASKGROUP
18754 && code
!= OMP_ORDERED
18755 && code
!= OMP_PARALLEL
18756 && code
!= OMP_SCAN
18757 && code
!= OMP_SECTIONS
18758 && code
!= OMP_SECTION
18759 && code
!= OMP_STRUCTURED_BLOCK
18760 && code
!= OMP_SINGLE
18761 && code
!= OMP_SCOPE
);
18765 /* Otherwise we're gimplifying a subexpression, so the resulting
18766 value is interesting. If it's a valid operand that matches
18767 GIMPLE_TEST_F, we're done. Unless we are handling some
18768 post-effects internally; if that's the case, we need to copy into
18769 a temporary before adding the post-effects to POST_P. */
18770 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
18773 /* Otherwise, we need to create a new temporary for the gimplified
18776 /* We can't return an lvalue if we have an internal postqueue. The
18777 object the lvalue refers to would (probably) be modified by the
18778 postqueue; we need to copy the value out first, which means an
18780 if ((fallback
& fb_lvalue
)
18781 && gimple_seq_empty_p (internal_post
)
18782 && is_gimple_addressable (*expr_p
))
18784 /* An lvalue will do. Take the address of the expression, store it
18785 in a temporary, and replace the expression with an INDIRECT_REF of
18787 tree ref_alias_type
= reference_alias_ptr_type (*expr_p
);
18788 unsigned int ref_align
= get_object_alignment (*expr_p
);
18789 tree ref_type
= TREE_TYPE (*expr_p
);
18790 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
18791 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
18792 if (TYPE_ALIGN (ref_type
) != ref_align
)
18793 ref_type
= build_aligned_type (ref_type
, ref_align
);
18794 *expr_p
= build2 (MEM_REF
, ref_type
,
18795 tmp
, build_zero_cst (ref_alias_type
));
18797 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
18799 /* An rvalue will do. Assign the gimplified expression into a
18800 new temporary TMP and replace the original expression with
18801 TMP. First, make sure that the expression has a type so that
18802 it can be assigned into a temporary. */
18803 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
18804 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
18808 #ifdef ENABLE_GIMPLE_CHECKING
18809 if (!(fallback
& fb_mayfail
))
18811 fprintf (stderr
, "gimplification failed:\n");
18812 print_generic_expr (stderr
, *expr_p
);
18813 debug_tree (*expr_p
);
18814 internal_error ("gimplification failed");
18817 gcc_assert (fallback
& fb_mayfail
);
18819 /* If this is an asm statement, and the user asked for the
18820 impossible, don't die. Fail and let gimplify_asm_expr
18826 /* Make sure the temporary matches our predicate. */
18827 gcc_assert ((*gimple_test_f
) (*expr_p
));
18829 if (!gimple_seq_empty_p (internal_post
))
18831 annotate_all_with_location (internal_post
, input_location
);
18832 gimplify_seq_add_seq (pre_p
, internal_post
);
18836 input_location
= saved_location
;
18840 /* Like gimplify_expr but make sure the gimplified result is not itself
18841 a SSA name (but a decl if it were). Temporaries required by
18842 evaluating *EXPR_P may be still SSA names. */
/* NOTE(review): this chunk is a lossy extraction.  The declaration below is
   missing its final parameter line (upstream gimplify.cc declares a trailing
   "bool allow_ssa"), the opening brace, the "if (! allow_ssa && ret != GS_ERROR"
   guard line, and the closing "return ret; }" — confirm against the upstream
   file before editing this fragment.  */
18844 static enum gimplify_status
18845 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
18846 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
/* Delegate all real work to the main gimplify_expr overload; this wrapper
   only post-processes the result.  */
18849 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
18850 gimple_test_f
, fallback
);
/* Condition fragment: when the gimplified expression came back as a bare
   SSA_NAME (and, per the comment above, SSA names are not wanted here),
   fall through to the replacement below.  The first half of this condition
   was dropped by the extraction.  */
18852 && TREE_CODE (*expr_p
) == SSA_NAME
)
/* Replace the SSA_NAME with a fresh temporary initialized from it, emitting
   the initialization into PRE_P (no post-queue: NULL).  */
18853 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
18857 /* Look through TYPE for variable-sized objects and gimplify each such
18858 size that we find. Add to LIST_P any statements generated. */
18861 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
18863 if (type
== NULL
|| type
== error_mark_node
)
18866 const bool ignored_p
18868 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
18869 && DECL_IGNORED_P (TYPE_NAME (type
));
18872 /* We first do the main variant, then copy into any other variants. */
18873 type
= TYPE_MAIN_VARIANT (type
);
18875 /* Avoid infinite recursion. */
18876 if (TYPE_SIZES_GIMPLIFIED (type
))
18879 TYPE_SIZES_GIMPLIFIED (type
) = 1;
18881 switch (TREE_CODE (type
))
18884 case ENUMERAL_TYPE
:
18887 case FIXED_POINT_TYPE
:
18888 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
18889 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
18891 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
18893 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
18894 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
18899 /* These types may not have declarations, so handle them here. */
18900 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
18901 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
18902 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
18903 with assigned stack slots, for -O1+ -g they should be tracked
18906 && TYPE_DOMAIN (type
)
18907 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
18909 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
18910 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
18911 DECL_IGNORED_P (t
) = 0;
18912 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
18913 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
18914 DECL_IGNORED_P (t
) = 0;
18920 case QUAL_UNION_TYPE
:
18921 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
18922 if (TREE_CODE (field
) == FIELD_DECL
)
18924 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
18925 /* Likewise, ensure variable offsets aren't removed. */
18927 && (t
= DECL_FIELD_OFFSET (field
))
18929 && DECL_ARTIFICIAL (t
))
18930 DECL_IGNORED_P (t
) = 0;
18931 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
18932 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
18933 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
18938 case REFERENCE_TYPE
:
18939 /* We used to recurse on the pointed-to type here, which turned out to
18940 be incorrect because its definition might refer to variables not
18941 yet initialized at this point if a forward declaration is involved.
18943 It was actually useful for anonymous pointed-to types to ensure
18944 that the sizes evaluation dominates every possible later use of the
18945 values. Restricting to such types here would be safe since there
18946 is no possible forward declaration around, but would introduce an
18947 undesirable middle-end semantic to anonymity. We then defer to
18948 front-ends the responsibility of ensuring that the sizes are
18949 evaluated both early and late enough, e.g. by attaching artificial
18950 type declarations to the tree. */
18957 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
18958 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
18960 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
18962 TYPE_SIZE (t
) = TYPE_SIZE (type
);
18963 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
18964 TYPE_SIZES_GIMPLIFIED (t
) = 1;
18968 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
18969 a size or position, has had all of its SAVE_EXPRs evaluated.
18970 We add any required statements to *STMT_P. */
18973 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
18975 tree expr
= *expr_p
;
18977 /* We don't do anything if the value isn't there, is constant, or contains
18978 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
18979 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
18980 will want to replace it with a new variable, but that will cause problems
18981 if this type is from outside the function. It's OK to have that here. */
18982 if (expr
== NULL_TREE
18983 || is_gimple_constant (expr
)
18985 || CONTAINS_PLACEHOLDER_P (expr
))
18988 *expr_p
= unshare_expr (expr
);
18990 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
18991 if the def vanishes. */
18992 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
18994 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
18995 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
18996 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
18997 if (is_gimple_constant (*expr_p
))
18998 *expr_p
= get_initialized_tmp_var (*expr_p
, stmt_p
, NULL
, false);
19001 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
19002 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
19003 is true, also gimplify the parameters. */
19006 gimplify_body (tree fndecl
, bool do_parms
)
19008 location_t saved_location
= input_location
;
19009 gimple_seq parm_stmts
, parm_cleanup
= NULL
, seq
;
19010 gimple
*outer_stmt
;
19013 timevar_push (TV_TREE_GIMPLIFY
);
19015 init_tree_ssa (cfun
);
19017 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
19019 default_rtl_profile ();
19021 gcc_assert (gimplify_ctxp
== NULL
);
19022 push_gimplify_context (true);
19024 if (flag_openacc
|| flag_openmp
)
19026 gcc_assert (gimplify_omp_ctxp
== NULL
);
19027 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
19028 gimplify_omp_ctxp
= new_omp_context (ORT_IMPLICIT_TARGET
);
19031 /* Unshare most shared trees in the body and in that of any nested functions.
19032 It would seem we don't have to do this for nested functions because
19033 they are supposed to be output and then the outer function gimplified
19034 first, but the g++ front end doesn't always do it that way. */
19035 unshare_body (fndecl
);
19036 unvisit_body (fndecl
);
19038 /* Make sure input_location isn't set to something weird. */
19039 input_location
= DECL_SOURCE_LOCATION (fndecl
);
19041 /* Resolve callee-copies. This has to be done before processing
19042 the body so that DECL_VALUE_EXPR gets processed correctly. */
19043 parm_stmts
= do_parms
? gimplify_parameters (&parm_cleanup
) : NULL
;
19045 /* Gimplify the function's body. */
19047 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
19048 outer_stmt
= gimple_seq_first_nondebug_stmt (seq
);
19051 outer_stmt
= gimple_build_nop ();
19052 gimplify_seq_add_stmt (&seq
, outer_stmt
);
19055 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
19056 not the case, wrap everything in a GIMPLE_BIND to make it so. */
19057 if (gimple_code (outer_stmt
) == GIMPLE_BIND
19058 && (gimple_seq_first_nondebug_stmt (seq
)
19059 == gimple_seq_last_nondebug_stmt (seq
)))
19061 outer_bind
= as_a
<gbind
*> (outer_stmt
);
19062 if (gimple_seq_first_stmt (seq
) != outer_stmt
19063 || gimple_seq_last_stmt (seq
) != outer_stmt
)
19065 /* If there are debug stmts before or after outer_stmt, move them
19066 inside of outer_bind body. */
19067 gimple_stmt_iterator gsi
= gsi_for_stmt (outer_stmt
, &seq
);
19068 gimple_seq second_seq
= NULL
;
19069 if (gimple_seq_first_stmt (seq
) != outer_stmt
19070 && gimple_seq_last_stmt (seq
) != outer_stmt
)
19072 second_seq
= gsi_split_seq_after (gsi
);
19073 gsi_remove (&gsi
, false);
19075 else if (gimple_seq_first_stmt (seq
) != outer_stmt
)
19076 gsi_remove (&gsi
, false);
19079 gsi_remove (&gsi
, false);
19083 gimple_seq_add_seq_without_update (&seq
,
19084 gimple_bind_body (outer_bind
));
19085 gimple_seq_add_seq_without_update (&seq
, second_seq
);
19086 gimple_bind_set_body (outer_bind
, seq
);
19090 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
19092 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
19094 /* If we had callee-copies statements, insert them at the beginning
19095 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
19096 if (!gimple_seq_empty_p (parm_stmts
))
19100 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
19103 gtry
*g
= gimple_build_try (parm_stmts
, parm_cleanup
,
19104 GIMPLE_TRY_FINALLY
);
19106 gimple_seq_add_stmt (&parm_stmts
, g
);
19108 gimple_bind_set_body (outer_bind
, parm_stmts
);
19110 for (parm
= DECL_ARGUMENTS (current_function_decl
);
19111 parm
; parm
= DECL_CHAIN (parm
))
19112 if (DECL_HAS_VALUE_EXPR_P (parm
))
19114 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
19115 DECL_IGNORED_P (parm
) = 0;
19119 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
19120 && gimplify_omp_ctxp
)
19122 delete_omp_context (gimplify_omp_ctxp
);
19123 gimplify_omp_ctxp
= NULL
;
19126 pop_gimplify_context (outer_bind
);
19127 gcc_assert (gimplify_ctxp
== NULL
);
19129 if (flag_checking
&& !seen_error ())
19130 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
19132 timevar_pop (TV_TREE_GIMPLIFY
);
19133 input_location
= saved_location
;
19138 typedef char *char_p
; /* For DEF_VEC_P. */
19140 /* Return whether we should exclude FNDECL from instrumentation. */
19143 flag_instrument_functions_exclude_p (tree fndecl
)
19147 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
19148 if (v
&& v
->length () > 0)
19154 name
= lang_hooks
.decl_printable_name (fndecl
, 1);
19155 FOR_EACH_VEC_ELT (*v
, i
, s
)
19156 if (strstr (name
, s
) != NULL
)
19160 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
19161 if (v
&& v
->length () > 0)
19167 name
= DECL_SOURCE_FILE (fndecl
);
19168 FOR_EACH_VEC_ELT (*v
, i
, s
)
19169 if (strstr (name
, s
) != NULL
)
19176 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
19177 If COND_VAR is not NULL, it is a boolean variable guarding the call to
19178 the instrumentation function. IF STMT is not NULL, it is a statement
19179 to be executed just before the call to the instrumentation function. */
19182 build_instrumentation_call (gimple_seq
*seq
, enum built_in_function fncode
,
19183 tree cond_var
, gimple
*stmt
)
19185 /* The instrumentation hooks aren't going to call the instrumented
19186 function and the address they receive is expected to be matchable
19187 against symbol addresses. Make sure we don't create a trampoline,
19188 in case the current function is nested. */
19189 tree this_fn_addr
= build_fold_addr_expr (current_function_decl
);
19190 TREE_NO_TRAMPOLINE (this_fn_addr
) = 1;
19192 tree label_true
, label_false
;
19195 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
19196 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
19197 gcond
*cond
= gimple_build_cond (EQ_EXPR
, cond_var
, boolean_false_node
,
19198 label_true
, label_false
);
19199 gimplify_seq_add_stmt (seq
, cond
);
19200 gimplify_seq_add_stmt (seq
, gimple_build_label (label_true
));
19201 gimplify_seq_add_stmt (seq
, gimple_build_predict (PRED_COLD_LABEL
,
19206 gimplify_seq_add_stmt (seq
, stmt
);
19208 tree x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
19209 gcall
*call
= gimple_build_call (x
, 1, integer_zero_node
);
19210 tree tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
19211 gimple_call_set_lhs (call
, tmp_var
);
19212 gimplify_seq_add_stmt (seq
, call
);
19213 x
= builtin_decl_implicit (fncode
);
19214 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
19215 gimplify_seq_add_stmt (seq
, call
);
19218 gimplify_seq_add_stmt (seq
, gimple_build_label (label_false
));
19221 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
19222 node for the function we want to gimplify.
19224 Return the sequence of GIMPLE statements corresponding to the body
19228 gimplify_function_tree (tree fndecl
)
19233 gcc_assert (!gimple_body (fndecl
));
19235 if (DECL_STRUCT_FUNCTION (fndecl
))
19236 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
19238 push_struct_function (fndecl
);
19240 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
19242 cfun
->curr_properties
|= PROP_gimple_lva
;
19244 if (asan_sanitize_use_after_scope ())
19245 asan_poisoned_variables
= new hash_set
<tree
> ();
19246 bind
= gimplify_body (fndecl
, true);
19247 if (asan_poisoned_variables
)
19249 delete asan_poisoned_variables
;
19250 asan_poisoned_variables
= NULL
;
19253 /* The tree body of the function is no longer needed, replace it
19254 with the new GIMPLE body. */
19256 gimple_seq_add_stmt (&seq
, bind
);
19257 gimple_set_body (fndecl
, seq
);
19259 /* If we're instrumenting function entry/exit, then prepend the call to
19260 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
19261 catch the exit hook. */
19262 /* ??? Add some way to ignore exceptions for this TFE. */
19263 if (flag_instrument_function_entry_exit
19264 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
19265 /* Do not instrument extern inline functions. */
19266 && !(DECL_DECLARED_INLINE_P (fndecl
)
19267 && DECL_EXTERNAL (fndecl
)
19268 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
19269 && !flag_instrument_functions_exclude_p (fndecl
))
19271 gimple_seq body
= NULL
, cleanup
= NULL
;
19275 /* If -finstrument-functions-once is specified, generate:
19277 static volatile bool C.0 = false;
19284 [call profiling enter function]
19287 without specific protection for data races. */
19288 if (flag_instrument_function_entry_exit
> 1)
19291 = build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
19293 create_tmp_var_name ("C"),
19294 boolean_type_node
);
19295 DECL_ARTIFICIAL (first_var
) = 1;
19296 DECL_IGNORED_P (first_var
) = 1;
19297 TREE_STATIC (first_var
) = 1;
19298 TREE_THIS_VOLATILE (first_var
) = 1;
19299 TREE_USED (first_var
) = 1;
19300 DECL_INITIAL (first_var
) = boolean_false_node
;
19301 varpool_node::add (first_var
);
19303 cond_var
= create_tmp_var (boolean_type_node
, "tmp_called");
19304 assign
= gimple_build_assign (cond_var
, first_var
);
19305 gimplify_seq_add_stmt (&body
, assign
);
19307 assign
= gimple_build_assign (first_var
, boolean_true_node
);
19312 cond_var
= NULL_TREE
;
19316 build_instrumentation_call (&body
, BUILT_IN_PROFILE_FUNC_ENTER
,
19319 /* If -finstrument-functions-once is specified, generate:
19322 [call profiling exit function]
19324 without specific protection for data races. */
19325 build_instrumentation_call (&cleanup
, BUILT_IN_PROFILE_FUNC_EXIT
,
19328 gimple
*tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
19329 gimplify_seq_add_stmt (&body
, tf
);
19330 gbind
*new_bind
= gimple_build_bind (NULL
, body
, NULL
);
19332 /* Replace the current function body with the body
19333 wrapped in the try/finally TF. */
19335 gimple_seq_add_stmt (&seq
, new_bind
);
19336 gimple_set_body (fndecl
, seq
);
19340 if (sanitize_flags_p (SANITIZE_THREAD
)
19341 && param_tsan_instrument_func_entry_exit
)
19343 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
19344 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
19345 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
19346 /* Replace the current function body with the body
19347 wrapped in the try/finally TF. */
19349 gimple_seq_add_stmt (&seq
, new_bind
);
19350 gimple_set_body (fndecl
, seq
);
19353 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
19354 cfun
->curr_properties
|= PROP_gimple_any
;
19358 dump_function (TDI_gimple
, fndecl
);
19361 /* Return a dummy expression of type TYPE in order to keep going after an
19365 dummy_object (tree type
)
19367 tree t
= build_int_cst (build_pointer_type (type
), 0);
19368 return build2 (MEM_REF
, type
, t
, t
);
19371 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
19372 builtin function, but a very special sort of operator. */
19374 enum gimplify_status
19375 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
,
19376 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
19378 tree promoted_type
, have_va_type
;
19379 tree valist
= TREE_OPERAND (*expr_p
, 0);
19380 tree type
= TREE_TYPE (*expr_p
);
19381 tree t
, tag
, aptag
;
19382 location_t loc
= EXPR_LOCATION (*expr_p
);
19384 /* Verify that valist is of the proper type. */
19385 have_va_type
= TREE_TYPE (valist
);
19386 if (have_va_type
== error_mark_node
)
19388 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
19389 if (have_va_type
== NULL_TREE
19390 && POINTER_TYPE_P (TREE_TYPE (valist
)))
19391 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
19393 = targetm
.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist
)));
19394 gcc_assert (have_va_type
!= NULL_TREE
);
19396 /* Generate a diagnostic for requesting data of a type that cannot
19397 be passed through `...' due to type promotion at the call site. */
19398 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
19401 static bool gave_help
;
19403 /* Use the expansion point to handle cases such as passing bool (defined
19404 in a system header) through `...'. */
19406 = expansion_point_location_if_in_system_header (loc
);
19408 /* Unfortunately, this is merely undefined, rather than a constraint
19409 violation, so we cannot make this an error. If this call is never
19410 executed, the program is still strictly conforming. */
19411 auto_diagnostic_group d
;
19412 warned
= warning_at (xloc
, 0,
19413 "%qT is promoted to %qT when passed through %<...%>",
19414 type
, promoted_type
);
19415 if (!gave_help
&& warned
)
19418 inform (xloc
, "(so you should pass %qT not %qT to %<va_arg%>)",
19419 promoted_type
, type
);
19422 /* We can, however, treat "undefined" any way we please.
19423 Call abort to encourage the user to fix the program. */
19425 inform (xloc
, "if this code is reached, the program will abort");
19426 /* Before the abort, allow the evaluation of the va_list
19427 expression to exit or longjmp. */
19428 gimplify_and_add (valist
, pre_p
);
19429 t
= build_call_expr_loc (loc
,
19430 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
19431 gimplify_and_add (t
, pre_p
);
19433 /* This is dead code, but go ahead and finish so that the
19434 mode of the result comes out right. */
19435 *expr_p
= dummy_object (type
);
19436 return GS_ALL_DONE
;
19439 tag
= build_int_cst (build_pointer_type (type
), 0);
19440 aptag
= build_int_cst (TREE_TYPE (valist
), 0);
19442 *expr_p
= build_call_expr_internal_loc (loc
, IFN_VA_ARG
, type
, 3,
19443 valist
, tag
, aptag
);
19445 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
19446 needs to be expanded. */
19447 cfun
->curr_properties
&= ~PROP_gimple_lva
;
19452 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
19454 DST/SRC are the destination and source respectively. You can pass
19455 ungimplified trees in DST or SRC, in which case they will be
19456 converted to a gimple operand if necessary.
19458 This function returns the newly created GIMPLE_ASSIGN tuple. */
19461 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
19463 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
19464 gimplify_and_add (t
, seq_p
);
19466 return gimple_seq_last_stmt (*seq_p
);
19470 gimplify_hasher::hash (const elt_t
*p
)
19473 return iterative_hash_expr (t
, 0);
19477 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
19481 enum tree_code code
= TREE_CODE (t1
);
19483 if (TREE_CODE (t2
) != code
19484 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
19487 if (!operand_equal_p (t1
, t2
, 0))
19490 /* Only allow them to compare equal if they also hash equal; otherwise
19491 results are nondeterminate, and we fail bootstrap comparison. */
19492 gcc_checking_assert (hash (p1
) == hash (p2
));