1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2023 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "diagnostic.h" /* For errorcount. */
41 #include "fold-const.h"
46 #include "gimple-iterator.h"
47 #include "gimple-fold.h"
50 #include "stor-layout.h"
51 #include "print-tree.h"
52 #include "tree-iterator.h"
53 #include "tree-inline.h"
54 #include "langhooks.h"
57 #include "tree-hash-traits.h"
58 #include "omp-general.h"
60 #include "gimple-low.h"
61 #include "gomp-constants.h"
62 #include "splay-tree.h"
63 #include "gimple-walk.h"
64 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
66 #include "stringpool.h"
70 #include "omp-offload.h"
72 #include "tree-nested.h"
74 /* Hash set of poisoned variables in a bind expr. */
/* NOTE(review): this chunk is a line-sampled, token-split extraction of
   gimplify.cc; declaration kept byte-identical.  The set tracks decls whose
   ASan shadow memory is currently poisoned within the active BIND_EXPR;
   allocated lazily, NULL when no poisoning is in effect.  */
75 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
/* NOTE(review): the two enums below (gimplify_omp_var_data / GOVD_* flags and
   omp_region_type / ORT_* codes) are incomplete in this extraction — the
   opening/closing braces and several enumerators referenced elsewhere
   (e.g. GOVD_SEEN, GOVD_LOCAL, GOVD_MAP, ORT_PARALLEL, ORT_TASK, ORT_TEAMS,
   ORT_SIMD, ORT_TARGET) were elided by the sampling.  Kept byte-identical;
   restore from upstream gimplify.cc before compiling.  */
77 enum gimplify_omp_var_data
80 GOVD_EXPLICIT
= 0x000002,
81 GOVD_SHARED
= 0x000004,
82 GOVD_PRIVATE
= 0x000008,
83 GOVD_FIRSTPRIVATE
= 0x000010,
84 GOVD_LASTPRIVATE
= 0x000020,
85 GOVD_REDUCTION
= 0x000040,
88 GOVD_DEBUG_PRIVATE
= 0x000200,
89 GOVD_PRIVATE_OUTER_REF
= 0x000400,
90 GOVD_LINEAR
= 0x000800,
91 GOVD_ALIGNED
= 0x001000,
93 /* Flag for GOVD_MAP: don't copy back. */
94 GOVD_MAP_TO_ONLY
= 0x002000,
96 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
97 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 0x004000,
99 GOVD_MAP_0LEN_ARRAY
= 0x008000,
101 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
102 GOVD_MAP_ALWAYS_TO
= 0x010000,
104 /* Flag for shared vars that are or might be stored to in the region. */
105 GOVD_WRITTEN
= 0x020000,
107 /* Flag for GOVD_MAP, if it is a forced mapping. */
108 GOVD_MAP_FORCE
= 0x040000,
110 /* Flag for GOVD_MAP: must be present already. */
111 GOVD_MAP_FORCE_PRESENT
= 0x080000,
113 /* Flag for GOVD_MAP: only allocate. */
114 GOVD_MAP_ALLOC_ONLY
= 0x100000,
116 /* Flag for GOVD_MAP: only copy back. */
117 GOVD_MAP_FROM_ONLY
= 0x200000,
119 GOVD_NONTEMPORAL
= 0x400000,
121 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
122 GOVD_LASTPRIVATE_CONDITIONAL
= 0x800000,
124 GOVD_CONDTEMP
= 0x1000000,
126 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
127 GOVD_REDUCTION_INSCAN
= 0x2000000,
129 /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT. */
130 GOVD_FIRSTPRIVATE_IMPLICIT
= 0x4000000,
/* Mask covering the data-sharing classes proper (as opposed to the
   modifier flags above).  */
132 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
133 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
/* NOTE(review): enum omp_region_type starts here; its `enum` header line was
   elided by the extraction.  Values encode the kind of enclosing OpenMP /
   OpenACC region plus modifier bits OR-ed into the base codes.  */
140 ORT_WORKSHARE
= 0x00,
141 ORT_TASKGROUP
= 0x01,
145 ORT_COMBINED_PARALLEL
= ORT_PARALLEL
| 1,
148 ORT_UNTIED_TASK
= ORT_TASK
| 1,
149 ORT_TASKLOOP
= ORT_TASK
| 2,
150 ORT_UNTIED_TASKLOOP
= ORT_UNTIED_TASK
| 2,
153 ORT_COMBINED_TEAMS
= ORT_TEAMS
| 1,
154 ORT_HOST_TEAMS
= ORT_TEAMS
| 2,
155 ORT_COMBINED_HOST_TEAMS
= ORT_COMBINED_TEAMS
| 2,
158 ORT_TARGET_DATA
= 0x40,
160 /* Data region with offloading. */
162 ORT_COMBINED_TARGET
= ORT_TARGET
| 1,
163 ORT_IMPLICIT_TARGET
= ORT_TARGET
| 2,
165 /* OpenACC variants. */
166 ORT_ACC
= 0x100, /* A generic OpenACC region. */
167 ORT_ACC_DATA
= ORT_ACC
| ORT_TARGET_DATA
, /* Data construct. */
168 ORT_ACC_PARALLEL
= ORT_ACC
| ORT_TARGET
, /* Parallel construct */
169 ORT_ACC_KERNELS
= ORT_ACC
| ORT_TARGET
| 2, /* Kernels construct. */
170 ORT_ACC_SERIAL
= ORT_ACC
| ORT_TARGET
| 4, /* Serial construct. */
171 ORT_ACC_HOST_DATA
= ORT_ACC
| ORT_TARGET_DATA
| 2, /* Host data. */
173 /* Dummy OpenMP region, used to disable expansion of
174 DECL_VALUE_EXPRs in taskloop pre body. */
178 /* Gimplify hashtable helper. */
/* NOTE(review): the struct bodies below are incomplete in this extraction
   (braces, `elt_t` definition, and several members — e.g. gimplify_ctx::temps,
   ::conditions, gimplify_omp_ctx::location, ::defaultmap, ::add_safelen1,
   ::in_for_exprs — are elided, though they are referenced by functions later
   in the file).  Kept byte-identical.  */
180 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
182 static inline hashval_t
hash (const elt_t
*);
183 static inline bool equal (const elt_t
*, const elt_t
*);
/* Per-function gimplification state; instances are pooled via ctx_pool and
   chained through prev_context (see push/pop_gimplify_context).  */
188 struct gimplify_ctx
*prev_context
;
190 vec
<gbind
*> bind_expr_stack
;
192 gimple_seq conditional_cleanups
;
196 vec
<tree
> case_labels
;
197 hash_set
<tree
> *live_switch_vars
;
198 /* The formal temporary table. Should this be persistent? */
199 hash_table
<gimplify_hasher
> *temp_htab
;
202 unsigned into_ssa
: 1;
203 unsigned allow_rhs_cond_expr
: 1;
204 unsigned in_cleanup_point_expr
: 1;
205 unsigned keep_stack
: 1;
206 unsigned save_stack
: 1;
207 unsigned in_switch_expr
: 1;
210 enum gimplify_defaultmap_kind
213 GDMK_SCALAR_TARGET
, /* w/ Fortran's target attr, implicit mapping, only. */
/* Per-OMP-region gimplification state, one per nested construct, chained
   through outer_context.  */
219 struct gimplify_omp_ctx
221 struct gimplify_omp_ctx
*outer_context
;
222 splay_tree variables
;
223 hash_set
<tree
> *privatized_types
;
225 /* Iteration variables in an OMP_FOR. */
226 vec
<tree
> loop_iter_var
;
228 enum omp_clause_default_kind default_kind
;
229 enum omp_region_type region_type
;
233 bool target_firstprivatize_array_bases
;
235 bool order_concurrent
;
/* Current gimplification contexts (innermost), NULL when not gimplifying.  */
241 static struct gimplify_ctx
*gimplify_ctxp
;
242 static struct gimplify_omp_ctx
*gimplify_omp_ctxp
;
243 static bool in_omp_construct
;
245 /* Forward declaration. */
/* NOTE(review): prototypes for mutually-recursive gimplifiers defined later
   in the file; kept byte-identical (token-split by the extraction).  */
246 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
247 static hash_map
<tree
, tree
> *oacc_declare_returns
;
248 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
249 bool (*) (tree
), fallback_t
, bool);
250 static void prepare_gimple_addressable (tree
*, gimple_seq
*);
252 /* Shorter alias name for the above function for use in gimplify.cc
256 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
258 gimple_seq_add_stmt_without_update (seq_p
, gs
);
261 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
262 NULL, a new sequence is allocated. This function is
263 similar to gimple_seq_add_seq, but does not scan the operands.
264 During gimplification, we need to manipulate statement sequences
265 before the def/use vectors have been constructed. */
268 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
270 gimple_stmt_iterator si
;
275 si
= gsi_last (*dst_p
);
276 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
280 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
281 and popping gimplify contexts. */
/* Free-list of gimplify_ctx structs, singly linked through prev_context;
   see ctx_alloc / ctx_free below.  */
283 static struct gimplify_ctx
*ctx_pool
= NULL
;
285 /* Return a gimplify context struct from the pool. */
287 static inline struct gimplify_ctx
*
290 struct gimplify_ctx
* c
= ctx_pool
;
293 ctx_pool
= c
->prev_context
;
295 c
= XNEW (struct gimplify_ctx
);
297 memset (c
, '\0', sizeof (*c
));
301 /* Put gimplify context C back into the pool. */
304 ctx_free (struct gimplify_ctx
*c
)
306 c
->prev_context
= ctx_pool
;
310 /* Free allocated ctx stack memory. */
313 free_gimplify_stack (void)
315 struct gimplify_ctx
*c
;
317 while ((c
= ctx_pool
))
319 ctx_pool
= c
->prev_context
;
325 /* Set up a context for the gimplifier. */
328 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
330 struct gimplify_ctx
*c
= ctx_alloc ();
332 c
->prev_context
= gimplify_ctxp
;
334 gimplify_ctxp
->into_ssa
= in_ssa
;
335 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
338 /* Tear down a context for the gimplifier. If BODY is non-null, then
339 put the temporaries into the outer BIND_EXPR. Otherwise, put them
342 BODY is not a sequence, but the first tuple in a sequence. */
345 pop_gimplify_context (gimple
*body
)
347 struct gimplify_ctx
*c
= gimplify_ctxp
;
350 && (!c
->bind_expr_stack
.exists ()
351 || c
->bind_expr_stack
.is_empty ()));
352 c
->bind_expr_stack
.release ();
353 gimplify_ctxp
= c
->prev_context
;
356 declare_vars (c
->temps
, body
, false);
358 record_vars (c
->temps
);
365 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
368 gimple_push_bind_expr (gbind
*bind_stmt
)
370 gimplify_ctxp
->bind_expr_stack
.reserve (8);
371 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
374 /* Pop the first element off the stack of bindings. */
377 gimple_pop_bind_expr (void)
379 gimplify_ctxp
->bind_expr_stack
.pop ();
382 /* Return the first element of the stack of bindings. */
385 gimple_current_bind_expr (void)
387 return gimplify_ctxp
->bind_expr_stack
.last ();
390 /* Return the stack of bindings created during gimplification. */
393 gimple_bind_expr_stack (void)
395 return gimplify_ctxp
->bind_expr_stack
;
398 /* Return true iff there is a COND_EXPR between us and the innermost
399 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
402 gimple_conditional_context (void)
404 return gimplify_ctxp
->conditions
> 0;
407 /* Note that we've entered a COND_EXPR. */
410 gimple_push_condition (void)
412 #ifdef ENABLE_GIMPLE_CHECKING
413 if (gimplify_ctxp
->conditions
== 0)
414 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
416 ++(gimplify_ctxp
->conditions
);
419 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
420 now, add any conditional cleanups we've seen to the prequeue. */
423 gimple_pop_condition (gimple_seq
*pre_p
)
425 int conds
= --(gimplify_ctxp
->conditions
);
427 gcc_assert (conds
>= 0);
430 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
431 gimplify_ctxp
->conditional_cleanups
= NULL
;
435 /* A stable comparison routine for use with splay trees and DECLs. */
438 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
443 return DECL_UID (a
) - DECL_UID (b
);
446 /* Create a new omp construct that deals with variable remapping. */
448 static struct gimplify_omp_ctx
*
449 new_omp_context (enum omp_region_type region_type
)
451 struct gimplify_omp_ctx
*c
;
453 c
= XCNEW (struct gimplify_omp_ctx
);
454 c
->outer_context
= gimplify_omp_ctxp
;
455 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
456 c
->privatized_types
= new hash_set
<tree
>;
457 c
->location
= input_location
;
458 c
->region_type
= region_type
;
459 if ((region_type
& ORT_TASK
) == 0)
460 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
462 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
463 c
->defaultmap
[GDMK_SCALAR
] = GOVD_MAP
;
464 c
->defaultmap
[GDMK_SCALAR_TARGET
] = GOVD_MAP
;
465 c
->defaultmap
[GDMK_AGGREGATE
] = GOVD_MAP
;
466 c
->defaultmap
[GDMK_ALLOCATABLE
] = GOVD_MAP
;
467 c
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
;
472 /* Destroy an omp construct that deals with variable remapping. */
475 delete_omp_context (struct gimplify_omp_ctx
*c
)
477 splay_tree_delete (c
->variables
);
478 delete c
->privatized_types
;
479 c
->loop_iter_var
.release ();
/* Forward declarations of OMP clause bookkeeping helpers defined later.  */
483 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
484 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
486 /* Both gimplify the statement T and append it to *SEQ_P. This function
487 behaves exactly as gimplify_stmt, but you don't have to pass T as a
491 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
493 gimplify_stmt (&t
, seq_p
);
496 /* Gimplify statement T into sequence *SEQ_P, and return the first
497 tuple in the sequence of generated tuples for this statement.
498 Return NULL if gimplifying T produced no tuples. */
501 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
503 gimple_stmt_iterator last
= gsi_last (*seq_p
);
505 gimplify_and_add (t
, seq_p
);
507 if (!gsi_end_p (last
))
510 return gsi_stmt (last
);
513 return gimple_seq_first_stmt (*seq_p
);
516 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
517 LHS, or for a call argument. */
520 is_gimple_mem_rhs (tree t
)
522 /* If we're dealing with a renamable type, either source or dest must be
523 a renamed variable. */
524 if (is_gimple_reg_type (TREE_TYPE (t
)))
525 return is_gimple_val (t
);
527 return is_gimple_val (t
) || is_gimple_lvalue (t
);
530 /* Return true if T is a CALL_EXPR or an expression that can be
531 assigned to a temporary. Note that this predicate should only be
532 used during gimplification. See the rationale for this in
533 gimplify_modify_expr. */
536 is_gimple_reg_rhs_or_call (tree t
)
538 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
539 || TREE_CODE (t
) == CALL_EXPR
);
542 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
543 this predicate should only be used during gimplification. See the
544 rationale for this in gimplify_modify_expr. */
547 is_gimple_mem_rhs_or_call (tree t
)
549 /* If we're dealing with a renamable type, either source or dest must be
550 a renamed variable. */
551 if (is_gimple_reg_type (TREE_TYPE (t
)))
552 return is_gimple_val (t
);
554 return (is_gimple_val (t
)
555 || is_gimple_lvalue (t
)
556 || TREE_CLOBBER_P (t
)
557 || TREE_CODE (t
) == CALL_EXPR
);
560 /* Create a temporary with a name derived from VAL. Subroutine of
561 lookup_tmp_var; nobody else should call this function. */
564 create_tmp_from_val (tree val
)
566 /* Drop all qualifiers and address-space information from the value type. */
567 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
568 tree var
= create_tmp_var (type
, get_name (val
));
572 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
573 an existing expression temporary. If NOT_GIMPLE_REG, mark it as such. */
576 lookup_tmp_var (tree val
, bool is_formal
, bool not_gimple_reg
)
580 /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P. */
581 gcc_assert (!is_formal
|| !not_gimple_reg
);
583 /* If not optimizing, never really reuse a temporary. local-alloc
584 won't allocate any variable that is used in more than one basic
585 block, which means it will go into memory, causing much extra
586 work in reload and final and poorer code generation, outweighing
587 the extra memory allocation here. */
588 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
590 ret
= create_tmp_from_val (val
);
591 DECL_NOT_GIMPLE_REG_P (ret
) = not_gimple_reg
;
599 if (!gimplify_ctxp
->temp_htab
)
600 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
601 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
604 elt_p
= XNEW (elt_t
);
606 elt_p
->temp
= ret
= create_tmp_from_val (val
);
619 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
622 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
623 bool is_formal
, bool allow_ssa
, bool not_gimple_reg
)
627 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
628 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
629 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
633 && gimplify_ctxp
->into_ssa
634 && is_gimple_reg_type (TREE_TYPE (val
)))
636 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
637 if (! gimple_in_ssa_p (cfun
))
639 const char *name
= get_name (val
);
641 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
645 t
= lookup_tmp_var (val
, is_formal
, not_gimple_reg
);
647 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
649 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
651 /* gimplify_modify_expr might want to reduce this further. */
652 gimplify_and_add (mod
, pre_p
);
658 /* Return a formal temporary variable initialized with VAL. PRE_P is as
659 in gimplify_expr. Only use this function if:
661 1) The value of the unfactored expression represented by VAL will not
662 change between the initialization and use of the temporary, and
663 2) The temporary will not be otherwise modified.
665 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
666 and #2 means it is inappropriate for && temps.
668 For other cases, use get_initialized_tmp_var instead. */
671 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
673 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true, false);
676 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
677 are as in gimplify_expr. */
680 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
,
681 gimple_seq
*post_p
/* = NULL */,
682 bool allow_ssa
/* = true */)
684 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
, false);
687 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
688 generate debug info for them; otherwise don't. */
691 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
698 gbind
*scope
= as_a
<gbind
*> (gs
);
700 temps
= nreverse (last
);
702 block
= gimple_bind_block (scope
);
703 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
704 if (!block
|| !debug_info
)
706 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
707 gimple_bind_set_vars (scope
, temps
);
711 /* We need to attach the nodes both to the BIND_EXPR and to its
712 associated BLOCK for debugging purposes. The key point here
713 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
714 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
715 if (BLOCK_VARS (block
))
716 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
719 gimple_bind_set_vars (scope
,
720 chainon (gimple_bind_vars (scope
), temps
));
721 BLOCK_VARS (block
) = temps
;
727 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
728 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
729 no such upper bound can be obtained. */
732 force_constant_size (tree var
)
734 /* The only attempt we make is by querying the maximum size of objects
735 of the variable's type. */
737 HOST_WIDE_INT max_size
;
739 gcc_assert (VAR_P (var
));
741 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
743 gcc_assert (max_size
>= 0);
746 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
748 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
751 /* Push the temporary variable TMP into the current binding. */
754 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
756 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
758 /* Later processing assumes that the object size is constant, which might
759 not be true at this point. Force the use of a constant upper bound in
761 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
762 force_constant_size (tmp
);
764 DECL_CONTEXT (tmp
) = fn
->decl
;
765 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
767 record_vars_into (tmp
, fn
->decl
);
770 /* Push the temporary variable TMP into the current binding. */
773 gimple_add_tmp_var (tree tmp
)
775 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
777 /* Later processing assumes that the object size is constant, which might
778 not be true at this point. Force the use of a constant upper bound in
780 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
781 force_constant_size (tmp
);
783 DECL_CONTEXT (tmp
) = current_function_decl
;
784 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
788 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
789 gimplify_ctxp
->temps
= tmp
;
791 /* Mark temporaries local within the nearest enclosing parallel. */
792 if (gimplify_omp_ctxp
)
794 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
795 int flag
= GOVD_LOCAL
| GOVD_SEEN
;
797 && (ctx
->region_type
== ORT_WORKSHARE
798 || ctx
->region_type
== ORT_TASKGROUP
799 || ctx
->region_type
== ORT_SIMD
800 || ctx
->region_type
== ORT_ACC
))
802 if (ctx
->region_type
== ORT_SIMD
803 && TREE_ADDRESSABLE (tmp
)
804 && !TREE_STATIC (tmp
))
806 if (TREE_CODE (DECL_SIZE_UNIT (tmp
)) != INTEGER_CST
)
807 ctx
->add_safelen1
= true;
808 else if (ctx
->in_for_exprs
)
811 flag
= GOVD_PRIVATE
| GOVD_SEEN
;
814 ctx
= ctx
->outer_context
;
817 omp_add_variable (ctx
, tmp
, flag
);
826 /* This case is for nested functions. We need to expose the locals
828 body_seq
= gimple_body (current_function_decl
);
829 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
835 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
836 nodes that are referenced more than once in GENERIC functions. This is
837 necessary because gimplification (translation into GIMPLE) is performed
838 by modifying tree nodes in-place, so gimplication of a shared node in a
839 first context could generate an invalid GIMPLE form in a second context.
841 This is achieved with a simple mark/copy/unmark algorithm that walks the
842 GENERIC representation top-down, marks nodes with TREE_VISITED the first
843 time it encounters them, duplicates them if they already have TREE_VISITED
844 set, and finally removes the TREE_VISITED marks it has set.
846 The algorithm works only at the function level, i.e. it generates a GENERIC
847 representation of a function with no nodes shared within the function when
848 passed a GENERIC function (except for nodes that are allowed to be shared).
850 At the global level, it is also necessary to unshare tree nodes that are
851 referenced in more than one function, for the same aforementioned reason.
852 This requires some cooperation from the front-end. There are 2 strategies:
854 1. Manual unsharing. The front-end needs to call unshare_expr on every
855 expression that might end up being shared across functions.
857 2. Deep unsharing. This is an extension of regular unsharing. Instead
858 of calling unshare_expr on expressions that might be shared across
859 functions, the front-end pre-marks them with TREE_VISITED. This will
860 ensure that they are unshared on the first reference within functions
861 when the regular unsharing algorithm runs. The counterpart is that
862 this algorithm must look deeper than for manual unsharing, which is
863 specified by LANG_HOOKS_DEEP_UNSHARING.
865 If there are only few specific cases of node sharing across functions, it is
866 probably easier for a front-end to unshare the expressions manually. On the
867 contrary, if the expressions generated at the global level are as widespread
868 as expressions generated within functions, deep unsharing is very likely the way to go.  */
871 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
872 These nodes model computations that must be done once. If we were to
873 unshare something like SAVE_EXPR(i++), the gimplification process would
874 create wrong code. However, if DATA is non-null, it must hold a pointer
875 set that is used to unshare the subtrees of these nodes. */
878 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
881 enum tree_code code
= TREE_CODE (t
);
883 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
884 copy their subtrees if we can make sure to do it only once. */
885 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
887 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
893 /* Stop at types, decls, constants like copy_tree_r. */
894 else if (TREE_CODE_CLASS (code
) == tcc_type
895 || TREE_CODE_CLASS (code
) == tcc_declaration
896 || TREE_CODE_CLASS (code
) == tcc_constant
)
899 /* Cope with the statement expression extension. */
900 else if (code
== STATEMENT_LIST
)
903 /* Leave the bulk of the work to copy_tree_r itself. */
905 copy_tree_r (tp
, walk_subtrees
, NULL
);
910 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
911 If *TP has been visited already, then *TP is deeply copied by calling
912 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
915 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
918 enum tree_code code
= TREE_CODE (t
);
920 /* Skip types, decls, and constants. But we do want to look at their
921 types and the bounds of types. Mark them as visited so we properly
922 unmark their subtrees on the unmark pass. If we've already seen them,
923 don't look down further. */
924 if (TREE_CODE_CLASS (code
) == tcc_type
925 || TREE_CODE_CLASS (code
) == tcc_declaration
926 || TREE_CODE_CLASS (code
) == tcc_constant
)
928 if (TREE_VISITED (t
))
931 TREE_VISITED (t
) = 1;
934 /* If this node has been visited already, unshare it and don't look
936 else if (TREE_VISITED (t
))
938 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
942 /* Otherwise, mark the node as visited and keep looking. */
944 TREE_VISITED (t
) = 1;
949 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
950 copy_if_shared_r callback unmodified. */
953 copy_if_shared (tree
*tp
, void *data
)
955 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
958 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
959 any nested functions. */
962 unshare_body (tree fndecl
)
964 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
965 /* If the language requires deep unsharing, we need a pointer set to make
966 sure we don't repeatedly unshare subtrees of unshareable nodes. */
967 hash_set
<tree
> *visited
968 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
970 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
971 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
972 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
977 for (cgn
= first_nested_function (cgn
); cgn
;
978 cgn
= next_nested_function (cgn
))
979 unshare_body (cgn
->decl
);
982 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
983 Subtrees are walked until the first unvisited node is encountered. */
986 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
990 /* If this node has been visited, unmark it and keep looking. */
991 if (TREE_VISITED (t
))
992 TREE_VISITED (t
) = 0;
994 /* Otherwise, don't look any deeper. */
1001 /* Unmark the visited trees rooted at *TP. */
1004 unmark_visited (tree
*tp
)
1006 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
1009 /* Likewise, but mark all trees as not visited. */
1012 unvisit_body (tree fndecl
)
1014 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
1016 unmark_visited (&DECL_SAVED_TREE (fndecl
));
1017 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
1018 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
1021 for (cgn
= first_nested_function (cgn
);
1022 cgn
; cgn
= next_nested_function (cgn
))
1023 unvisit_body (cgn
->decl
);
1026 /* Unconditionally make an unshared copy of EXPR. This is used when using
1027 stored expressions which span multiple functions, such as BINFO_VTABLE,
1028 as the normal unsharing process can't tell that they're shared. */
1031 unshare_expr (tree expr
)
1033 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1037 /* Worker for unshare_expr_without_location. */
1040 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
1043 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
1049 /* Similar to unshare_expr but also prune all expression locations
1053 unshare_expr_without_location (tree expr
)
1055 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1057 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
1061 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1062 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1063 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1064 EXPR is the location of the EXPR. */
1067 rexpr_location (tree expr
, location_t or_else
= UNKNOWN_LOCATION
)
1072 if (EXPR_HAS_LOCATION (expr
))
1073 return EXPR_LOCATION (expr
);
1075 if (TREE_CODE (expr
) != STATEMENT_LIST
)
1078 tree_stmt_iterator i
= tsi_start (expr
);
1081 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
1087 if (!found
|| !tsi_one_before_end_p (i
))
1090 return rexpr_location (tsi_stmt (i
), or_else
);
1093 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1094 rexpr_location for the potential recursion. */
1097 rexpr_has_location (tree expr
)
1099 return rexpr_location (expr
) != UNKNOWN_LOCATION
;
1103 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1104 contain statements and have a value. Assign its value to a temporary
1105 and give it void_type_node. Return the temporary, or NULL_TREE if
1106 WRAPPER was already void. */
1109 voidify_wrapper_expr (tree wrapper
, tree temp
)
1111 tree type
= TREE_TYPE (wrapper
);
1112 if (type
&& !VOID_TYPE_P (type
))
1116 /* Set p to point to the body of the wrapper. Loop until we find
1117 something that isn't a wrapper. */
1118 for (p
= &wrapper
; p
&& *p
; )
1120 switch (TREE_CODE (*p
))
1123 TREE_SIDE_EFFECTS (*p
) = 1;
1124 TREE_TYPE (*p
) = void_type_node
;
1125 /* For a BIND_EXPR, the body is operand 1. */
1126 p
= &BIND_EXPR_BODY (*p
);
1129 case CLEANUP_POINT_EXPR
:
1130 case TRY_FINALLY_EXPR
:
1131 case TRY_CATCH_EXPR
:
1132 TREE_SIDE_EFFECTS (*p
) = 1;
1133 TREE_TYPE (*p
) = void_type_node
;
1134 p
= &TREE_OPERAND (*p
, 0);
1137 case STATEMENT_LIST
:
1139 tree_stmt_iterator i
= tsi_last (*p
);
1140 TREE_SIDE_EFFECTS (*p
) = 1;
1141 TREE_TYPE (*p
) = void_type_node
;
1142 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1147 /* Advance to the last statement. Set all container types to
1149 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1151 TREE_SIDE_EFFECTS (*p
) = 1;
1152 TREE_TYPE (*p
) = void_type_node
;
1156 case TRANSACTION_EXPR
:
1157 TREE_SIDE_EFFECTS (*p
) = 1;
1158 TREE_TYPE (*p
) = void_type_node
;
1159 p
= &TRANSACTION_EXPR_BODY (*p
);
1163 /* Assume that any tree upon which voidify_wrapper_expr is
1164 directly called is a wrapper, and that its body is op0. */
1167 TREE_SIDE_EFFECTS (*p
) = 1;
1168 TREE_TYPE (*p
) = void_type_node
;
1169 p
= &TREE_OPERAND (*p
, 0);
1177 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1181 /* The wrapper is on the RHS of an assignment that we're pushing
1183 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1184 || TREE_CODE (temp
) == MODIFY_EXPR
);
1185 TREE_OPERAND (temp
, 1) = *p
;
1190 temp
= create_tmp_var (type
, "retval");
1191 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1200 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1201 a temporary through which they communicate. */
1204 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1208 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1209 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1210 gimple_call_set_lhs (*save
, tmp_var
);
1213 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1217 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1220 build_asan_poison_call_expr (tree decl
)
1222 /* Do not poison variables that have size equal to zero. */
1223 tree unit_size
= DECL_SIZE_UNIT (decl
);
1224 if (zerop (unit_size
))
1227 tree base
= build_fold_addr_expr (decl
);
1229 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1231 build_int_cst (integer_type_node
,
1236 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1237 on POISON flag, shadow memory of a DECL variable. The call will be
1238 put on location identified by IT iterator, where BEFORE flag drives
1239 position where the stmt will be put. */
1242 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1245 tree unit_size
= DECL_SIZE_UNIT (decl
);
1246 tree base
= build_fold_addr_expr (decl
);
1248 /* Do not poison variables that have size equal to zero. */
1249 if (zerop (unit_size
))
1252 /* It's necessary to have all stack variables aligned to ASAN granularity
1254 gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
1255 unsigned shadow_granularity
1256 = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE
: ASAN_SHADOW_GRANULARITY
;
1257 if (DECL_ALIGN_UNIT (decl
) <= shadow_granularity
)
1258 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* shadow_granularity
);
1260 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1263 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1264 build_int_cst (integer_type_node
, flags
),
1268 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1270 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1273 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1274 either poisons or unpoisons a DECL. Created statement is appended
1275 to SEQ_P gimple sequence. */
1278 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1280 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1281 bool before
= false;
1286 asan_poison_variable (decl
, poison
, &it
, before
);
1289 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1292 sort_by_decl_uid (const void *a
, const void *b
)
1294 const tree
*t1
= (const tree
*)a
;
1295 const tree
*t2
= (const tree
*)b
;
1297 int uid1
= DECL_UID (*t1
);
1298 int uid2
= DECL_UID (*t2
);
1302 else if (uid1
> uid2
)
1308 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1309 depending on POISON flag. Created statement is appended
1310 to SEQ_P gimple sequence. */
1313 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1315 unsigned c
= variables
->elements ();
1319 auto_vec
<tree
> sorted_variables (c
);
1321 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1322 it
!= variables
->end (); ++it
)
1323 sorted_variables
.safe_push (*it
);
1325 sorted_variables
.qsort (sort_by_decl_uid
);
1329 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1331 asan_poison_variable (var
, poison
, seq_p
);
1333 /* Add use_after_scope_memory attribute for the variable in order
1334 to prevent re-written into SSA. */
1335 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1336 DECL_ATTRIBUTES (var
)))
1337 DECL_ATTRIBUTES (var
)
1338 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1340 DECL_ATTRIBUTES (var
));
1344 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1346 static enum gimplify_status
1347 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1349 tree bind_expr
= *expr_p
;
1350 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1351 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1354 gimple_seq body
, cleanup
;
1356 location_t start_locus
= 0, end_locus
= 0;
1357 tree ret_clauses
= NULL
;
1359 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1361 /* Mark variables seen in this bind expr. */
1362 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1366 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1370 && !is_global_var (t
)
1371 && DECL_CONTEXT (t
) == current_function_decl
1373 && (attr
= lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t
)))
1376 gcc_assert (!DECL_HAS_VALUE_EXPR_P (t
));
1377 tree alloc
= TREE_PURPOSE (TREE_VALUE (attr
));
1378 tree align
= TREE_VALUE (TREE_VALUE (attr
));
1379 /* Allocate directives that appear in a target region must specify
1380 an allocator clause unless a requires directive with the
1381 dynamic_allocators clause is present in the same compilation
1383 bool missing_dyn_alloc
= false;
1384 if (alloc
== NULL_TREE
1385 && ((omp_requires_mask
& OMP_REQUIRES_DYNAMIC_ALLOCATORS
)
1388 /* This comes too early for omp_discover_declare_target...,
1389 but should at least catch the most common cases. */
1391 = cgraph_node::get (current_function_decl
)->offloadable
;
1392 for (struct gimplify_omp_ctx
*ctx2
= ctx
;
1393 ctx2
&& !missing_dyn_alloc
; ctx2
= ctx2
->outer_context
)
1394 if (ctx2
->code
== OMP_TARGET
)
1395 missing_dyn_alloc
= true;
1397 if (missing_dyn_alloc
)
1398 error_at (DECL_SOURCE_LOCATION (t
),
1399 "%<allocate%> directive for %qD inside a target "
1400 "region must specify an %<allocator%> clause", t
);
1401 /* Skip for omp_default_mem_alloc (= 1),
1402 unless align is present. */
1403 else if (!errorcount
1404 && (align
!= NULL_TREE
1405 || alloc
== NULL_TREE
1406 || !integer_onep (alloc
)))
1408 /* Fortran might already use a pointer type internally;
1409 use that pointer except for type(C_ptr) and type(C_funptr);
1410 note that normal proc pointers are rejected. */
1411 tree type
= TREE_TYPE (t
);
1413 if (lang_GNU_Fortran ()
1414 && POINTER_TYPE_P (type
)
1415 && TREE_TYPE (type
) != void_type_node
1416 && TREE_CODE (TREE_TYPE (type
)) != FUNCTION_TYPE
)
1418 type
= TREE_TYPE (type
);
1423 tmp
= build_pointer_type (type
);
1424 v
= create_tmp_var (tmp
, get_name (t
));
1425 DECL_IGNORED_P (v
) = 0;
1427 = tree_cons (get_identifier ("omp allocate var"),
1428 build_tree_list (NULL_TREE
, t
),
1429 remove_attribute ("omp allocate",
1430 DECL_ATTRIBUTES (t
)));
1431 tmp
= build_fold_indirect_ref (v
);
1432 TREE_THIS_NOTRAP (tmp
) = 1;
1433 SET_DECL_VALUE_EXPR (t
, tmp
);
1434 DECL_HAS_VALUE_EXPR_P (t
) = 1;
1436 tree sz
= TYPE_SIZE_UNIT (type
);
1437 /* The size to use in Fortran might not match TYPE_SIZE_UNIT;
1438 hence, for some decls, a size variable is saved in the
1439 attributes; use it, if available. */
1440 if (TREE_CHAIN (TREE_VALUE (attr
))
1441 && TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
)))
1443 TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
)))))
1445 sz
= TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
)));
1446 sz
= TREE_PURPOSE (sz
);
1448 if (alloc
== NULL_TREE
)
1449 alloc
= build_zero_cst (ptr_type_node
);
1450 if (align
== NULL_TREE
)
1451 align
= build_int_cst (size_type_node
, DECL_ALIGN_UNIT (t
));
1453 align
= build_int_cst (size_type_node
,
1454 MAX (tree_to_uhwi (align
),
1455 DECL_ALIGN_UNIT (t
)));
1456 location_t loc
= DECL_SOURCE_LOCATION (t
);
1457 tmp
= builtin_decl_explicit (BUILT_IN_GOMP_ALLOC
);
1458 tmp
= build_call_expr_loc (loc
, tmp
, 3, align
, sz
, alloc
);
1459 tmp
= fold_build2_loc (loc
, MODIFY_EXPR
, TREE_TYPE (v
), v
,
1460 fold_convert (TREE_TYPE (v
), tmp
));
1461 gcc_assert (BIND_EXPR_BODY (bind_expr
) != NULL_TREE
);
1462 /* Ensure that either TREE_CHAIN (TREE_VALUE (attr) is set
1463 and GOMP_FREE added here or that DECL_HAS_VALUE_EXPR_P (t)
1464 is set, using in a condition much further below. */
1465 gcc_assert (DECL_HAS_VALUE_EXPR_P (t
)
1466 || TREE_CHAIN (TREE_VALUE (attr
)));
1467 if (TREE_CHAIN (TREE_VALUE (attr
)))
1469 /* Fortran is special as it does not have properly nest
1470 declarations in blocks. And as there is no
1471 initializer, there is also no expression to look for.
1472 Hence, the FE makes the statement list of the
1473 try-finally block available. We can put the GOMP_alloc
1474 at the top, unless an allocator or size expression
1475 requires to put it afterward; note that the size is
1476 always later in generated code; for strings, no
1477 size expr but still an expr might be available.
1478 As LTO does not handle a statement list, 'sl' has
1479 to be removed; done so by removing the attribute. */
1481 = remove_attribute ("omp allocate",
1482 DECL_ATTRIBUTES (t
));
1483 tree sl
= TREE_PURPOSE (TREE_CHAIN (TREE_VALUE (attr
)));
1484 tree_stmt_iterator e
= tsi_start (sl
);
1485 tree needle
= NULL_TREE
;
1486 if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
))))
1488 needle
= TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
)));
1489 needle
= (TREE_VALUE (needle
) ? TREE_VALUE (needle
)
1492 else if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr
))))
1494 else if (DECL_P (alloc
) && DECL_ARTIFICIAL (alloc
))
1497 if (needle
!= NULL_TREE
)
1499 while (!tsi_end_p (e
))
1502 || (TREE_CODE (*e
) == MODIFY_EXPR
1503 && TREE_OPERAND (*e
, 0) == needle
))
1507 gcc_assert (!tsi_end_p (e
));
1509 tsi_link_after (&e
, tmp
, TSI_SAME_STMT
);
1511 /* As the cleanup is in BIND_EXPR_BODY, GOMP_free is added
1512 here; for C/C++ it will be added in the 'cleanup'
1513 section after gimplification. But Fortran already has
1514 a try-finally block. */
1515 sl
= TREE_VALUE (TREE_CHAIN (TREE_VALUE (attr
)));
1517 tmp
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
1518 tmp
= build_call_expr_loc (EXPR_LOCATION (*e
), tmp
, 2, v
,
1519 build_zero_cst (ptr_type_node
));
1520 tsi_link_after (&e
, tmp
, TSI_SAME_STMT
);
1521 tmp
= build_clobber (TREE_TYPE (v
), CLOBBER_STORAGE_END
);
1522 tmp
= fold_build2_loc (loc
, MODIFY_EXPR
, TREE_TYPE (v
), v
,
1523 fold_convert (TREE_TYPE (v
), tmp
));
1525 tsi_link_after (&e
, tmp
, TSI_SAME_STMT
);
1529 gcc_assert (TREE_CODE (BIND_EXPR_BODY (bind_expr
))
1531 tree_stmt_iterator e
;
1532 e
= tsi_start (BIND_EXPR_BODY (bind_expr
));
1533 while (!tsi_end_p (e
))
1535 if ((TREE_CODE (*e
) == DECL_EXPR
1536 && TREE_OPERAND (*e
, 0) == t
)
1537 || (TREE_CODE (*e
) == CLEANUP_POINT_EXPR
1538 && (TREE_CODE (TREE_OPERAND (*e
, 0))
1540 && (TREE_OPERAND (TREE_OPERAND (*e
, 0), 0)
1545 gcc_assert (!tsi_end_p (e
));
1546 tsi_link_before (&e
, tmp
, TSI_SAME_STMT
);
1551 /* Mark variable as local. */
1552 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
))
1554 if (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1555 || splay_tree_lookup (ctx
->variables
,
1556 (splay_tree_key
) t
) == NULL
)
1558 int flag
= GOVD_LOCAL
;
1559 if (ctx
->region_type
== ORT_SIMD
1560 && TREE_ADDRESSABLE (t
)
1561 && !TREE_STATIC (t
))
1563 if (TREE_CODE (DECL_SIZE_UNIT (t
)) != INTEGER_CST
)
1564 ctx
->add_safelen1
= true;
1566 flag
= GOVD_PRIVATE
;
1568 omp_add_variable (ctx
, t
, flag
| GOVD_SEEN
);
1570 /* Static locals inside of target construct or offloaded
1571 routines need to be "omp declare target". */
1572 if (TREE_STATIC (t
))
1573 for (; ctx
; ctx
= ctx
->outer_context
)
1574 if ((ctx
->region_type
& ORT_TARGET
) != 0)
1576 if (!lookup_attribute ("omp declare target",
1577 DECL_ATTRIBUTES (t
)))
1579 tree id
= get_identifier ("omp declare target");
1581 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (t
));
1582 varpool_node
*node
= varpool_node::get (t
);
1585 node
->offloadable
= 1;
1586 if (ENABLE_OFFLOADING
&& !DECL_EXTERNAL (t
))
1588 g
->have_offload
= true;
1590 vec_safe_push (offload_vars
, t
);
1598 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1600 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1601 cfun
->has_local_explicit_reg_vars
= true;
1605 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1606 BIND_EXPR_BLOCK (bind_expr
));
1607 gimple_push_bind_expr (bind_stmt
);
1609 gimplify_ctxp
->keep_stack
= false;
1610 gimplify_ctxp
->save_stack
= false;
1612 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1614 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1615 gimple_bind_set_body (bind_stmt
, body
);
1617 /* Source location wise, the cleanup code (stack_restore and clobbers)
1618 belongs to the end of the block, so propagate what we have. The
1619 stack_save operation belongs to the beginning of block, which we can
1620 infer from the bind_expr directly if the block has no explicit
1622 if (BIND_EXPR_BLOCK (bind_expr
))
1624 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1625 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1627 if (start_locus
== 0)
1628 start_locus
= EXPR_LOCATION (bind_expr
);
1633 /* Add clobbers for all variables that go out of scope. */
1634 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1637 && !is_global_var (t
)
1638 && DECL_CONTEXT (t
) == current_function_decl
)
1641 && DECL_HAS_VALUE_EXPR_P (t
)
1643 && lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t
)))
1645 /* For Fortran, TREE_CHAIN (TREE_VALUE (attr)) is set, which
1646 causes that the GOMP_free call is already added above;
1647 and "omp allocate" is removed from DECL_ATTRIBUTES. */
1648 tree v
= TREE_OPERAND (DECL_VALUE_EXPR (t
), 0);
1649 tree tmp
= builtin_decl_explicit (BUILT_IN_GOMP_FREE
);
1650 tmp
= build_call_expr_loc (end_locus
, tmp
, 2, v
,
1651 build_zero_cst (ptr_type_node
));
1652 gimplify_and_add (tmp
, &cleanup
);
1653 gimple
*clobber_stmt
;
1654 tmp
= build_clobber (TREE_TYPE (v
), CLOBBER_STORAGE_END
);
1655 clobber_stmt
= gimple_build_assign (v
, tmp
);
1656 gimple_set_location (clobber_stmt
, end_locus
);
1657 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1659 if (!DECL_HARD_REGISTER (t
)
1660 && !TREE_THIS_VOLATILE (t
)
1661 && !DECL_HAS_VALUE_EXPR_P (t
)
1662 /* Only care for variables that have to be in memory. Others
1663 will be rewritten into SSA names, hence moved to the
1665 && !is_gimple_reg (t
)
1666 && flag_stack_reuse
!= SR_NONE
)
1668 tree clobber
= build_clobber (TREE_TYPE (t
), CLOBBER_STORAGE_END
);
1669 gimple
*clobber_stmt
;
1670 clobber_stmt
= gimple_build_assign (t
, clobber
);
1671 gimple_set_location (clobber_stmt
, end_locus
);
1672 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1675 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1678 if (DECL_HAS_VALUE_EXPR_P (key
))
1680 key
= DECL_VALUE_EXPR (key
);
1681 if (INDIRECT_REF_P (key
))
1682 key
= TREE_OPERAND (key
, 0);
1684 tree
*c
= oacc_declare_returns
->get (key
);
1688 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1690 ret_clauses
= unshare_expr (*c
);
1692 oacc_declare_returns
->remove (key
);
1694 if (oacc_declare_returns
->is_empty ())
1696 delete oacc_declare_returns
;
1697 oacc_declare_returns
= NULL
;
1703 if (asan_poisoned_variables
!= NULL
1704 && asan_poisoned_variables
->contains (t
))
1706 asan_poisoned_variables
->remove (t
);
1707 asan_poison_variable (t
, true, &cleanup
);
1710 if (gimplify_ctxp
->live_switch_vars
!= NULL
1711 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1712 gimplify_ctxp
->live_switch_vars
->remove (t
);
1715 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1716 the stack space allocated to the VLAs. */
1717 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1719 gcall
*stack_restore
;
1721 /* Save stack on entry and restore it on exit. Add a try_finally
1722 block to achieve this. */
1723 build_stack_save_restore (&stack_save
, &stack_restore
);
1725 gimple_set_location (stack_save
, start_locus
);
1726 gimple_set_location (stack_restore
, end_locus
);
1728 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1734 gimple_stmt_iterator si
= gsi_start (cleanup
);
1736 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1738 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1744 gimple_seq new_body
;
1747 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1748 GIMPLE_TRY_FINALLY
);
1751 gimplify_seq_add_stmt (&new_body
, stack_save
);
1752 gimplify_seq_add_stmt (&new_body
, gs
);
1753 gimple_bind_set_body (bind_stmt
, new_body
);
1756 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1757 if (!gimplify_ctxp
->keep_stack
)
1758 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1759 gimplify_ctxp
->save_stack
= old_save_stack
;
1761 gimple_pop_bind_expr ();
1763 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1771 *expr_p
= NULL_TREE
;
1775 /* Maybe add early return predict statement to PRE_P sequence. */
1778 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1780 /* If we are not in a conditional context, add PREDICT statement. */
1781 if (gimple_conditional_context ())
1783 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1785 gimplify_seq_add_stmt (pre_p
, predict
);
1789 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1790 GIMPLE value, it is assigned to a new temporary and the statement is
1791 re-written to return the temporary.
1793 PRE_P points to the sequence where side effects that must happen before
1794 STMT should be stored. */
1796 static enum gimplify_status
1797 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1800 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1801 tree result_decl
, result
;
1803 if (ret_expr
== error_mark_node
)
1807 || TREE_CODE (ret_expr
) == RESULT_DECL
)
1809 maybe_add_early_return_predict_stmt (pre_p
);
1810 greturn
*ret
= gimple_build_return (ret_expr
);
1811 copy_warning (ret
, stmt
);
1812 gimplify_seq_add_stmt (pre_p
, ret
);
1816 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1817 result_decl
= NULL_TREE
;
1818 else if (TREE_CODE (ret_expr
) == COMPOUND_EXPR
)
1820 /* Used in C++ for handling EH cleanup of the return value if a local
1821 cleanup throws. Assume the front-end knows what it's doing. */
1822 result_decl
= DECL_RESULT (current_function_decl
);
1823 /* But crash if we end up trying to modify ret_expr below. */
1824 ret_expr
= NULL_TREE
;
1828 result_decl
= TREE_OPERAND (ret_expr
, 0);
1830 /* See through a return by reference. */
1831 if (INDIRECT_REF_P (result_decl
))
1832 result_decl
= TREE_OPERAND (result_decl
, 0);
1834 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1835 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1836 && TREE_CODE (result_decl
) == RESULT_DECL
);
1839 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1840 Recall that aggregate_value_p is FALSE for any aggregate type that is
1841 returned in registers. If we're returning values in registers, then
1842 we don't want to extend the lifetime of the RESULT_DECL, particularly
1843 across another call. In addition, for those aggregates for which
1844 hard_function_value generates a PARALLEL, we'll die during normal
1845 expansion of structure assignments; there's special code in expand_return
1846 to handle this case that does not exist in expand_expr. */
1849 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1851 if (!poly_int_tree_p (DECL_SIZE (result_decl
)))
1853 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1854 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1855 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1856 should be effectively allocated by the caller, i.e. all calls to
1857 this function must be subject to the Return Slot Optimization. */
1858 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1859 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1861 result
= result_decl
;
1863 else if (gimplify_ctxp
->return_temp
)
1864 result
= gimplify_ctxp
->return_temp
;
1867 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1869 /* ??? With complex control flow (usually involving abnormal edges),
1870 we can wind up warning about an uninitialized value for this. Due
1871 to how this variable is constructed and initialized, this is never
1872 true. Give up and never warn. */
1873 suppress_warning (result
, OPT_Wuninitialized
);
1875 gimplify_ctxp
->return_temp
= result
;
1878 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1879 Then gimplify the whole thing. */
1880 if (result
!= result_decl
)
1881 TREE_OPERAND (ret_expr
, 0) = result
;
1883 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1885 maybe_add_early_return_predict_stmt (pre_p
);
1886 ret
= gimple_build_return (result
);
1887 copy_warning (ret
, stmt
);
1888 gimplify_seq_add_stmt (pre_p
, ret
);
1893 /* Gimplify a variable-length array DECL. */
1896 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1898 /* This is a variable-sized decl. Simplify its size and mark it
1899 for deferred expansion. */
1900 tree t
, addr
, ptr_type
;
1902 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1903 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1905 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1906 if (DECL_HAS_VALUE_EXPR_P (decl
))
1909 /* All occurrences of this decl in final gimplified code will be
1910 replaced by indirection. Setting DECL_VALUE_EXPR does two
1911 things: First, it lets the rest of the gimplifier know what
1912 replacement to use. Second, it lets the debug info know
1913 where to find the value. */
1914 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1915 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1916 DECL_IGNORED_P (addr
) = 0;
1917 t
= build_fold_indirect_ref (addr
);
1918 TREE_THIS_NOTRAP (t
) = 1;
1919 SET_DECL_VALUE_EXPR (decl
, t
);
1920 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1922 t
= build_alloca_call_expr (DECL_SIZE_UNIT (decl
), DECL_ALIGN (decl
),
1923 max_int_size_in_bytes (TREE_TYPE (decl
)));
1924 /* The call has been built for a variable-sized object. */
1925 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1926 t
= fold_convert (ptr_type
, t
);
1927 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1929 gimplify_and_add (t
, seq_p
);
1931 /* Record the dynamic allocation associated with DECL if requested. */
1932 if (flag_callgraph_info
& CALLGRAPH_INFO_DYNAMIC_ALLOC
)
1933 record_dynamic_alloc (decl
);
1936 /* A helper function to be called via walk_tree. Mark all labels under *TP
1937 as being forced. To be called for DECL_INITIAL of static variables. */
1940 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1944 if (TREE_CODE (*tp
) == LABEL_DECL
)
1946 FORCED_LABEL (*tp
) = 1;
1947 cfun
->has_forced_label_in_static
= 1;
1953 /* Generate an initialization to automatic variable DECL based on INIT_TYPE.
1954 Build a call to internal const function DEFERRED_INIT:
1955 1st argument: SIZE of the DECL;
1956 2nd argument: INIT_TYPE;
1957 3rd argument: NAME of the DECL;
1959 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
1962 gimple_add_init_for_auto_var (tree decl
,
1963 enum auto_init_type init_type
,
1966 gcc_assert (auto_var_p (decl
));
1967 gcc_assert (init_type
> AUTO_INIT_UNINITIALIZED
);
1968 location_t loc
= EXPR_LOCATION (decl
);
1969 tree decl_size
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1972 = build_int_cst (integer_type_node
, (int) init_type
);
1974 tree decl_name
= NULL_TREE
;
1975 if (DECL_NAME (decl
))
1977 decl_name
= build_string_literal (DECL_NAME (decl
));
1981 char decl_name_anonymous
[3 + (HOST_BITS_PER_INT
+ 2) / 3];
1982 sprintf (decl_name_anonymous
, "D.%u", DECL_UID (decl
));
1983 decl_name
= build_string_literal (decl_name_anonymous
);
1986 tree call
= build_call_expr_internal_loc (loc
, IFN_DEFERRED_INIT
,
1987 TREE_TYPE (decl
), 3,
1988 decl_size
, init_type_node
,
1991 gimplify_assign (decl
, call
, seq_p
);
1994 /* Generate padding initialization for automatic vairable DECL.
1995 C guarantees that brace-init with fewer initializers than members
1996 aggregate will initialize the rest of the aggregate as-if it were
1997 static initialization. In turn static initialization guarantees
1998 that padding is initialized to zero. So, we always initialize paddings
1999 to zeroes regardless INIT_TYPE.
2000 To do the padding initialization, we insert a call to
2001 __builtin_clear_padding (&decl, 0, for_auto_init = true).
2002 Note, we add an additional dummy argument for __builtin_clear_padding,
2003 'for_auto_init' to distinguish whether this call is for automatic
2004 variable initialization or not.
2007 gimple_add_padding_init_for_auto_var (tree decl
, bool is_vla
,
2010 tree addr_of_decl
= NULL_TREE
;
2011 tree fn
= builtin_decl_explicit (BUILT_IN_CLEAR_PADDING
);
2015 /* The temporary address variable for this vla should be
2016 created in gimplify_vla_decl. */
2017 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
2018 gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl
)));
2019 addr_of_decl
= TREE_OPERAND (DECL_VALUE_EXPR (decl
), 0);
2023 mark_addressable (decl
);
2024 addr_of_decl
= build_fold_addr_expr (decl
);
2027 gimple
*call
= gimple_build_call (fn
, 2, addr_of_decl
,
2028 build_one_cst (TREE_TYPE (addr_of_decl
)));
2029 gimplify_seq_add_stmt (seq_p
, call
);
2032 /* Return true if the DECL need to be automaticly initialized by the
2035 is_var_need_auto_init (tree decl
)
2037 if (auto_var_p (decl
)
2038 && (TREE_CODE (decl
) != VAR_DECL
2039 || !DECL_HARD_REGISTER (decl
))
2040 && (flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
)
2041 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl
)))
2042 && !OPAQUE_TYPE_P (TREE_TYPE (decl
))
2043 && !is_empty_type (TREE_TYPE (decl
)))
2048 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
2049 and initialization explicit. */
2051 static enum gimplify_status
2052 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
2054 tree stmt
= *stmt_p
;
2055 tree decl
= DECL_EXPR_DECL (stmt
);
2057 *stmt_p
= NULL_TREE
;
2059 if (TREE_TYPE (decl
) == error_mark_node
)
2062 if ((TREE_CODE (decl
) == TYPE_DECL
2064 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
2066 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
2067 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
2068 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
2071 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
2072 in case its size expressions contain problematic nodes like CALL_EXPR. */
2073 if (TREE_CODE (decl
) == TYPE_DECL
2074 && DECL_ORIGINAL_TYPE (decl
)
2075 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
2077 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
2078 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
2079 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
2082 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
2084 tree init
= DECL_INITIAL (decl
);
2085 bool is_vla
= false;
2086 /* Check whether a decl has FE created VALUE_EXPR here BEFORE
2087 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
2088 If the decl has VALUE_EXPR that was created by FE (usually
2089 C++FE), it's a proxy varaible, and FE already initialized
2090 the VALUE_EXPR of it, we should not initialize it anymore. */
2091 bool decl_had_value_expr_p
= DECL_HAS_VALUE_EXPR_P (decl
);
2094 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl
), &size
)
2095 || (!TREE_STATIC (decl
)
2096 && flag_stack_check
== GENERIC_STACK_CHECK
2098 (unsigned HOST_WIDE_INT
) STACK_CHECK_MAX_VAR_SIZE
)))
2100 gimplify_vla_decl (decl
, seq_p
);
2104 if (asan_poisoned_variables
2106 && TREE_ADDRESSABLE (decl
)
2107 && !TREE_STATIC (decl
)
2108 && !DECL_HAS_VALUE_EXPR_P (decl
)
2109 && DECL_ALIGN (decl
) <= MAX_SUPPORTED_STACK_ALIGNMENT
2110 && dbg_cnt (asan_use_after_scope
)
2111 && !gimplify_omp_ctxp
2112 /* GNAT introduces temporaries to hold return values of calls in
2113 initializers of variables defined in other units, so the
2114 declaration of the variable is discarded completely. We do not
2115 want to issue poison calls for such dropped variables. */
2116 && (DECL_SEEN_IN_BIND_EXPR_P (decl
)
2117 || (DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)))
2119 asan_poisoned_variables
->add (decl
);
2120 asan_poison_variable (decl
, false, seq_p
);
2121 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
2122 gimplify_ctxp
->live_switch_vars
->add (decl
);
2125 /* Some front ends do not explicitly declare all anonymous
2126 artificial variables. We compensate here by declaring the
2127 variables, though it would be better if the front ends would
2128 explicitly declare them. */
2129 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
2130 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
2131 gimple_add_tmp_var (decl
);
2133 if (init
&& init
!= error_mark_node
)
2135 if (!TREE_STATIC (decl
))
2137 DECL_INITIAL (decl
) = NULL_TREE
;
2138 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
2139 gimplify_and_add (init
, seq_p
);
2141 /* Clear TREE_READONLY if we really have an initialization. */
2142 if (!DECL_INITIAL (decl
)
2143 && !omp_privatize_by_reference (decl
))
2144 TREE_READONLY (decl
) = 0;
2147 /* We must still examine initializers for static variables
2148 as they may contain a label address. */
2149 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
2151 /* When there is no explicit initializer, if the user requested,
2152 We should insert an artifical initializer for this automatic
2154 else if (is_var_need_auto_init (decl
)
2155 && !decl_had_value_expr_p
)
2157 gimple_add_init_for_auto_var (decl
,
2160 /* The expanding of a call to the above .DEFERRED_INIT will apply
2161 block initialization to the whole space covered by this variable.
2162 As a result, all the paddings will be initialized to zeroes
2163 for zero initialization and 0xFE byte-repeatable patterns for
2164 pattern initialization.
2165 In order to make the paddings as zeroes for pattern init, We
2166 should add a call to __builtin_clear_padding to clear the
2167 paddings to zero in compatiple with CLANG.
2168 We cannot insert this call if the variable is a gimple register
2169 since __builtin_clear_padding will take the address of the
2170 variable. As a result, if a long double/_Complex long double
2171 variable will spilled into stack later, its padding is 0XFE. */
2172 if (flag_auto_var_init
== AUTO_INIT_PATTERN
2173 && !is_gimple_reg (decl
)
2174 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl
)))
2175 gimple_add_padding_init_for_auto_var (decl
, is_vla
, seq_p
);
2182 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
2183 and replacing the LOOP_EXPR with goto, but if the loop contains an
2184 EXIT_EXPR, we need to append a label for it to jump to. */
2186 static enum gimplify_status
2187 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2189 tree saved_label
= gimplify_ctxp
->exit_label
;
2190 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
2192 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
2194 gimplify_ctxp
->exit_label
= NULL_TREE
;
2196 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
2198 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
2200 if (gimplify_ctxp
->exit_label
)
2201 gimplify_seq_add_stmt (pre_p
,
2202 gimple_build_label (gimplify_ctxp
->exit_label
));
2204 gimplify_ctxp
->exit_label
= saved_label
;
2210 /* Gimplify a statement list onto a sequence. These may be created either
2211 by an enlightened front-end, or by shortcut_cond_expr. */
2213 static enum gimplify_status
2214 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
2216 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
2218 tree_stmt_iterator i
= tsi_start (*expr_p
);
2220 while (!tsi_end_p (i
))
2222 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
2236 /* Emit warning for the unreachable statment STMT if needed.
2237 Return the gimple itself when the warning is emitted, otherwise
2240 emit_warn_switch_unreachable (gimple
*stmt
)
2242 if (gimple_code (stmt
) == GIMPLE_GOTO
2243 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
2244 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
2245 /* Don't warn for compiler-generated gotos. These occur
2246 in Duff's devices, for example. */
2248 else if ((flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
)
2249 && ((gimple_call_internal_p (stmt
, IFN_DEFERRED_INIT
))
2250 || (gimple_call_builtin_p (stmt
, BUILT_IN_CLEAR_PADDING
)
2251 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt
, 1)))
2252 || (is_gimple_assign (stmt
)
2253 && gimple_assign_single_p (stmt
)
2254 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
2255 && gimple_call_internal_p (
2256 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
)),
2257 IFN_DEFERRED_INIT
))))
2258 /* Don't warn for compiler-generated initializations for
2259 -ftrivial-auto-var-init.
2261 case 1: a call to .DEFERRED_INIT;
2262 case 2: a call to __builtin_clear_padding with the 2nd argument is
2263 present and non-zero;
2264 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2265 that has the LHS of .DEFERRED_INIT as the RHS as following:
2266 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2270 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
2271 "statement will never be executed");
2275 /* Callback for walk_gimple_seq. */
/* Walk callback used on a switch body: issues -Wswitch-unreachable once
   for the first "real" statement before any label (remembered through
   WI->info), and -Wtrivial-auto-var-init for .DEFERRED_INIT calls that
   precede any reachable label.
   Fix vs. original: the diagnostic text was missing the space before the
   option name and spelled the flag %<-ftrivial-auto-var_init%> with an
   underscore; the actual option is -ftrivial-auto-var-init.  */
2278 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator
*gsi_p
,
2279 bool *handled_ops_p
,
2280 struct walk_stmt_info
*wi
)
2282 gimple
*stmt
= gsi_stmt (*gsi_p
);
/* Non-NULL WI->info means the unreachable warning was already emitted.  */
2283 bool unreachable_issued
= wi
->info
!= NULL
;
2285 *handled_ops_p
= true;
2286 switch (gimple_code (stmt
))
2289 /* A compiler-generated cleanup or a user-written try block.
2290 If it's empty, don't dive into it--that would result in
2291 worse location info. */
2292 if (gimple_try_eval (stmt
) == NULL
)
2294 if (warn_switch_unreachable
&& !unreachable_issued
)
2295 wi
->info
= emit_warn_switch_unreachable (stmt
);
2297 /* Stop when auto var init warning is not on. */
2298 if (!warn_trivial_auto_var_init
)
2299 return integer_zero_node
;
2304 case GIMPLE_EH_FILTER
:
2305 case GIMPLE_TRANSACTION
:
2306 /* Walk the sub-statements. */
2307 *handled_ops_p
= false;
2311 /* Ignore these. We may generate them before declarations that
2312 are never executed. If there's something to warn about,
2313 there will be non-debug stmts too, and we'll catch those. */
2317 /* Stop till the first Label. */
2318 return integer_zero_node
;
2320 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2322 *handled_ops_p
= false;
2325 if (warn_trivial_auto_var_init
2326 && flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
2327 && gimple_call_internal_p (stmt
, IFN_DEFERRED_INIT
))
2329 /* Get the variable name from the 3rd argument of call. */
2330 tree var_name
= gimple_call_arg (stmt
, 2);
2331 var_name
= TREE_OPERAND (TREE_OPERAND (var_name
, 0), 0);
2332 const char *var_name_str
= TREE_STRING_POINTER (var_name
);
2334 warning_at (gimple_location (stmt
), OPT_Wtrivial_auto_var_init
,
2335 "%qs cannot be initialized with "
2336 "%<-ftrivial-auto-var-init%>",
2343 /* check the first "real" statement (not a decl/lexical scope/...), issue
2344 warning if needed. */
2345 if (warn_switch_unreachable
&& !unreachable_issued
)
2346 wi
->info
= emit_warn_switch_unreachable (stmt
);
2347 /* Stop when auto var init warning is not on. */
2348 if (!warn_trivial_auto_var_init
)
2349 return integer_zero_node
;
2356 /* Possibly warn about unreachable statements between switch's controlling
2357 expression and the first case. Also warn about -ftrivial-auto-var-init
2358 cannot initialize the auto variable under such situation.
2359 SEQ is the body of a switch expression. */
/* Driver: bails out early when both warnings are off (or for Fortran,
   where the warning interacts badly with optimizations), then walks SEQ
   once with warn_switch_unreachable_and_auto_init_r.  */
2362 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq
)
2364 if ((!warn_switch_unreachable
&& !warn_trivial_auto_var_init
)
2365 /* This warning doesn't play well with Fortran when optimizations
2367 || lang_GNU_Fortran ()
/* Zero-initialized walk_stmt_info: wi.info doubles as the
   "unreachable warning already issued" flag for the callback.  */
2371 struct walk_stmt_info wi
;
2373 memset (&wi
, 0, sizeof (wi
));
2374 walk_gimple_seq (seq
, warn_switch_unreachable_and_auto_init_r
, NULL
, &wi
);
2378 /* A label entry that pairs label and a location. */
2385 /* Find LABEL in vector of label entries VEC. */
/* Linear search over VEC; returns the matching entry
   (presumably NULL when absent -- the fall-off return is
   outside this extract; TODO confirm).  */
2387 static struct label_entry
*
2388 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
2391 struct label_entry
*l
;
2393 FOR_EACH_VEC_ELT (*vec
, i
, l
)
2394 if (l
->label
== label
)
2399 /* Return true if LABEL, a LABEL_DECL, represents a case label
2400 in a vector of labels CASES. */
/* Compares LABEL against CASE_LABEL of each element of CASES.  */
2403 case_label_p (const vec
<tree
> *cases
, tree label
)
2408 FOR_EACH_VEC_ELT (*cases
, i
, l
)
2409 if (CASE_LABEL (l
) == label
)
2414 /* Find the last nondebug statement in a scope STMT. */
/* Recurses through GIMPLE_BIND bodies and GIMPLE_TRY eval/cleanup
   sequences to locate the last non-debug statement; for a
   GIMPLE_TRY_FINALLY whose eval part may fall through (and is not an
   IFN_FALLTHROUGH marker), the cleanup sequence is the effective last
   statement.  */
2417 last_stmt_in_scope (gimple
*stmt
)
2422 switch (gimple_code (stmt
))
2426 gbind
*bind
= as_a
<gbind
*> (stmt
);
2427 stmt
= gimple_seq_last_nondebug_stmt (gimple_bind_body (bind
));
2428 return last_stmt_in_scope (stmt
);
2433 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
2434 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt
));
2435 gimple
*last_eval
= last_stmt_in_scope (stmt
);
/* NOTE(review): last_eval may be NULL here; gimple_stmt_may_fallthru
   presumably tolerates NULL -- TODO confirm against gimple.cc.  */
2436 if (gimple_stmt_may_fallthru (last_eval
)
2437 && (last_eval
== NULL
2438 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
2439 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
2441 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt
));
2442 return last_stmt_in_scope (stmt
);
2456 /* Collect labels that may fall through into LABELS and return the statement
2457 preceding another case label, or a user-defined label. Store a location
2458 useful to give warnings at *PREVLOC (usually the location of the returned
2459 statement or of its surrounding scope). */
/* Advances *GSI_P through the switch body, accumulating label_entry
   records for artificial labels that can be fallen into, until it
   reaches a case label or a user-written label.  PREV tracks the last
   interesting (non-label, non-ASAN_MARK, non-PREDICT, non-debug)
   statement seen.  */
2462 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
2463 auto_vec
<struct label_entry
> *labels
,
2464 location_t
*prevloc
)
2466 gimple
*prev
= NULL
;
2468 *prevloc
= UNKNOWN_LOCATION
;
2471 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
)
2473 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2474 which starts on a GIMPLE_SWITCH and ends with a break label.
2475 Handle that as a single statement that can fall through. */
2476 gbind
*bind
= as_a
<gbind
*> (gsi_stmt (*gsi_p
));
2477 gimple
*first
= gimple_seq_first_stmt (gimple_bind_body (bind
));
2478 gimple
*last
= gimple_seq_last_stmt (gimple_bind_body (bind
));
2480 && gimple_code (first
) == GIMPLE_SWITCH
2481 && gimple_code (last
) == GIMPLE_LABEL
)
2483 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2484 if (SWITCH_BREAK_LABEL_P (label
))
2492 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
2493 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
2495 /* Nested scope. Only look at the last statement of
2496 the innermost scope. */
2497 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
2498 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
2502 /* It might be a label without a location. Use the
2503 location of the scope then. */
2504 if (!gimple_has_location (prev
))
2505 *prevloc
= bind_loc
;
2511 /* Ifs are tricky. */
2512 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
2514 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
2515 tree false_lab
= gimple_cond_false_label (cond_stmt
);
2516 location_t if_loc
= gimple_location (cond_stmt
);
2519 if (i > 1) goto <D.2259>; else goto D;
2520 we can't do much with the else-branch. */
/* User-written (non-artificial) else-labels stop the scan.  */
2521 if (!DECL_ARTIFICIAL (false_lab
))
2524 /* Go on until the false label, then one step back. */
2525 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
2527 gimple
*stmt
= gsi_stmt (*gsi_p
);
2528 if (gimple_code (stmt
) == GIMPLE_LABEL
2529 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
2533 /* Not found? Oops. */
2534 if (gsi_end_p (*gsi_p
))
2537 /* A dead label can't fall through. */
2538 if (!UNUSED_LABEL_P (false_lab
))
2540 struct label_entry l
= { false_lab
, if_loc
};
2541 labels
->safe_push (l
);
2544 /* Go to the last statement of the then branch. */
2547 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2553 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
2554 && !gimple_has_location (gsi_stmt (*gsi_p
)))
2556 /* Look at the statement before, it might be
2557 attribute fallthrough, in which case don't warn. */
2559 bool fallthru_before_dest
2560 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
2562 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
2563 if (!fallthru_before_dest
)
2565 struct label_entry l
= { goto_dest
, if_loc
};
2566 labels
->safe_push (l
);
2569 /* This case is about
2570 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2575 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2576 through to #3. So set PREV to #1. */
2577 else if (UNUSED_LABEL_P (false_lab
))
2578 prev
= gsi_stmt (*gsi_p
);
2580 /* And move back. */
2584 /* Remember the last statement. Skip labels that are of no interest
2586 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2588 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
2589 if (find_label_entry (labels
, label
))
2590 prev
= gsi_stmt (*gsi_p
);
/* ASAN markers and branch predictions are not "real" statements.  */
2592 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
2594 else if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_PREDICT
)
2596 else if (!is_gimple_debug (gsi_stmt (*gsi_p
)))
2597 prev
= gsi_stmt (*gsi_p
);
2600 while (!gsi_end_p (*gsi_p
)
2601 /* Stop if we find a case or a user-defined label. */
2602 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2603 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2605 if (prev
&& gimple_has_location (prev
))
2606 *prevloc
= gimple_location (prev
);
2610 /* Return true if the switch fallthough warning should occur. LABEL is
2611 the label statement that we're falling through to. */
/* Suppresses the warning for labels marked "falls through", for
   non-case labels followed by a statement, and for case labels whose
   body immediately transfers control (break/goto/return).  */
2614 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2616 gimple_stmt_iterator gsi
= *gsi_p
;
2618 /* Don't warn if the label is marked with a "falls through" comment. */
2619 if (FALLTHROUGH_LABEL_P (label
))
2622 /* Don't warn for non-case labels followed by a statement:
2627 as these are likely intentional. */
2628 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
/* Skip over any run of consecutive non-case labels.  */
2631 while (!gsi_end_p (gsi
)
2632 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2633 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2634 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2635 gsi_next_nondebug (&gsi
);
2636 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2640 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2641 immediately breaks. */
2644 /* Skip all immediately following labels. */
2645 while (!gsi_end_p (gsi
)
2646 && (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2647 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_PREDICT
))
2648 gsi_next_nondebug (&gsi
);
2650 /* { ... something; default:; } */
2652 /* { ... something; default: break; } or
2653 { ... something; default: goto L; } */
2654 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2655 /* { ... something; default: return; } */
2656 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2662 /* Callback for walk_gimple_seq. */
/* Walk callback implementing -Wimplicit-fallthrough: starting at a
   label, collects statements/labels that may fall through via
   collect_fallthrough_labels, and if the next statement is a located
   label, decides whether to warn at the fall-through point.  */
2665 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2666 struct walk_stmt_info
*)
2668 gimple
*stmt
= gsi_stmt (*gsi_p
);
2670 *handled_ops_p
= true;
2671 switch (gimple_code (stmt
))
2676 case GIMPLE_EH_FILTER
:
2677 case GIMPLE_TRANSACTION
:
2678 /* Walk the sub-statements. */
2679 *handled_ops_p
= false;
2682 /* Find a sequence of form:
2689 and possibly warn. */
2692 /* Found a label. Skip all immediately following labels. */
2693 while (!gsi_end_p (*gsi_p
)
2694 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2695 gsi_next_nondebug (gsi_p
);
2697 /* There might be no more statements. */
2698 if (gsi_end_p (*gsi_p
))
2699 return integer_zero_node
;
2701 /* Vector of labels that fall through. */
2702 auto_vec
<struct label_entry
> labels
;
2704 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
, &prevloc
);
2706 /* There might be no more statements. */
2707 if (gsi_end_p (*gsi_p
))
2708 return integer_zero_node
;
2710 gimple
*next
= gsi_stmt (*gsi_p
);
2712 /* If what follows is a label, then we may have a fallthrough. */
2713 if (gimple_code (next
) == GIMPLE_LABEL
2714 && gimple_has_location (next
)
2715 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2718 struct label_entry
*l
;
2719 bool warned_p
= false;
2720 auto_diagnostic_group d
;
2721 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
/* If PREV is itself one of the collected fall-through labels,
   warn at that label's recorded location.  */
2723 else if (gimple_code (prev
) == GIMPLE_LABEL
2724 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2725 && (l
= find_label_entry (&labels
, label
)))
2726 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2727 "this statement may fall through");
2728 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2729 /* Try to be clever and don't warn when the statement
2730 can't actually fall through. */
2731 && gimple_stmt_may_fallthru (prev
)
2732 && prevloc
!= UNKNOWN_LOCATION
)
2733 warned_p
= warning_at (prevloc
,
2734 OPT_Wimplicit_fallthrough_
,
2735 "this statement may fall through");
2737 inform (gimple_location (next
), "here");
2739 /* Mark this label as processed so as to prevent multiple
2740 warnings in nested switches. */
2741 FALLTHROUGH_LABEL_P (label
) = true;
2743 /* So that next warn_implicit_fallthrough_r will start looking for
2744 a new sequence starting with this label. */
2755 /* Warn when a switch case falls through. */
/* Driver for -Wimplicit-fallthrough: no-op unless the warning is
   enabled and the language is in the C family; otherwise walks SEQ
   with warn_implicit_fallthrough_r.  */
2758 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2760 if (!warn_implicit_fallthrough
)
2763 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2766 || lang_GNU_OBJC ()))
2769 struct walk_stmt_info wi
;
2770 memset (&wi
, 0, sizeof (wi
));
2771 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2774 /* Callback for walk_gimple_seq. */
/* Walk callback that removes IFN_FALLTHROUGH marker calls and checks
   each one really precedes a case/default label, pedwarn-ing
   otherwise.  WI->info points at a two-element location_t array used
   to report a [[fallthrough]] at the very end of a switch.  */
2777 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2778 struct walk_stmt_info
*wi
)
2780 gimple
*stmt
= gsi_stmt (*gsi_p
);
2782 *handled_ops_p
= true;
2783 switch (gimple_code (stmt
))
2788 case GIMPLE_EH_FILTER
:
2789 case GIMPLE_TRANSACTION
:
2790 /* Walk the sub-statements. */
2791 *handled_ops_p
= false;
2794 static_cast<location_t
*>(wi
->info
)[0] = UNKNOWN_LOCATION
;
2795 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2797 location_t loc
= gimple_location (stmt
);
/* The marker call itself is always deleted from the sequence.  */
2798 gsi_remove (gsi_p
, true);
2799 wi
->removed_stmt
= true;
2801 /* nothrow flag is added by genericize_c_loop to mark fallthrough
2802 statement at the end of some loop's body. Those should be
2803 always diagnosed, either because they indeed don't precede
2804 a case label or default label, or because the next statement
2805 is not within the same iteration statement. */
2806 if ((stmt
->subcode
& GF_CALL_NOTHROW
) != 0)
2808 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2809 "a case label or default label");
2813 if (gsi_end_p (*gsi_p
))
/* Marker at end of sequence: record it so expand_FALLTHROUGH can
   diagnose it if it turns out to end the whole switch.  */
2815 static_cast<location_t
*>(wi
->info
)[0] = BUILTINS_LOCATION
;
2816 static_cast<location_t
*>(wi
->info
)[1] = loc
;
2822 gimple_stmt_iterator gsi2
= *gsi_p
;
2823 stmt
= gsi_stmt (gsi2
);
2824 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2826 /* Go on until the artificial label. */
2827 tree goto_dest
= gimple_goto_dest (stmt
);
2828 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2830 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2831 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2836 /* Not found? Stop. */
2837 if (gsi_end_p (gsi2
))
2840 /* Look one past it. */
2844 /* We're looking for a case label or default label here. */
2845 while (!gsi_end_p (gsi2
))
2847 stmt
= gsi_stmt (gsi2
);
2848 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2850 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2851 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2857 else if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2859 else if (!is_gimple_debug (stmt
))
2860 /* Anything else is not expected. */
2865 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2866 "a case label or default label");
2870 static_cast<location_t
*>(wi
->info
)[0] = UNKNOWN_LOCATION
;
2876 /* Expand all FALLTHROUGH () calls in SEQ. */
/* Runs expand_FALLTHROUGH_r over SEQ (modifying it); if the walk left
   loc[0] set, a [[fallthrough]] terminated the switch, which is
   ill-formed, so pedwarn at the remembered loc[1].  */
2879 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2881 struct walk_stmt_info wi
;
2883 memset (&wi
, 0, sizeof (wi
));
2884 loc
[0] = UNKNOWN_LOCATION
;
2885 loc
[1] = UNKNOWN_LOCATION
;
2886 wi
.info
= (void *) &loc
[0];
2887 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2888 if (loc
[0] != UNKNOWN_LOCATION
)
2889 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2890 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2891 pedwarn (loc
[1], 0, "attribute %<fallthrough%> not preceding "
2892 "a case label or default label");
2896 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
/* Gimplifies the controlling expression and body, runs the
   switch-related warnings, normalizes the case-label vector, supplies
   a default case when missing, and emits the GIMPLE_SWITCH (possibly
   wrapped in a GIMPLE_BIND ending at the break label).  */
2899 static enum gimplify_status
2900 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2902 tree switch_expr
= *expr_p
;
2903 gimple_seq switch_body_seq
= NULL
;
2904 enum gimplify_status ret
;
2905 tree index_type
= TREE_TYPE (switch_expr
);
2906 if (index_type
== NULL_TREE
)
2907 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2909 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2911 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2914 if (SWITCH_BODY (switch_expr
))
2917 vec
<tree
> saved_labels
;
2918 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2919 tree default_case
= NULL_TREE
;
2920 gswitch
*switch_stmt
;
2922 /* Save old labels, get new ones from body, then restore the old
2923 labels. Save all the things from the switch body to append after. */
2924 saved_labels
= gimplify_ctxp
->case_labels
;
2925 gimplify_ctxp
->case_labels
.create (8);
2927 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2928 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2929 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2930 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2931 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2933 gimplify_ctxp
->live_switch_vars
= NULL
;
2935 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2936 gimplify_ctxp
->in_switch_expr
= true;
2938 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2940 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2941 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq
);
2942 maybe_warn_implicit_fallthrough (switch_body_seq
);
2943 /* Only do this for the outermost GIMPLE_SWITCH. */
2944 if (!gimplify_ctxp
->in_switch_expr
)
2945 expand_FALLTHROUGH (&switch_body_seq
);
2947 labels
= gimplify_ctxp
->case_labels
;
2948 gimplify_ctxp
->case_labels
= saved_labels
;
2950 if (gimplify_ctxp
->live_switch_vars
)
2952 gcc_assert (gimplify_ctxp
->live_switch_vars
->is_empty ());
2953 delete gimplify_ctxp
->live_switch_vars
;
2955 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2957 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2960 bool add_bind
= false;
/* No user-written default: synthesize an artificial one.  */
2963 glabel
*new_default
;
2966 = build_case_label (NULL_TREE
, NULL_TREE
,
2967 create_artificial_label (UNKNOWN_LOCATION
));
2968 if (old_in_switch_expr
)
2970 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case
)) = 1;
2973 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2974 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2976 else if (old_in_switch_expr
)
2978 gimple
*last
= gimple_seq_last_stmt (switch_body_seq
);
2979 if (last
&& gimple_code (last
) == GIMPLE_LABEL
)
2981 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2982 if (SWITCH_BREAK_LABEL_P (label
))
2987 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2988 default_case
, labels
);
2989 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2990 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2991 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2992 so that we can easily find the start and end of the switch
2996 gimple_seq bind_body
= NULL
;
2997 gimplify_seq_add_stmt (&bind_body
, switch_stmt
);
2998 gimple_seq_add_seq (&bind_body
, switch_body_seq
);
2999 gbind
*bind
= gimple_build_bind (NULL_TREE
, bind_body
, NULL_TREE
);
3000 gimple_set_location (bind
, EXPR_LOCATION (switch_expr
));
3001 gimplify_seq_add_stmt (pre_p
, bind
);
3005 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
3006 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
3016 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
/* Emits a GIMPLE_LABEL for the LABEL_DECL (asserting it belongs to the
   current function), followed by a branch-prediction hint when the
   label carries a "cold" or "hot" attribute.  */
3018 static enum gimplify_status
3019 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
3021 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
3022 == current_function_decl
);
3024 tree label
= LABEL_EXPR_LABEL (*expr_p
);
3025 glabel
*label_stmt
= gimple_build_label (label
);
3026 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
3027 gimplify_seq_add_stmt (pre_p
, label_stmt
);
3029 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
3030 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
3032 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
3033 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
3039 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
/* Records the case label in the innermost gimplify context that has a
   case_labels vector (which may not be the current one -- see the
   Duff's-device note below) and emits the GIMPLE_LABEL plus any
   hot/cold prediction hint.  */
3041 static enum gimplify_status
3042 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
3044 struct gimplify_ctx
*ctxp
;
3047 /* Invalid programs can play Duff's Device type games with, for example,
3048 #pragma omp parallel. At least in the C front end, we don't
3049 detect such invalid branches until after gimplification, in the
3050 diagnose_omp_blocks pass. */
3051 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
3052 if (ctxp
->case_labels
.exists ())
3055 tree label
= CASE_LABEL (*expr_p
);
3056 label_stmt
= gimple_build_label (label
);
3057 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
3058 ctxp
->case_labels
.safe_push (*expr_p
);
3059 gimplify_seq_add_stmt (pre_p
, label_stmt
);
3061 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
3062 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
3064 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
3065 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
3071 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
/* Lazily creates the label when *LABEL_P is still NULL_TREE; a NULL
   LABEL_P means there is nowhere to jump, i.e. plain fallthrough.  */
3075 build_and_jump (tree
*label_p
)
3077 if (label_p
== NULL
)
3078 /* If there's nowhere to jump, just fall through. */
3081 if (*label_p
== NULL_TREE
)
3083 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
3087 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
3090 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
3091 This also involves building a label to jump to and communicating it to
3092 gimplify_loop_expr through gimplify_ctxp->exit_label. */
3094 static enum gimplify_status
3095 gimplify_exit_expr (tree
*expr_p
)
3097 tree cond
= TREE_OPERAND (*expr_p
, 0);
/* Build "if (cond) goto exit_label;" in place of the EXIT_EXPR.  */
3100 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
3101 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
3107 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
3108 different from its canonical type, wrap the whole thing inside a
3109 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
3112 The canonical type of a COMPONENT_REF is the type of the field being
3113 referenced--unless the field is a bit-field which can be read directly
3114 in a smaller mode, in which case the canonical type is the
3115 sign-appropriate type corresponding to that mode. */
3118 canonicalize_component_ref (tree
*expr_p
)
3120 tree expr
= *expr_p
;
3123 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
)
;
/* For integral results get_unwidened picks the narrowest type the
   (possibly bit-field) access can be read in.  */
3125 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
3126 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
3128 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
3130 /* One could argue that all the stuff below is not necessary for
3131 the non-bitfield case and declare it a FE error if type
3132 adjustment would be needed. */
3133 if (TREE_TYPE (expr
) != type
)
3135 #ifdef ENABLE_TYPES_CHECKING
3136 tree old_type
= TREE_TYPE (expr
);
3140 /* We need to preserve qualifiers and propagate them from
3142 type_quals
= TYPE_QUALS (type
)
3143 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
3144 if (TYPE_QUALS (type
) != type_quals
)
3145 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
3147 /* Set the type of the COMPONENT_REF to the underlying type. */
3148 TREE_TYPE (expr
) = type
;
3150 #ifdef ENABLE_TYPES_CHECKING
3151 /* It is now a FE error, if the conversion from the canonical
3152 type to the original expression type is not useless. */
3153 gcc_assert (useless_type_conversion_p (old_type
, type
));
3158 /* If a NOP conversion is changing a pointer to array of foo to a pointer
3159 to foo, embed that change in the ADDR_EXPR by converting
3164 where L is the lower bound. For simplicity, only do this for constant
3166 The constraint is that the type of &array[L] is trivially convertible
3170 canonicalize_addr_expr (tree
*expr_p
)
3172 tree expr
= *expr_p
;
3173 tree addr_expr
= TREE_OPERAND (expr
, 0);
3174 tree datype
, ddatype
, pddatype
;
3176 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
3177 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
3178 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
3181 /* The addr_expr type should be a pointer to an array. */
3182 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
3183 if (TREE_CODE (datype
) != ARRAY_TYPE
)
3186 /* The pointer to element type shall be trivially convertible to
3187 the expression pointer type. */
3188 ddatype
= TREE_TYPE (datype
);
3189 pddatype
= build_pointer_type (ddatype
);
3190 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
3194 /* The lower bound and element sizes must be constant. */
3195 if (!TYPE_SIZE_UNIT (ddatype
)
3196 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
3197 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
3198 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
3201 /* All checks succeeded. Build a new node to merge the cast. */
/* Rewrite (T *)&array  as  &array[L] of pointer-to-element type.  */
3202 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
3203 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
3204 NULL_TREE
, NULL_TREE
);
3205 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
3207 /* We can have stripped a required restrict qualifier above. */
3208 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
3209 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
3212 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
3213 underneath as appropriate. */
/* Strips useless conversions, canonicalizes COMPONENT_REF and
   ADDR_EXPR operands, forces VIEW_CONVERT_EXPR for conversions to
   non-register types, and normalizes CONVERT_EXPR to NOP_EXPR.  */
3215 static enum gimplify_status
3216 gimplify_conversion (tree
*expr_p
)
3218 location_t loc
= EXPR_LOCATION (*expr_p
);
3219 gcc_assert (CONVERT_EXPR_P (*expr_p
));
3221 /* Then strip away all but the outermost conversion. */
3222 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
3224 /* And remove the outermost conversion if it's useless. */
3225 if (tree_ssa_useless_type_conversion (*expr_p
))
3226 *expr_p
= TREE_OPERAND (*expr_p
, 0);
3228 /* If we still have a conversion at the toplevel,
3229 then canonicalize some constructs. */
3230 if (CONVERT_EXPR_P (*expr_p
))
3232 tree sub
= TREE_OPERAND (*expr_p
, 0);
3234 /* If a NOP conversion is changing the type of a COMPONENT_REF
3235 expression, then canonicalize its type now in order to expose more
3236 redundant conversions. */
3237 if (TREE_CODE (sub
) == COMPONENT_REF
)
3238 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
3240 /* If a NOP conversion is changing a pointer to array of foo
3241 to a pointer to foo, embed that change in the ADDR_EXPR. */
3242 else if (TREE_CODE (sub
) == ADDR_EXPR
)
3243 canonicalize_addr_expr (expr_p
);
3246 /* If we have a conversion to a non-register type force the
3247 use of a VIEW_CONVERT_EXPR instead. */
3248 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
3249 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
3250 TREE_OPERAND (*expr_p
, 0));
3252 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3253 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
3254 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
3259 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3260 DECL_VALUE_EXPR, and it's worth re-examining things. */
3262 static enum gimplify_status
3263 gimplify_var_or_parm_decl (tree
*expr_p
)
3265 tree decl
= *expr_p
;
3267 /* ??? If this is a local variable, and it has not been seen in any
3268 outer BIND_EXPR, then it's probably the result of a duplicate
3269 declaration, for which we've already issued an error. It would
3270 be really nice if the front end wouldn't leak these at all.
3271 Currently the only known culprit is C++ destructors, as seen
3272 in g++.old-deja/g++.jason/binding.C.
3273 Another possible culpit are size expressions for variably modified
3274 types which are lost in the FE or not gimplified correctly. */
3276 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
3277 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
3278 && decl_function_context (decl
) == current_function_decl
)
3280 gcc_assert (seen_error ());
3284 /* When within an OMP context, notice uses of variables. */
3285 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
3288 /* If the decl is an alias for another expression, substitute it now. */
3289 if (DECL_HAS_VALUE_EXPR_P (decl
))
/* Unshare so later gimplification can modify the copy freely.  */
3291 *expr_p
= unshare_expr (DECL_VALUE_EXPR (decl
));
3298 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
/* For expression-like nodes, side effects are recomputed as:
   volatile access, pre/post inc/dec, or any operand that itself has
   side effects.  */
3301 recalculate_side_effects (tree t
)
3303 enum tree_code code
= TREE_CODE (t
);
3304 int len
= TREE_OPERAND_LENGTH (t
);
3307 switch (TREE_CODE_CLASS (code
))
3309 case tcc_expression
:
3315 case PREDECREMENT_EXPR
:
3316 case PREINCREMENT_EXPR
:
3317 case POSTDECREMENT_EXPR
:
3318 case POSTINCREMENT_EXPR
:
3319 /* All of these have side-effects, no matter what their
3328 case tcc_comparison
: /* a comparison expression */
3329 case tcc_unary
: /* a unary arithmetic expression */
3330 case tcc_binary
: /* a binary arithmetic expression */
3331 case tcc_reference
: /* a reference */
3332 case tcc_vl_exp
: /* a function call */
/* Start from volatility, then OR in each operand's flag.  */
3333 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
3334 for (i
= 0; i
< len
; ++i
)
3336 tree op
= TREE_OPERAND (t
, i
);
3337 if (op
&& TREE_SIDE_EFFECTS (op
))
3338 TREE_SIDE_EFFECTS (t
) = 1;
3343 /* No side-effects. */
3351 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3355 : min_lval '[' val ']'
3357 | compound_lval '[' val ']'
3358 | compound_lval '.' ID
3360 This is not part of the original SIMPLE definition, which separates
3361 array and member references, but it seems reasonable to handle them
3362 together. Also, this way we don't run into problems with union
3363 aliasing; gcc requires that for accesses through a union to alias, the
3364 union reference must be explicit, which was not always the case when we
3365 were splitting up array and member refs.
3367 PRE_P points to the sequence where side effects that must happen before
3368 *EXPR_P should be stored.
3370 POST_P points to the sequence where side effects that must happen after
3371 *EXPR_P should be stored. */
3373 static enum gimplify_status
3374 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3375 fallback_t fallback
)
3378 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
3380 location_t loc
= EXPR_LOCATION (*expr_p
);
3381 tree expr
= *expr_p
;
3383 /* Create a stack of the subexpressions so later we can walk them in
3384 order from inner to outer. */
3385 auto_vec
<tree
, 10> expr_stack
;
3387 /* We can handle anything that get_inner_reference can deal with. */
3388 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
3391 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3392 if (TREE_CODE (*p
) == INDIRECT_REF
)
3393 *p
= fold_indirect_ref_loc (loc
, *p
);
3395 if (handled_component_p (*p
))
3397 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3398 additional COMPONENT_REFs. */
3399 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
3400 && gimplify_var_or_parm_decl (p
) == GS_OK
)
3405 expr_stack
.safe_push (*p
);
3408 gcc_assert (expr_stack
.length ());
3410 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3411 walked through and P points to the innermost expression.
3413 Java requires that we elaborated nodes in source order. That
3414 means we must gimplify the inner expression followed by each of
3415 the indices, in order. But we can't gimplify the inner
3416 expression until we deal with any variable bounds, sizes, or
3417 positions in order to deal with PLACEHOLDER_EXPRs.
3419 The base expression may contain a statement expression that
3420 has declarations used in size expressions, so has to be
3421 gimplified before gimplifying the size expressions.
3423 So we do this in three steps. First we deal with variable
3424 bounds, sizes, and positions, then we gimplify the base and
3425 ensure it is memory if needed, then we deal with the annotations
3426 for any variables in the components and any indices, from left
3429 bool need_non_reg
= false;
3430 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
3432 tree t
= expr_stack
[i
];
3434 if (error_operand_p (TREE_OPERAND (t
, 0)))
3437 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3439 /* Deal with the low bound and element type size and put them into
3440 the ARRAY_REF. If these values are set, they have already been
3442 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3444 tree low
= unshare_expr (array_ref_low_bound (t
));
3445 if (!is_gimple_min_invariant (low
))
3447 TREE_OPERAND (t
, 2) = low
;
3451 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
3453 tree elmt_size
= array_ref_element_size (t
);
3454 if (!is_gimple_min_invariant (elmt_size
))
3456 elmt_size
= unshare_expr (elmt_size
);
3457 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
3458 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
3460 /* Divide the element size by the alignment of the element
3462 elmt_size
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3465 TREE_OPERAND (t
, 3) = elmt_size
;
3468 need_non_reg
= true;
3470 else if (TREE_CODE (t
) == COMPONENT_REF
)
3472 /* Set the field offset into T and gimplify it. */
3473 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3475 tree offset
= component_ref_field_offset (t
);
3476 if (!is_gimple_min_invariant (offset
))
3478 offset
= unshare_expr (offset
);
3479 tree field
= TREE_OPERAND (t
, 1);
3481 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
3483 /* Divide the offset by its alignment. */
3484 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3487 TREE_OPERAND (t
, 2) = offset
;
3490 need_non_reg
= true;
3492 else if (!is_gimple_reg_type (TREE_TYPE (t
)))
3493 /* When the result of an operation, in particular a VIEW_CONVERT_EXPR
3494 is a non-register type then require the base object to be a
3495 non-register as well. */
3496 need_non_reg
= true;
3499 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3500 so as to match the min_lval predicate. Failure to do so may result
3501 in the creation of large aggregate temporaries. */
3502 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
3503 fallback
| fb_lvalue
);
3504 ret
= MIN (ret
, tret
);
3505 if (ret
== GS_ERROR
)
3508 /* Step 2a: if we have component references we do not support on
3509 registers then make sure the base isn't a register. Of course
3510 we can only do so if an rvalue is OK. */
3511 if (need_non_reg
&& (fallback
& fb_rvalue
))
3512 prepare_gimple_addressable (p
, pre_p
);
3515 /* Step 3: gimplify size expressions and the indices and operands of
3516 ARRAY_REF. During this loop we also remove any useless conversions.
3517 If we operate on a register also make sure to properly gimplify
3518 to individual operations. */
3520 bool reg_operations
= is_gimple_reg (*p
);
3521 for (; expr_stack
.length () > 0; )
3523 tree t
= expr_stack
.pop ();
3525 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3527 gcc_assert (!reg_operations
);
3529 /* Gimplify the low bound and element type size. */
3530 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3531 is_gimple_reg
, fb_rvalue
);
3532 ret
= MIN (ret
, tret
);
3534 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
3535 is_gimple_reg
, fb_rvalue
);
3536 ret
= MIN (ret
, tret
);
3538 /* Gimplify the dimension. */
3539 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
3540 is_gimple_val
, fb_rvalue
);
3541 ret
= MIN (ret
, tret
);
3543 else if (TREE_CODE (t
) == COMPONENT_REF
)
3545 gcc_assert (!reg_operations
);
3547 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3548 is_gimple_reg
, fb_rvalue
);
3549 ret
= MIN (ret
, tret
);
3551 else if (reg_operations
)
3553 tret
= gimplify_expr (&TREE_OPERAND (t
, 0), pre_p
, post_p
,
3554 is_gimple_val
, fb_rvalue
);
3555 ret
= MIN (ret
, tret
);
3558 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
3560 /* The innermost expression P may have originally had
3561 TREE_SIDE_EFFECTS set which would have caused all the outer
3562 expressions in *EXPR_P leading to P to also have had
3563 TREE_SIDE_EFFECTS set. */
3564 recalculate_side_effects (t
);
3567 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3568 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
3570 canonicalize_component_ref (expr_p
);
3573 expr_stack
.release ();
3575 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
3580 /* Gimplify the self modifying expression pointed to by EXPR_P
3583 PRE_P points to the list where side effects that must happen before
3584 *EXPR_P should be stored.
3586 POST_P points to the list where side effects that must happen after
3587 *EXPR_P should be stored.
3589 WANT_VALUE is nonzero iff we want to use the value of this expression
3590 in another expression.
3592 ARITH_TYPE is the type the computation should be performed in. */
3594 enum gimplify_status
3595 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3596 bool want_value
, tree arith_type
)
3598 enum tree_code code
;
3599 tree lhs
, lvalue
, rhs
, t1
;
3600 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
3602 enum tree_code arith_code
;
3603 enum gimplify_status ret
;
3604 location_t loc
= EXPR_LOCATION (*expr_p
);
3606 code
= TREE_CODE (*expr_p
);
3608 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
3609 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
3611 /* Prefix or postfix? */
3612 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
3613 /* Faster to treat as prefix if result is not used. */
3614 postfix
= want_value
;
3618 /* For postfix, make sure the inner expression's post side effects
3619 are executed after side effects from this expression. */
3623 /* Add or subtract? */
3624 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
3625 arith_code
= PLUS_EXPR
;
3627 arith_code
= MINUS_EXPR
;
3629 /* Gimplify the LHS into a GIMPLE lvalue. */
3630 lvalue
= TREE_OPERAND (*expr_p
, 0);
3631 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
3632 if (ret
== GS_ERROR
)
3635 /* Extract the operands to the arithmetic operation. */
3637 rhs
= TREE_OPERAND (*expr_p
, 1);
3639 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3640 that as the result value and in the postqueue operation. */
3643 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
3644 if (ret
== GS_ERROR
)
3647 lhs
= get_initialized_tmp_var (lhs
, pre_p
);
3650 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3651 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
3653 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
3654 if (arith_code
== MINUS_EXPR
)
3655 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
3656 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
3659 t1
= fold_convert (TREE_TYPE (*expr_p
),
3660 fold_build2 (arith_code
, arith_type
,
3661 fold_convert (arith_type
, lhs
),
3662 fold_convert (arith_type
, rhs
)));
3666 gimplify_assign (lvalue
, t1
, pre_p
);
3667 gimplify_seq_add_seq (orig_post_p
, post
);
3673 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
3678 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3681 maybe_with_size_expr (tree
*expr_p
)
3683 tree expr
= *expr_p
;
3684 tree type
= TREE_TYPE (expr
);
3687 /* If we've already wrapped this or the type is error_mark_node, we can't do
3689 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3690 || type
== error_mark_node
)
3693 /* If the size isn't known or is a constant, we have nothing to do. */
3694 size
= TYPE_SIZE_UNIT (type
);
3695 if (!size
|| poly_int_tree_p (size
))
3698 /* Otherwise, make a WITH_SIZE_EXPR. */
3699 size
= unshare_expr (size
);
3700 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3701 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
3704 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3705 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3706 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3707 gimplified to an SSA name. */
3709 enum gimplify_status
3710 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
3713 bool (*test
) (tree
);
3716 /* In general, we allow lvalues for function arguments to avoid
3717 extra overhead of copying large aggregates out of even larger
3718 aggregates into temporaries only to copy the temporaries to
3719 the argument list. Make optimizers happy by pulling out to
3720 temporaries those types that fit in registers. */
3721 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3722 test
= is_gimple_val
, fb
= fb_rvalue
;
3725 test
= is_gimple_lvalue
, fb
= fb_either
;
3726 /* Also strip a TARGET_EXPR that would force an extra copy. */
3727 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3729 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3731 && !VOID_TYPE_P (TREE_TYPE (init
)))
3736 /* If this is a variable sized type, we must remember the size. */
3737 maybe_with_size_expr (arg_p
);
3739 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3740 /* Make sure arguments have the same location as the function call
3742 protected_set_expr_location (*arg_p
, call_location
);
3744 /* There is a sequence point before a function call. Side effects in
3745 the argument list must occur before the actual call. So, when
3746 gimplifying arguments, force gimplify_expr to use an internal
3747 post queue which is then appended to the end of PRE_P. */
3748 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
3751 /* Don't fold inside offloading or taskreg regions: it can break code by
3752 adding decl references that weren't in the source. We'll do it during
3753 omplower pass instead. */
3756 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3758 struct gimplify_omp_ctx
*ctx
;
3759 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3760 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3762 else if ((ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
3764 /* Delay folding of builtins until the IL is in consistent state
3765 so the diagnostic machinery can do a better job. */
3766 if (gimple_call_builtin_p (gsi_stmt (*gsi
)))
3768 return fold_stmt (gsi
);
3771 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3772 WANT_VALUE is true if the result of the call is desired. */
3774 static enum gimplify_status
3775 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3777 tree fndecl
, parms
, p
, fnptrtype
;
3778 enum gimplify_status ret
;
3781 bool builtin_va_start_p
= false;
3782 location_t loc
= EXPR_LOCATION (*expr_p
);
3784 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3786 /* For reliable diagnostics during inlining, it is necessary that
3787 every call_expr be annotated with file and line. */
3788 if (! EXPR_HAS_LOCATION (*expr_p
))
3789 SET_EXPR_LOCATION (*expr_p
, input_location
);
3791 /* Gimplify internal functions created in the FEs. */
3792 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3797 nargs
= call_expr_nargs (*expr_p
);
3798 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3799 auto_vec
<tree
> vargs (nargs
);
3801 if (ifn
== IFN_ASSUME
)
3803 if (simple_condition_p (CALL_EXPR_ARG (*expr_p
, 0)))
3805 /* If the [[assume (cond)]]; condition is simple
3806 enough and can be evaluated unconditionally
3807 without side-effects, expand it as
3808 if (!cond) __builtin_unreachable (); */
3809 tree fndecl
= builtin_decl_explicit (BUILT_IN_UNREACHABLE
);
3810 *expr_p
= build3 (COND_EXPR
, void_type_node
,
3811 CALL_EXPR_ARG (*expr_p
, 0), void_node
,
3812 build_call_expr_loc (EXPR_LOCATION (*expr_p
),
3816 /* If not optimizing, ignore the assumptions. */
3817 if (!optimize
|| seen_error ())
3819 *expr_p
= NULL_TREE
;
3822 /* Temporarily, until gimple lowering, transform
3829 such that gimple lowering can outline the condition into
3830 a separate function easily. */
3831 tree guard
= create_tmp_var (boolean_type_node
);
3832 *expr_p
= build2 (MODIFY_EXPR
, void_type_node
, guard
,
3833 gimple_boolify (CALL_EXPR_ARG (*expr_p
, 0)));
3834 *expr_p
= build3 (BIND_EXPR
, void_type_node
, NULL
, *expr_p
, NULL
);
3835 push_gimplify_context ();
3836 gimple_seq body
= NULL
;
3837 gimple
*g
= gimplify_and_return_first (*expr_p
, &body
);
3838 pop_gimplify_context (g
);
3839 g
= gimple_build_assume (guard
, body
);
3840 gimple_set_location (g
, loc
);
3841 gimplify_seq_add_stmt (pre_p
, g
);
3842 *expr_p
= NULL_TREE
;
3846 for (i
= 0; i
< nargs
; i
++)
3848 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3849 EXPR_LOCATION (*expr_p
));
3850 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3853 gcall
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3854 gimple_call_set_nothrow (call
, TREE_NOTHROW (*expr_p
));
3855 gimplify_seq_add_stmt (pre_p
, call
);
3859 /* This may be a call to a builtin function.
3861 Builtin function calls may be transformed into different
3862 (and more efficient) builtin function calls under certain
3863 circumstances. Unfortunately, gimplification can muck things
3864 up enough that the builtin expanders are not aware that certain
3865 transformations are still valid.
3867 So we attempt transformation/gimplification of the call before
3868 we gimplify the CALL_EXPR. At this time we do not manage to
3869 transform all calls in the same manner as the expanders do, but
3870 we do transform most of them. */
3871 fndecl
= get_callee_fndecl (*expr_p
);
3872 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3873 switch (DECL_FUNCTION_CODE (fndecl
))
3875 CASE_BUILT_IN_ALLOCA
:
3876 /* If the call has been built for a variable-sized object, then we
3877 want to restore the stack level when the enclosing BIND_EXPR is
3878 exited to reclaim the allocated space; otherwise, we precisely
3879 need to do the opposite and preserve the latest stack level. */
3880 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3881 gimplify_ctxp
->save_stack
= true;
3883 gimplify_ctxp
->keep_stack
= true;
3886 case BUILT_IN_VA_START
:
3888 builtin_va_start_p
= true;
3889 if (call_expr_nargs (*expr_p
) < 2)
3891 error ("too few arguments to function %<va_start%>");
3892 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3896 if (fold_builtin_next_arg (*expr_p
, true))
3898 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3904 case BUILT_IN_EH_RETURN
:
3905 cfun
->calls_eh_return
= true;
3908 case BUILT_IN_CLEAR_PADDING
:
3909 if (call_expr_nargs (*expr_p
) == 1)
3911 /* Remember the original type of the argument in an internal
3912 dummy second argument, as in GIMPLE pointer conversions are
3913 useless. Also mark this call as not for automatic
3914 initialization in the internal dummy third argument. */
3915 p
= CALL_EXPR_ARG (*expr_p
, 0);
3917 = build_call_expr_loc (EXPR_LOCATION (*expr_p
), fndecl
, 2, p
,
3918 build_zero_cst (TREE_TYPE (p
)));
3926 if (fndecl
&& fndecl_built_in_p (fndecl
))
3928 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3929 if (new_tree
&& new_tree
!= *expr_p
)
3931 /* There was a transformation of this call which computes the
3932 same value, but in a more efficient way. Return and try
3939 /* Remember the original function pointer type. */
3940 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3945 && (cfun
->curr_properties
& PROP_gimple_any
) == 0)
3947 tree variant
= omp_resolve_declare_variant (fndecl
);
3948 if (variant
!= fndecl
)
3949 CALL_EXPR_FN (*expr_p
) = build1 (ADDR_EXPR
, fnptrtype
, variant
);
3952 /* There is a sequence point before the call, so any side effects in
3953 the calling expression must occur before the actual call. Force
3954 gimplify_expr to use an internal post queue. */
3955 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3956 is_gimple_call_addr
, fb_rvalue
);
3958 if (ret
== GS_ERROR
)
3961 nargs
= call_expr_nargs (*expr_p
);
3963 /* Get argument types for verification. */
3964 fndecl
= get_callee_fndecl (*expr_p
);
3967 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3969 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3971 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3972 p
= DECL_ARGUMENTS (fndecl
);
3977 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3980 /* If the last argument is __builtin_va_arg_pack () and it is not
3981 passed as a named argument, decrease the number of CALL_EXPR
3982 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3985 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3987 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3988 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3991 && fndecl_built_in_p (last_arg_fndecl
, BUILT_IN_VA_ARG_PACK
))
3993 tree call
= *expr_p
;
3996 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3997 CALL_EXPR_FN (call
),
3998 nargs
, CALL_EXPR_ARGP (call
));
4000 /* Copy all CALL_EXPR flags, location and block, except
4001 CALL_EXPR_VA_ARG_PACK flag. */
4002 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
4003 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
4004 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
4005 = CALL_EXPR_RETURN_SLOT_OPT (call
);
4006 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
4007 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
4009 /* Set CALL_EXPR_VA_ARG_PACK. */
4010 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
4014 /* If the call returns twice then after building the CFG the call
4015 argument computations will no longer dominate the call because
4016 we add an abnormal incoming edge to the call. So do not use SSA
4018 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
4020 /* Gimplify the function arguments. */
4023 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
4024 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
4025 PUSH_ARGS_REVERSED
? i
-- : i
++)
4027 enum gimplify_status t
;
4029 /* Avoid gimplifying the second argument to va_start, which needs to
4030 be the plain PARM_DECL. */
4031 if ((i
!= 1) || !builtin_va_start_p
)
4033 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
4034 EXPR_LOCATION (*expr_p
), ! returns_twice
);
4042 /* Gimplify the static chain. */
4043 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
4045 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
4046 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
4049 enum gimplify_status t
;
4050 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
4051 EXPR_LOCATION (*expr_p
), ! returns_twice
);
4057 /* Verify the function result. */
4058 if (want_value
&& fndecl
4059 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
4061 error_at (loc
, "using result of function returning %<void%>");
4065 /* Try this again in case gimplification exposed something. */
4066 if (ret
!= GS_ERROR
)
4068 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
4070 if (new_tree
&& new_tree
!= *expr_p
)
4072 /* There was a transformation of this call which computes the
4073 same value, but in a more efficient way. Return and try
4081 *expr_p
= error_mark_node
;
4085 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
4086 decl. This allows us to eliminate redundant or useless
4087 calls to "const" functions. */
4088 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
4090 int flags
= call_expr_flags (*expr_p
);
4091 if (flags
& (ECF_CONST
| ECF_PURE
)
4092 /* An infinite loop is considered a side effect. */
4093 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
4094 TREE_SIDE_EFFECTS (*expr_p
) = 0;
4097 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
4098 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
4099 form and delegate the creation of a GIMPLE_CALL to
4100 gimplify_modify_expr. This is always possible because when
4101 WANT_VALUE is true, the caller wants the result of this call into
4102 a temporary, which means that we will emit an INIT_EXPR in
4103 internal_get_tmp_var which will then be handled by
4104 gimplify_modify_expr. */
4107 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
4108 have to do is replicate it as a GIMPLE_CALL tuple. */
4109 gimple_stmt_iterator gsi
;
4110 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
4111 notice_special_calls (call
);
4112 gimplify_seq_add_stmt (pre_p
, call
);
4113 gsi
= gsi_last (*pre_p
);
4114 maybe_fold_stmt (&gsi
);
4115 *expr_p
= NULL_TREE
;
4118 /* Remember the original function type. */
4119 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
4120 CALL_EXPR_FN (*expr_p
));
4125 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
4126 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
4128 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
4129 condition is true or false, respectively. If null, we should generate
4130 our own to skip over the evaluation of this specific expression.
4132 LOCUS is the source location of the COND_EXPR.
4134 This function is the tree equivalent of do_jump.
4136 shortcut_cond_r should only be called by shortcut_cond_expr. */
4139 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
4142 tree local_label
= NULL_TREE
;
4143 tree t
, expr
= NULL
;
4145 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
4146 retain the shortcut semantics. Just insert the gotos here;
4147 shortcut_cond_expr will append the real blocks later. */
4148 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
4150 location_t new_locus
;
4152 /* Turn if (a && b) into
4154 if (a); else goto no;
4155 if (b) goto yes; else goto no;
4158 if (false_label_p
== NULL
)
4159 false_label_p
= &local_label
;
4161 /* Keep the original source location on the first 'if'. */
4162 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
4163 append_to_statement_list (t
, &expr
);
4165 /* Set the source location of the && on the second 'if'. */
4166 new_locus
= rexpr_location (pred
, locus
);
4167 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
4169 append_to_statement_list (t
, &expr
);
4171 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
4173 location_t new_locus
;
4175 /* Turn if (a || b) into
4178 if (b) goto yes; else goto no;
4181 if (true_label_p
== NULL
)
4182 true_label_p
= &local_label
;
4184 /* Keep the original source location on the first 'if'. */
4185 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
4186 append_to_statement_list (t
, &expr
);
4188 /* Set the source location of the || on the second 'if'. */
4189 new_locus
= rexpr_location (pred
, locus
);
4190 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
4192 append_to_statement_list (t
, &expr
);
4194 else if (TREE_CODE (pred
) == COND_EXPR
4195 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
4196 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
4198 location_t new_locus
;
4200 /* As long as we're messing with gotos, turn if (a ? b : c) into
4202 if (b) goto yes; else goto no;
4204 if (c) goto yes; else goto no;
4206 Don't do this if one of the arms has void type, which can happen
4207 in C++ when the arm is throw. */
4209 /* Keep the original source location on the first 'if'. Set the source
4210 location of the ? on the second 'if'. */
4211 new_locus
= rexpr_location (pred
, locus
);
4212 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
4213 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
4214 false_label_p
, locus
),
4215 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
4216 false_label_p
, new_locus
));
4220 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
4221 build_and_jump (true_label_p
),
4222 build_and_jump (false_label_p
));
4223 SET_EXPR_LOCATION (expr
, locus
);
4228 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
4229 append_to_statement_list (t
, &expr
);
4235 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
4236 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
4237 statement, if it is the last one. Otherwise, return NULL. */
4240 find_goto (tree expr
)
4245 if (TREE_CODE (expr
) == GOTO_EXPR
)
4248 if (TREE_CODE (expr
) != STATEMENT_LIST
)
4251 tree_stmt_iterator i
= tsi_start (expr
);
4253 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
4256 if (!tsi_one_before_end_p (i
))
4259 return find_goto (tsi_stmt (i
));
4262 /* Same as find_goto, except that it returns NULL if the destination
4263 is not a LABEL_DECL. */
4266 find_goto_label (tree expr
)
4268 tree dest
= find_goto (expr
);
4269 if (dest
&& TREE_CODE (GOTO_DESTINATION (dest
)) == LABEL_DECL
)
4274 /* Given a conditional expression EXPR with short-circuit boolean
4275 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4276 predicate apart into the equivalent sequence of conditionals. */
4279 shortcut_cond_expr (tree expr
)
4281 tree pred
= TREE_OPERAND (expr
, 0);
4282 tree then_
= TREE_OPERAND (expr
, 1);
4283 tree else_
= TREE_OPERAND (expr
, 2);
4284 tree true_label
, false_label
, end_label
, t
;
4286 tree
*false_label_p
;
4287 bool emit_end
, emit_false
, jump_over_else
;
4288 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
4289 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
4291 /* First do simple transformations. */
4294 /* If there is no 'else', turn
4297 if (a) if (b) then c. */
4298 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
4300 /* Keep the original source location on the first 'if'. */
4301 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
4302 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
4303 /* Set the source location of the && on the second 'if'. */
4304 if (rexpr_has_location (pred
))
4305 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
4306 then_
= shortcut_cond_expr (expr
);
4307 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
4308 pred
= TREE_OPERAND (pred
, 0);
4309 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
4310 SET_EXPR_LOCATION (expr
, locus
);
4316 /* If there is no 'then', turn
4319 if (a); else if (b); else d. */
4320 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
4322 /* Keep the original source location on the first 'if'. */
4323 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
4324 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
4325 /* Set the source location of the || on the second 'if'. */
4326 if (rexpr_has_location (pred
))
4327 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
4328 else_
= shortcut_cond_expr (expr
);
4329 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
4330 pred
= TREE_OPERAND (pred
, 0);
4331 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
4332 SET_EXPR_LOCATION (expr
, locus
);
4336 /* If we're done, great. */
4337 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
4338 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
4341 /* Otherwise we need to mess with gotos. Change
4344 if (a); else goto no;
4347 and recursively gimplify the condition. */
4349 true_label
= false_label
= end_label
= NULL_TREE
;
4351 /* If our arms just jump somewhere, hijack those labels so we don't
4352 generate jumps to jumps. */
4354 if (tree then_goto
= find_goto_label (then_
))
4356 true_label
= GOTO_DESTINATION (then_goto
);
4361 if (tree else_goto
= find_goto_label (else_
))
4363 false_label
= GOTO_DESTINATION (else_goto
);
4368 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4370 true_label_p
= &true_label
;
4372 true_label_p
= NULL
;
4374 /* The 'else' branch also needs a label if it contains interesting code. */
4375 if (false_label
|| else_se
)
4376 false_label_p
= &false_label
;
4378 false_label_p
= NULL
;
4380 /* If there was nothing else in our arms, just forward the label(s). */
4381 if (!then_se
&& !else_se
)
4382 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
4383 EXPR_LOC_OR_LOC (expr
, input_location
));
4385 /* If our last subexpression already has a terminal label, reuse it. */
4387 t
= expr_last (else_
);
4389 t
= expr_last (then_
);
4392 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
4393 end_label
= LABEL_EXPR_LABEL (t
);
4395 /* If we don't care about jumping to the 'else' branch, jump to the end
4396 if the condition is false. */
4398 false_label_p
= &end_label
;
4400 /* We only want to emit these labels if we aren't hijacking them. */
4401 emit_end
= (end_label
== NULL_TREE
);
4402 emit_false
= (false_label
== NULL_TREE
);
4404 /* We only emit the jump over the else clause if we have to--if the
4405 then clause may fall through. Otherwise we can wind up with a
4406 useless jump and a useless label at the end of gimplified code,
4407 which will cause us to think that this conditional as a whole
4408 falls through even if it doesn't. If we then inline a function
4409 which ends with such a condition, that can cause us to issue an
4410 inappropriate warning about control reaching the end of a
4411 non-void function. */
4412 jump_over_else
= block_may_fallthru (then_
);
4414 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
4415 EXPR_LOC_OR_LOC (expr
, input_location
));
4418 append_to_statement_list (pred
, &expr
);
4420 append_to_statement_list (then_
, &expr
);
4425 tree last
= expr_last (expr
);
4426 t
= build_and_jump (&end_label
);
4427 if (rexpr_has_location (last
))
4428 SET_EXPR_LOCATION (t
, rexpr_location (last
));
4429 append_to_statement_list (t
, &expr
);
4433 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
4434 append_to_statement_list (t
, &expr
);
4436 append_to_statement_list (else_
, &expr
);
4438 if (emit_end
&& end_label
)
4440 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
4441 append_to_statement_list (t
, &expr
);
4447 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4450 gimple_boolify (tree expr
)
4452 tree type
= TREE_TYPE (expr
);
4453 location_t loc
= EXPR_LOCATION (expr
);
4455 if (TREE_CODE (expr
) == NE_EXPR
4456 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
4457 && integer_zerop (TREE_OPERAND (expr
, 1)))
4459 tree call
= TREE_OPERAND (expr
, 0);
4460 tree fn
= get_callee_fndecl (call
);
4462 /* For __builtin_expect ((long) (x), y) recurse into x as well
4463 if x is truth_value_p. */
4465 && fndecl_built_in_p (fn
, BUILT_IN_EXPECT
)
4466 && call_expr_nargs (call
) == 2)
4468 tree arg
= CALL_EXPR_ARG (call
, 0);
4471 if (TREE_CODE (arg
) == NOP_EXPR
4472 && TREE_TYPE (arg
) == TREE_TYPE (call
))
4473 arg
= TREE_OPERAND (arg
, 0);
4474 if (truth_value_p (TREE_CODE (arg
)))
4476 arg
= gimple_boolify (arg
);
4477 CALL_EXPR_ARG (call
, 0)
4478 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
4484 switch (TREE_CODE (expr
))
4486 case TRUTH_AND_EXPR
:
4488 case TRUTH_XOR_EXPR
:
4489 case TRUTH_ANDIF_EXPR
:
4490 case TRUTH_ORIF_EXPR
:
4491 /* Also boolify the arguments of truth exprs. */
4492 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
4495 case TRUTH_NOT_EXPR
:
4496 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4498 /* These expressions always produce boolean results. */
4499 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4500 TREE_TYPE (expr
) = boolean_type_node
;
4504 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
4506 case annot_expr_ivdep_kind
:
4507 case annot_expr_unroll_kind
:
4508 case annot_expr_no_vector_kind
:
4509 case annot_expr_vector_kind
:
4510 case annot_expr_parallel_kind
:
4511 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4512 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4513 TREE_TYPE (expr
) = boolean_type_node
;
4520 if (COMPARISON_CLASS_P (expr
))
4522 /* These expressions always produce boolean results. */
4523 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4524 TREE_TYPE (expr
) = boolean_type_node
;
4527 /* Other expressions that get here must have boolean values, but
4528 might need to be converted to the appropriate mode. */
4529 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
4531 return fold_convert_loc (loc
, boolean_type_node
, expr
);
4535 /* Given a conditional expression *EXPR_P without side effects, gimplify
4536 its operands. New statements are inserted to PRE_P. */
4538 static enum gimplify_status
4539 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
4541 tree expr
= *expr_p
, cond
;
4542 enum gimplify_status ret
, tret
;
4543 enum tree_code code
;
4545 cond
= gimple_boolify (COND_EXPR_COND (expr
));
4547 /* We need to handle && and || specially, as their gimplification
4548 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4549 code
= TREE_CODE (cond
);
4550 if (code
== TRUTH_ANDIF_EXPR
)
4551 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
4552 else if (code
== TRUTH_ORIF_EXPR
)
4553 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
4554 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
4555 COND_EXPR_COND (*expr_p
) = cond
;
4557 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
4558 is_gimple_val
, fb_rvalue
);
4559 ret
= MIN (ret
, tret
);
4560 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
4561 is_gimple_val
, fb_rvalue
);
4563 return MIN (ret
, tret
);
4566 /* Return true if evaluating EXPR could trap.
4567 EXPR is GENERIC, while tree_could_trap_p can be called
4571 generic_expr_could_trap_p (tree expr
)
4575 if (!expr
|| is_gimple_val (expr
))
4578 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
4581 n
= TREE_OPERAND_LENGTH (expr
);
4582 for (i
= 0; i
< n
; i
++)
4583 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
4589 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4598 The second form is used when *EXPR_P is of type void.
4600 PRE_P points to the list where side effects that must happen before
4601 *EXPR_P should be stored. */
4603 static enum gimplify_status
4604 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
4606 tree expr
= *expr_p
;
4607 tree type
= TREE_TYPE (expr
);
4608 location_t loc
= EXPR_LOCATION (expr
);
4609 tree tmp
, arm1
, arm2
;
4610 enum gimplify_status ret
;
4611 tree label_true
, label_false
, label_cont
;
4612 bool have_then_clause_p
, have_else_clause_p
;
4614 enum tree_code pred_code
;
4615 gimple_seq seq
= NULL
;
4617 /* If this COND_EXPR has a value, copy the values into a temporary within
4619 if (!VOID_TYPE_P (type
))
4621 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
4624 /* If either an rvalue is ok or we do not require an lvalue, create the
4625 temporary. But we cannot do that if the type is addressable. */
4626 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
4627 && !TREE_ADDRESSABLE (type
))
4629 if (gimplify_ctxp
->allow_rhs_cond_expr
4630 /* If either branch has side effects or could trap, it can't be
4631 evaluated unconditionally. */
4632 && !TREE_SIDE_EFFECTS (then_
)
4633 && !generic_expr_could_trap_p (then_
)
4634 && !TREE_SIDE_EFFECTS (else_
)
4635 && !generic_expr_could_trap_p (else_
))
4636 return gimplify_pure_cond_expr (expr_p
, pre_p
);
4638 tmp
= create_tmp_var (type
, "iftmp");
4642 /* Otherwise, only create and copy references to the values. */
4645 type
= build_pointer_type (type
);
4647 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4648 then_
= build_fold_addr_expr_loc (loc
, then_
);
4650 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4651 else_
= build_fold_addr_expr_loc (loc
, else_
);
4654 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
4656 tmp
= create_tmp_var (type
, "iftmp");
4657 result
= build_simple_mem_ref_loc (loc
, tmp
);
4660 /* Build the new then clause, `tmp = then_;'. But don't build the
4661 assignment if the value is void; in C++ it can be if it's a throw. */
4662 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4663 TREE_OPERAND (expr
, 1) = build2 (INIT_EXPR
, type
, tmp
, then_
);
4665 /* Similarly, build the new else clause, `tmp = else_;'. */
4666 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4667 TREE_OPERAND (expr
, 2) = build2 (INIT_EXPR
, type
, tmp
, else_
);
4669 TREE_TYPE (expr
) = void_type_node
;
4670 recalculate_side_effects (expr
);
4672 /* Move the COND_EXPR to the prequeue. */
4673 gimplify_stmt (&expr
, pre_p
);
4679 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4680 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
4681 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
4682 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
4684 /* Make sure the condition has BOOLEAN_TYPE. */
4685 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4687 /* Break apart && and || conditions. */
4688 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
4689 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
4691 expr
= shortcut_cond_expr (expr
);
4693 if (expr
!= *expr_p
)
4697 /* We can't rely on gimplify_expr to re-gimplify the expanded
4698 form properly, as cleanups might cause the target labels to be
4699 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4700 set up a conditional context. */
4701 gimple_push_condition ();
4702 gimplify_stmt (expr_p
, &seq
);
4703 gimple_pop_condition (pre_p
);
4704 gimple_seq_add_seq (pre_p
, seq
);
4710 /* Now do the normal gimplification. */
4712 /* Gimplify condition. */
4713 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
,
4714 is_gimple_condexpr_for_cond
, fb_rvalue
);
4715 if (ret
== GS_ERROR
)
4717 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
4719 gimple_push_condition ();
4721 have_then_clause_p
= have_else_clause_p
= false;
4722 label_true
= find_goto_label (TREE_OPERAND (expr
, 1));
4724 && DECL_CONTEXT (GOTO_DESTINATION (label_true
)) == current_function_decl
4725 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4726 have different locations, otherwise we end up with incorrect
4727 location information on the branches. */
4729 || !EXPR_HAS_LOCATION (expr
)
4730 || !rexpr_has_location (label_true
)
4731 || EXPR_LOCATION (expr
) == rexpr_location (label_true
)))
4733 have_then_clause_p
= true;
4734 label_true
= GOTO_DESTINATION (label_true
);
4737 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4738 label_false
= find_goto_label (TREE_OPERAND (expr
, 2));
4740 && DECL_CONTEXT (GOTO_DESTINATION (label_false
)) == current_function_decl
4741 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4742 have different locations, otherwise we end up with incorrect
4743 location information on the branches. */
4745 || !EXPR_HAS_LOCATION (expr
)
4746 || !rexpr_has_location (label_false
)
4747 || EXPR_LOCATION (expr
) == rexpr_location (label_false
)))
4749 have_else_clause_p
= true;
4750 label_false
= GOTO_DESTINATION (label_false
);
4753 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
4755 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
4757 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
4759 gimple_set_location (cond_stmt
, EXPR_LOCATION (expr
));
4760 copy_warning (cond_stmt
, COND_EXPR_COND (expr
));
4761 gimplify_seq_add_stmt (&seq
, cond_stmt
);
4762 gimple_stmt_iterator gsi
= gsi_last (seq
);
4763 maybe_fold_stmt (&gsi
);
4765 label_cont
= NULL_TREE
;
4766 if (!have_then_clause_p
)
4768 /* For if (...) {} else { code; } put label_true after
4770 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4771 && !have_else_clause_p
4772 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4774 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4775 handling that label_cont == label_true can be only reached
4776 through fallthrough from { code; }. */
4777 if (integer_zerop (COND_EXPR_COND (expr
)))
4778 UNUSED_LABEL_P (label_true
) = 1;
4779 label_cont
= label_true
;
4783 bool then_side_effects
4784 = (TREE_OPERAND (expr
, 1)
4785 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr
, 1)));
4786 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4787 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4788 /* For if (...) { code; } else {} or
4789 if (...) { code; } else goto label; or
4790 if (...) { code; return; } else { ... }
4791 label_cont isn't needed. */
4792 if (!have_else_clause_p
4793 && TREE_OPERAND (expr
, 2) != NULL_TREE
4794 && gimple_seq_may_fallthru (seq
))
4797 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4799 /* For if (0) { non-side-effect-code } else { code }
4800 tell -Wimplicit-fallthrough handling that label_cont can
4801 be only reached through fallthrough from { code }. */
4802 if (integer_zerop (COND_EXPR_COND (expr
)))
4804 UNUSED_LABEL_P (label_true
) = 1;
4805 if (!then_side_effects
)
4806 UNUSED_LABEL_P (label_cont
) = 1;
4809 g
= gimple_build_goto (label_cont
);
4811 /* GIMPLE_COND's are very low level; they have embedded
4812 gotos. This particular embedded goto should not be marked
4813 with the location of the original COND_EXPR, as it would
4814 correspond to the COND_EXPR's condition, not the ELSE or the
4815 THEN arms. To avoid marking it with the wrong location, flag
4816 it as "no location". */
4817 gimple_set_do_not_emit_location (g
);
4819 gimplify_seq_add_stmt (&seq
, g
);
4823 if (!have_else_clause_p
)
4825 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4826 tell -Wimplicit-fallthrough handling that label_false can be only
4827 reached through fallthrough from { code }. */
4828 if (integer_nonzerop (COND_EXPR_COND (expr
))
4829 && (TREE_OPERAND (expr
, 2) == NULL_TREE
4830 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr
, 2))))
4831 UNUSED_LABEL_P (label_false
) = 1;
4832 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4833 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
4836 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4838 gimple_pop_condition (pre_p
);
4839 gimple_seq_add_seq (pre_p
, seq
);
4841 if (ret
== GS_ERROR
)
4843 else if (have_then_clause_p
|| have_else_clause_p
)
4847 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4848 expr
= TREE_OPERAND (expr
, 0);
4849 gimplify_stmt (&expr
, pre_p
);
4856 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4857 to be marked addressable.
4859 We cannot rely on such an expression being directly markable if a temporary
4860 has been created by the gimplification. In this case, we create another
4861 temporary and initialize it with a copy, which will become a store after we
4862 mark it addressable. This can happen if the front-end passed us something
4863 that it could not mark addressable yet, like a Fortran pass-by-reference
4864 parameter (int) floatvar. */
4867 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4869 while (handled_component_p (*expr_p
))
4870 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4872 /* Do not allow an SSA name as the temporary. */
4873 if (is_gimple_reg (*expr_p
))
4874 *expr_p
= internal_get_tmp_var (*expr_p
, seq_p
, NULL
, false, false, true);
4877 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4878 a call to __builtin_memcpy. */
4880 static enum gimplify_status
4881 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4884 tree t
, to
, to_ptr
, from
, from_ptr
;
4886 location_t loc
= EXPR_LOCATION (*expr_p
);
4888 to
= TREE_OPERAND (*expr_p
, 0);
4889 from
= TREE_OPERAND (*expr_p
, 1);
4890 gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to
)))
4891 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from
))));
4893 /* Mark the RHS addressable. Beware that it may not be possible to do so
4894 directly if a temporary has been created by the gimplification. */
4895 prepare_gimple_addressable (&from
, seq_p
);
4897 mark_addressable (from
);
4898 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4899 gimplify_arg (&from_ptr
, seq_p
, loc
);
4901 mark_addressable (to
);
4902 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4903 gimplify_arg (&to_ptr
, seq_p
, loc
);
4905 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4907 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4908 gimple_call_set_alloca_for_var (gs
, true);
4912 /* tmp = memcpy() */
4913 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4914 gimple_call_set_lhs (gs
, t
);
4915 gimplify_seq_add_stmt (seq_p
, gs
);
4917 *expr_p
= build_simple_mem_ref (t
);
4921 gimplify_seq_add_stmt (seq_p
, gs
);
4926 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4927 a call to __builtin_memset. In this case we know that the RHS is
4928 a CONSTRUCTOR with an empty element list. */
4930 static enum gimplify_status
4931 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4934 tree t
, from
, to
, to_ptr
;
4936 location_t loc
= EXPR_LOCATION (*expr_p
);
4938 /* Assert our assumptions, to abort instead of producing wrong code
4939 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4940 not be immediately exposed. */
4941 from
= TREE_OPERAND (*expr_p
, 1);
4942 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4943 from
= TREE_OPERAND (from
, 0);
4945 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4946 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4949 to
= TREE_OPERAND (*expr_p
, 0);
4950 gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to
))));
4952 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4953 gimplify_arg (&to_ptr
, seq_p
, loc
);
4954 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4956 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4960 /* tmp = memset() */
4961 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4962 gimple_call_set_lhs (gs
, t
);
4963 gimplify_seq_add_stmt (seq_p
, gs
);
4965 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4969 gimplify_seq_add_stmt (seq_p
, gs
);
4974 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4975 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4976 assignment. Return non-null if we detect a potential overlap. */
4978 struct gimplify_init_ctor_preeval_data
4980 /* The base decl of the lhs object. May be NULL, in which case we
4981 have to assume the lhs is indirect. */
4984 /* The alias set of the lhs object. */
4985 alias_set_type lhs_alias_set
;
4989 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4991 struct gimplify_init_ctor_preeval_data
*data
4992 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4995 /* If we find the base object, obviously we have overlap. */
4996 if (data
->lhs_base_decl
== t
)
4999 /* If the constructor component is indirect, determine if we have a
5000 potential overlap with the lhs. The only bits of information we
5001 have to go on at this point are addressability and alias sets. */
5002 if ((INDIRECT_REF_P (t
)
5003 || TREE_CODE (t
) == MEM_REF
)
5004 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
5005 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
5008 /* If the constructor component is a call, determine if it can hide a
5009 potential overlap with the lhs through an INDIRECT_REF like above.
5010 ??? Ugh - this is completely broken. In fact this whole analysis
5011 doesn't look conservative. */
5012 if (TREE_CODE (t
) == CALL_EXPR
)
5014 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
5016 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
5017 if (POINTER_TYPE_P (TREE_VALUE (type
))
5018 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
5019 && alias_sets_conflict_p (data
->lhs_alias_set
,
5021 (TREE_TYPE (TREE_VALUE (type
)))))
5025 if (IS_TYPE_OR_DECL_P (t
))
5030 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
5031 force values that overlap with the lhs (as described by *DATA)
5032 into temporaries. */
5035 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5036 struct gimplify_init_ctor_preeval_data
*data
)
5038 enum gimplify_status one
;
5040 /* If the value is constant, then there's nothing to pre-evaluate. */
5041 if (TREE_CONSTANT (*expr_p
))
5043 /* Ensure it does not have side effects, it might contain a reference to
5044 the object we're initializing. */
5045 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
5049 /* If the type has non-trivial constructors, we can't pre-evaluate. */
5050 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
5053 /* Recurse for nested constructors. */
5054 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
5056 unsigned HOST_WIDE_INT ix
;
5057 constructor_elt
*ce
;
5058 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
5060 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
5061 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
5066 /* If this is a variable sized type, we must remember the size. */
5067 maybe_with_size_expr (expr_p
);
5069 /* Gimplify the constructor element to something appropriate for the rhs
5070 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
5071 the gimplifier will consider this a store to memory. Doing this
5072 gimplification now means that we won't have to deal with complicated
5073 language-specific trees, nor trees like SAVE_EXPR that can induce
5074 exponential search behavior. */
5075 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
5076 if (one
== GS_ERROR
)
5082 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
5083 with the lhs, since "a = { .x=a }" doesn't make sense. This will
5084 always be true for all scalars, since is_gimple_mem_rhs insists on a
5085 temporary variable for them. */
5086 if (DECL_P (*expr_p
))
5089 /* If this is of variable size, we have no choice but to assume it doesn't
5090 overlap since we can't make a temporary for it. */
5091 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
5094 /* Otherwise, we must search for overlap ... */
5095 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
5098 /* ... and if found, force the value into a temporary. */
5099 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
5102 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
5103 a RANGE_EXPR in a CONSTRUCTOR for an array.
5107 object[var] = value;
5114 We increment var _after_ the loop exit check because we might otherwise
5115 fail if upper == TYPE_MAX_VALUE (type for upper).
5117 Note that we never have to deal with SAVE_EXPRs here, because this has
5118 already been taken care of for us, in gimplify_init_ctor_preeval(). */
5120 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
5121 gimple_seq
*, bool);
5124 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
5125 tree value
, tree array_elt_type
,
5126 gimple_seq
*pre_p
, bool cleared
)
5128 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
5129 tree var
, var_type
, cref
, tmp
;
5131 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
5132 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
5133 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
5135 /* Create and initialize the index variable. */
5136 var_type
= TREE_TYPE (upper
);
5137 var
= create_tmp_var (var_type
);
5138 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
5140 /* Add the loop entry label. */
5141 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
5143 /* Build the reference. */
5144 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
5145 var
, NULL_TREE
, NULL_TREE
);
5147 /* If we are a constructor, just call gimplify_init_ctor_eval to do
5148 the store. Otherwise just assign value to the reference. */
5150 if (TREE_CODE (value
) == CONSTRUCTOR
)
5151 /* NB we might have to call ourself recursively through
5152 gimplify_init_ctor_eval if the value is a constructor. */
5153 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
5157 if (gimplify_expr (&value
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
5159 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
5162 /* We exit the loop when the index var is equal to the upper bound. */
5163 gimplify_seq_add_stmt (pre_p
,
5164 gimple_build_cond (EQ_EXPR
, var
, upper
,
5165 loop_exit_label
, fall_thru_label
));
5167 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
5169 /* Otherwise, increment the index var... */
5170 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
5171 fold_convert (var_type
, integer_one_node
));
5172 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
5174 /* ...and jump back to the loop entry. */
5175 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
5177 /* Add the loop exit label. */
5178 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
5181 /* A subroutine of gimplify_init_constructor. Generate individual
5182 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
5183 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
5184 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
5188 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
5189 gimple_seq
*pre_p
, bool cleared
)
5191 tree array_elt_type
= NULL
;
5192 unsigned HOST_WIDE_INT ix
;
5193 tree purpose
, value
;
5195 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
5196 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
5198 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
5202 /* NULL values are created above for gimplification errors. */
5206 if (cleared
&& initializer_zerop (value
))
5209 /* ??? Here's to hoping the front end fills in all of the indices,
5210 so we don't have to figure out what's missing ourselves. */
5211 gcc_assert (purpose
);
5213 /* Skip zero-sized fields, unless value has side-effects. This can
5214 happen with calls to functions returning a empty type, which
5215 we shouldn't discard. As a number of downstream passes don't
5216 expect sets of empty type fields, we rely on the gimplification of
5217 the MODIFY_EXPR we make below to drop the assignment statement. */
5218 if (!TREE_SIDE_EFFECTS (value
)
5219 && TREE_CODE (purpose
) == FIELD_DECL
5220 && is_empty_type (TREE_TYPE (purpose
)))
5223 /* If we have a RANGE_EXPR, we have to build a loop to assign the
5225 if (TREE_CODE (purpose
) == RANGE_EXPR
)
5227 tree lower
= TREE_OPERAND (purpose
, 0);
5228 tree upper
= TREE_OPERAND (purpose
, 1);
5230 /* If the lower bound is equal to upper, just treat it as if
5231 upper was the index. */
5232 if (simple_cst_equal (lower
, upper
))
5236 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
5237 array_elt_type
, pre_p
, cleared
);
5244 /* Do not use bitsizetype for ARRAY_REF indices. */
5245 if (TYPE_DOMAIN (TREE_TYPE (object
)))
5247 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
5249 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
5250 purpose
, NULL_TREE
, NULL_TREE
);
5254 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
5255 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
5256 unshare_expr (object
), purpose
, NULL_TREE
);
5259 if (TREE_CODE (value
) == CONSTRUCTOR
5260 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
5261 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
5265 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
5266 gimplify_and_add (init
, pre_p
);
5272 /* Return the appropriate RHS predicate for this LHS. */
5275 rhs_predicate_for (tree lhs
)
5277 if (is_gimple_reg (lhs
))
5278 return is_gimple_reg_rhs_or_call
;
5280 return is_gimple_mem_rhs_or_call
;
5283 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5284 before the LHS has been gimplified. */
5286 static gimple_predicate
5287 initial_rhs_predicate_for (tree lhs
)
5289 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
5290 return is_gimple_reg_rhs_or_call
;
5292 return is_gimple_mem_rhs_or_call
;
5295 /* Gimplify a C99 compound literal expression. This just means adding
5296 the DECL_EXPR before the current statement and using its anonymous
5299 static enum gimplify_status
5300 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
5301 bool (*gimple_test_f
) (tree
),
5302 fallback_t fallback
)
5304 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
5305 tree decl
= DECL_EXPR_DECL (decl_s
);
5306 tree init
= DECL_INITIAL (decl
);
5307 /* Mark the decl as addressable if the compound literal
5308 expression is addressable now, otherwise it is marked too late
5309 after we gimplify the initialization expression. */
5310 if (TREE_ADDRESSABLE (*expr_p
))
5311 TREE_ADDRESSABLE (decl
) = 1;
5312 /* Otherwise, if we don't need an lvalue and have a literal directly
5313 substitute it. Check if it matches the gimple predicate, as
5314 otherwise we'd generate a new temporary, and we can as well just
5315 use the decl we already have. */
5316 else if (!TREE_ADDRESSABLE (decl
)
5317 && !TREE_THIS_VOLATILE (decl
)
5319 && (fallback
& fb_lvalue
) == 0
5320 && gimple_test_f (init
))
5326 /* If the decl is not addressable, then it is being used in some
5327 expression or on the right hand side of a statement, and it can
5328 be put into a readonly data section. */
5329 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
5330 TREE_READONLY (decl
) = 1;
5332 /* This decl isn't mentioned in the enclosing block, so add it to the
5333 list of temps. FIXME it seems a bit of a kludge to say that
5334 anonymous artificial vars aren't pushed, but everything else is. */
5335 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
5336 gimple_add_tmp_var (decl
);
5338 gimplify_and_add (decl_s
, pre_p
);
5343 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5344 return a new CONSTRUCTOR if something changed. */
5347 optimize_compound_literals_in_ctor (tree orig_ctor
)
5349 tree ctor
= orig_ctor
;
5350 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
5351 unsigned int idx
, num
= vec_safe_length (elts
);
5353 for (idx
= 0; idx
< num
; idx
++)
5355 tree value
= (*elts
)[idx
].value
;
5356 tree newval
= value
;
5357 if (TREE_CODE (value
) == CONSTRUCTOR
)
5358 newval
= optimize_compound_literals_in_ctor (value
);
5359 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
5361 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
5362 tree decl
= DECL_EXPR_DECL (decl_s
);
5363 tree init
= DECL_INITIAL (decl
);
5365 if (!TREE_ADDRESSABLE (value
)
5366 && !TREE_ADDRESSABLE (decl
)
5368 && TREE_CODE (init
) == CONSTRUCTOR
)
5369 newval
= optimize_compound_literals_in_ctor (init
);
5371 if (newval
== value
)
5374 if (ctor
== orig_ctor
)
5376 ctor
= copy_node (orig_ctor
);
5377 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
5378 elts
= CONSTRUCTOR_ELTS (ctor
);
5380 (*elts
)[idx
].value
= newval
;
5385 /* A subroutine of gimplify_modify_expr. Break out elements of a
5386 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5388 Note that we still need to clear any elements that don't have explicit
5389 initializers, so if not all elements are initialized we keep the
5390 original MODIFY_EXPR, we just remove all of the constructor elements.
5392 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5393 GS_ERROR if we would have to create a temporary when gimplifying
5394 this constructor. Otherwise, return GS_OK.
5396 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5398 static enum gimplify_status
5399 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5400 bool want_value
, bool notify_temp_creation
)
5402 tree object
, ctor
, type
;
5403 enum gimplify_status ret
;
5404 vec
<constructor_elt
, va_gc
> *elts
;
5405 bool cleared
= false;
5406 bool is_empty_ctor
= false;
5407 bool is_init_expr
= (TREE_CODE (*expr_p
) == INIT_EXPR
);
5409 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
5411 if (!notify_temp_creation
)
5413 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
5414 is_gimple_lvalue
, fb_lvalue
);
5415 if (ret
== GS_ERROR
)
5419 object
= TREE_OPERAND (*expr_p
, 0);
5420 ctor
= TREE_OPERAND (*expr_p
, 1)
5421 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
5422 type
= TREE_TYPE (ctor
);
5423 elts
= CONSTRUCTOR_ELTS (ctor
);
5426 switch (TREE_CODE (type
))
5430 case QUAL_UNION_TYPE
:
5433 /* Use readonly data for initializers of this or smaller size
5434 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5436 const HOST_WIDE_INT min_unique_size
= 64;
5437 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5438 is smaller than this, use readonly data. */
5439 const int unique_nonzero_ratio
= 8;
5440 /* True if a single access of the object must be ensured. This is the
5441 case if the target is volatile, the type is non-addressable and more
5442 than one field need to be assigned. */
5443 const bool ensure_single_access
5444 = TREE_THIS_VOLATILE (object
)
5445 && !TREE_ADDRESSABLE (type
)
5446 && vec_safe_length (elts
) > 1;
5447 struct gimplify_init_ctor_preeval_data preeval_data
;
5448 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
5449 HOST_WIDE_INT num_unique_nonzero_elements
;
5450 bool complete_p
, valid_const_initializer
;
5452 /* Aggregate types must lower constructors to initialization of
5453 individual elements. The exception is that a CONSTRUCTOR node
5454 with no elements indicates zero-initialization of the whole. */
5455 if (vec_safe_is_empty (elts
))
5457 if (notify_temp_creation
)
5460 /* The var will be initialized and so appear on lhs of
5461 assignment, it can't be TREE_READONLY anymore. */
5463 TREE_READONLY (object
) = 0;
5465 is_empty_ctor
= true;
5469 /* Fetch information about the constructor to direct later processing.
5470 We might want to make static versions of it in various cases, and
5471 can only do so if it known to be a valid constant initializer. */
5472 valid_const_initializer
5473 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
5474 &num_unique_nonzero_elements
,
5475 &num_ctor_elements
, &complete_p
);
5477 /* If a const aggregate variable is being initialized, then it
5478 should never be a lose to promote the variable to be static. */
5479 if (valid_const_initializer
5480 && num_nonzero_elements
> 1
5481 && TREE_READONLY (object
)
5483 && !DECL_REGISTER (object
)
5484 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
)
5485 || DECL_MERGEABLE (object
))
5486 /* For ctors that have many repeated nonzero elements
5487 represented through RANGE_EXPRs, prefer initializing
5488 those through runtime loops over copies of large amounts
5489 of data from readonly data section. */
5490 && (num_unique_nonzero_elements
5491 > num_nonzero_elements
/ unique_nonzero_ratio
5492 || ((unsigned HOST_WIDE_INT
) int_size_in_bytes (type
)
5493 <= (unsigned HOST_WIDE_INT
) min_unique_size
)))
5495 if (notify_temp_creation
)
5498 DECL_INITIAL (object
) = ctor
;
5499 TREE_STATIC (object
) = 1;
5500 if (!DECL_NAME (object
))
5501 DECL_NAME (object
) = create_tmp_var_name ("C");
5502 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
5504 /* ??? C++ doesn't automatically append a .<number> to the
5505 assembler name, and even when it does, it looks at FE private
5506 data structures to figure out what that number should be,
5507 which are not set for this variable. I suppose this is
5508 important for local statics for inline functions, which aren't
5509 "local" in the object file sense. So in order to get a unique
5510 TU-local symbol, we must invoke the lhd version now. */
5511 lhd_set_decl_assembler_name (object
);
5513 *expr_p
= NULL_TREE
;
5517 /* The var will be initialized and so appear on lhs of
5518 assignment, it can't be TREE_READONLY anymore. */
5519 if (VAR_P (object
) && !notify_temp_creation
)
5520 TREE_READONLY (object
) = 0;
5522 /* If there are "lots" of initialized elements, even discounting
5523 those that are not address constants (and thus *must* be
5524 computed at runtime), then partition the constructor into
5525 constant and non-constant parts. Block copy the constant
5526 parts in, then generate code for the non-constant parts. */
5527 /* TODO. There's code in cp/typeck.cc to do this. */
5529 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
5530 /* store_constructor will ignore the clearing of variable-sized
5531 objects. Initializers for such objects must explicitly set
5532 every field that needs to be set. */
5534 else if (!complete_p
)
5535 /* If the constructor isn't complete, clear the whole object
5536 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5538 ??? This ought not to be needed. For any element not present
5539 in the initializer, we should simply set them to zero. Except
5540 we'd need to *find* the elements that are not present, and that
5541 requires trickery to avoid quadratic compile-time behavior in
5542 large cases or excessive memory use in small cases. */
5543 cleared
= !CONSTRUCTOR_NO_CLEARING (ctor
);
5544 else if (num_ctor_elements
- num_nonzero_elements
5545 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
5546 && num_nonzero_elements
< num_ctor_elements
/ 4)
5547 /* If there are "lots" of zeros, it's more efficient to clear
5548 the memory and then set the nonzero elements. */
5550 else if (ensure_single_access
&& num_nonzero_elements
== 0)
5551 /* If a single access to the target must be ensured and all elements
5552 are zero, then it's optimal to clear whatever their number. */
5557 /* If there are "lots" of initialized elements, and all of them
5558 are valid address constants, then the entire initializer can
5559 be dropped to memory, and then memcpy'd out. Don't do this
5560 for sparse arrays, though, as it's more efficient to follow
5561 the standard CONSTRUCTOR behavior of memset followed by
5562 individual element initialization. Also don't do this for small
5563 all-zero initializers (which aren't big enough to merit
5564 clearing), and don't try to make bitwise copies of
5565 TREE_ADDRESSABLE types. */
5566 if (valid_const_initializer
5568 && !(cleared
|| num_nonzero_elements
== 0)
5569 && !TREE_ADDRESSABLE (type
))
5571 HOST_WIDE_INT size
= int_size_in_bytes (type
);
5574 /* ??? We can still get unbounded array types, at least
5575 from the C++ front end. This seems wrong, but attempt
5576 to work around it for now. */
5579 size
= int_size_in_bytes (TREE_TYPE (object
));
5581 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
5584 /* Find the maximum alignment we can assume for the object. */
5585 /* ??? Make use of DECL_OFFSET_ALIGN. */
5586 if (DECL_P (object
))
5587 align
= DECL_ALIGN (object
);
5589 align
= TYPE_ALIGN (type
);
5591 /* Do a block move either if the size is so small as to make
5592 each individual move a sub-unit move on average, or if it
5593 is so large as to make individual moves inefficient. */
5595 && num_nonzero_elements
> 1
5596 /* For ctors that have many repeated nonzero elements
5597 represented through RANGE_EXPRs, prefer initializing
5598 those through runtime loops over copies of large amounts
5599 of data from readonly data section. */
5600 && (num_unique_nonzero_elements
5601 > num_nonzero_elements
/ unique_nonzero_ratio
5602 || size
<= min_unique_size
)
5603 && (size
< num_nonzero_elements
5604 || !can_move_by_pieces (size
, align
)))
5606 if (notify_temp_creation
)
5609 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
5610 ctor
= tree_output_constant_def (ctor
);
5611 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
5612 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
5613 TREE_OPERAND (*expr_p
, 1) = ctor
;
5615 /* This is no longer an assignment of a CONSTRUCTOR, but
5616 we still may have processing to do on the LHS. So
5617 pretend we didn't do anything here to let that happen. */
5618 return GS_UNHANDLED
;
5622 /* If a single access to the target must be ensured and there are
5623 nonzero elements or the zero elements are not assigned en masse,
5624 initialize the target from a temporary. */
5625 if (ensure_single_access
&& (num_nonzero_elements
> 0 || !cleared
))
5627 if (notify_temp_creation
)
5630 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
5631 TREE_OPERAND (*expr_p
, 0) = temp
;
5632 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
5634 build2 (MODIFY_EXPR
, void_type_node
,
5639 if (notify_temp_creation
)
5642 /* If there are nonzero elements and if needed, pre-evaluate to capture
5643 elements overlapping with the lhs into temporaries. We must do this
5644 before clearing to fetch the values before they are zeroed-out. */
5645 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
5647 preeval_data
.lhs_base_decl
= get_base_address (object
);
5648 if (!DECL_P (preeval_data
.lhs_base_decl
))
5649 preeval_data
.lhs_base_decl
= NULL
;
5650 preeval_data
.lhs_alias_set
= get_alias_set (object
);
5652 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
5653 pre_p
, post_p
, &preeval_data
);
5656 bool ctor_has_side_effects_p
5657 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
5661 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5662 Note that we still have to gimplify, in order to handle the
5663 case of variable sized types. Avoid shared tree structures. */
5664 CONSTRUCTOR_ELTS (ctor
) = NULL
;
5665 TREE_SIDE_EFFECTS (ctor
) = 0;
5666 object
= unshare_expr (object
);
5667 gimplify_stmt (expr_p
, pre_p
);
5670 /* If we have not block cleared the object, or if there are nonzero
5671 elements in the constructor, or if the constructor has side effects,
5672 add assignments to the individual scalar fields of the object. */
5674 || num_nonzero_elements
> 0
5675 || ctor_has_side_effects_p
)
5676 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
5678 *expr_p
= NULL_TREE
;
5686 if (notify_temp_creation
)
5689 /* Extract the real and imaginary parts out of the ctor. */
5690 gcc_assert (elts
->length () == 2);
5691 r
= (*elts
)[0].value
;
5692 i
= (*elts
)[1].value
;
5693 if (r
== NULL
|| i
== NULL
)
5695 tree zero
= build_zero_cst (TREE_TYPE (type
));
5702 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5703 represent creation of a complex value. */
5704 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
5706 ctor
= build_complex (type
, r
, i
);
5707 TREE_OPERAND (*expr_p
, 1) = ctor
;
5711 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
5712 TREE_OPERAND (*expr_p
, 1) = ctor
;
5713 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
5716 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
5724 unsigned HOST_WIDE_INT ix
;
5725 constructor_elt
*ce
;
5727 if (notify_temp_creation
)
5730 /* Vector types use CONSTRUCTOR all the way through gimple
5731 compilation as a general initializer. */
5732 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
5734 enum gimplify_status tret
;
5735 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
5737 if (tret
== GS_ERROR
)
5739 else if (TREE_STATIC (ctor
)
5740 && !initializer_constant_valid_p (ce
->value
,
5741 TREE_TYPE (ce
->value
)))
5742 TREE_STATIC (ctor
) = 0;
5744 recompute_constructor_flags (ctor
);
5746 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5747 if (TREE_CONSTANT (ctor
))
5749 bool constant_p
= true;
5752 /* Even when ctor is constant, it might contain non-*_CST
5753 elements, such as addresses or trapping values like
5754 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5755 in VECTOR_CST nodes. */
5756 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
5757 if (!CONSTANT_CLASS_P (value
))
5765 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
5770 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
5771 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
5776 /* So how did we get a CONSTRUCTOR for a scalar type? */
5780 if (ret
== GS_ERROR
)
5782 /* If we have gimplified both sides of the initializer but have
5783 not emitted an assignment, do so now. */
5785 /* If the type is an empty type, we don't need to emit the
5787 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
5789 tree lhs
= TREE_OPERAND (*expr_p
, 0);
5790 tree rhs
= TREE_OPERAND (*expr_p
, 1);
5791 if (want_value
&& object
== lhs
)
5792 lhs
= unshare_expr (lhs
);
5793 gassign
*init
= gimple_build_assign (lhs
, rhs
);
5794 gimplify_seq_add_stmt (pre_p
, init
);
5807 /* If the user requests to initialize automatic variables, we
5808 should initialize paddings inside the variable. Add a call to
5809 __builtin_clear_pading (&object, 0, for_auto_init = true) to
5810 initialize paddings of object always to zero regardless of
5811 INIT_TYPE. Note, we will not insert this call if the aggregate
5812 variable has be completely cleared already or it's initialized
5813 with an empty constructor. We cannot insert this call if the
5814 variable is a gimple register since __builtin_clear_padding will take
5815 the address of the variable. As a result, if a long double/_Complex long
5816 double variable will be spilled into stack later, its padding cannot
5817 be cleared with __builtin_clear_padding. We should clear its padding
5818 when it is spilled into memory. */
5820 && !is_gimple_reg (object
)
5821 && clear_padding_type_may_have_padding_p (type
)
5822 && ((AGGREGATE_TYPE_P (type
) && !cleared
&& !is_empty_ctor
)
5823 || !AGGREGATE_TYPE_P (type
))
5824 && is_var_need_auto_init (object
))
5825 gimple_add_padding_init_for_auto_var (object
, false, pre_p
);
5830 /* Given a pointer value OP0, return a simplified version of an
5831 indirection through OP0, or NULL_TREE if no simplification is
5832 possible. This may only be applied to a rhs of an expression.
5833 Note that the resulting type may be different from the type pointed
5834 to in the sense that it is still compatible from the langhooks
/* Thin wrapper: delegates directly to gimple_fold_indirect_ref. */
5838 gimple_fold_indirect_ref_rhs (tree t
)
5840 return gimple_fold_indirect_ref (t
);
5843 /* Subroutine of gimplify_modify_expr to do simplifications of
5844 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5845 something changes. */
/* EXPR_P is the whole MODIFY/INIT_EXPR; FROM_P/TO_P point at its RHS/LHS
   operands so the cases below can rewrite them in place.  PRE_P/POST_P
   collect side-effect statements.  Returns a gimplify_status; GS_UNHANDLED
   means no simplification applied.  */
5847 static enum gimplify_status
5848 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
5849 gimple_seq
*pre_p
, gimple_seq
*post_p
,
5852 enum gimplify_status ret
= GS_UNHANDLED
;
/* Dispatch on the tree code of the RHS; each case may rewrite
   *from_p, *to_p or *expr_p in place and set RET.  */
5858 switch (TREE_CODE (*from_p
))
5861 /* If we're assigning from a read-only variable initialized with
5862 a constructor and not volatile, do the direct assignment from
5863 the constructor, but only if the target is not volatile either
5864 since this latter assignment might end up being done on a per
5865 field basis. However, if the target is volatile and the type
5866 is aggregate and non-addressable, gimplify_init_constructor
5867 knows that it needs to ensure a single access to the target
5868 and it will return GS_OK only in this case. */
5869 if (TREE_READONLY (*from_p
)
5870 && DECL_INITIAL (*from_p
)
5871 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
5872 && !TREE_THIS_VOLATILE (*from_p
)
5873 && (!TREE_THIS_VOLATILE (*to_p
)
5874 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p
))
5875 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p
)))))
5877 tree old_from
= *from_p
;
5878 enum gimplify_status subret
;
5880 /* Move the constructor into the RHS. */
5881 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
5883 /* Let's see if gimplify_init_constructor will need to put
5885 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
5887 if (subret
== GS_ERROR
)
5889 /* If so, revert the change. */
/* Restore OLD_FROM on failure so the expression is left untouched.  */
5900 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p
)))
5901 /* If we have code like
5905 where the type of "x" is a (possibly cv-qualified variant
5906 of "A"), treat the entire expression as identical to "x".
5907 This kind of code arises in C++ when an object is bound
5908 to a const reference, and if "x" is a TARGET_EXPR we want
5909 to take advantage of the optimization below. But not if
5910 the type is TREE_ADDRESSABLE; then C++17 says that the
5911 TARGET_EXPR needs to be a temporary. */
5913 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0)))
5915 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
5916 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
/* Volatility mismatch: rebuild the access as an explicit
   MEM_REF so the original volatility can be reapplied.  */
5919 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
5920 build_fold_addr_expr (t
));
5921 if (REFERENCE_CLASS_P (t
))
5922 TREE_THIS_VOLATILE (t
) = volatile_p
;
5932 /* If we are initializing something from a TARGET_EXPR, strip the
5933 TARGET_EXPR and initialize it directly, if possible. This can't
5934 be done if the initializer is void, since that implies that the
5935 temporary is set in some non-trivial way.
5937 ??? What about code that pulls out the temp and uses it
5938 elsewhere? I think that such code never uses the TARGET_EXPR as
5939 an initializer. If I'm wrong, we'll die because the temp won't
5940 have any RTL. In that case, I guess we'll need to replace
5941 references somehow. */
5942 tree init
= TARGET_EXPR_INITIAL (*from_p
);
5945 && (TREE_CODE (*expr_p
) != MODIFY_EXPR
5946 || !TARGET_EXPR_NO_ELIDE (*from_p
))
5947 && !VOID_TYPE_P (TREE_TYPE (init
)))
5957 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5959 gimplify_compound_expr (from_p
, pre_p
, true);
5965 /* If we already made some changes, let the front end have a
5966 crack at this before we break it down. */
5967 if (ret
!= GS_UNHANDLED
)
5970 /* If we're initializing from a CONSTRUCTOR, break this into
5971 individual MODIFY_EXPRs. */
5972 ret
= gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
5977 /* If we're assigning to a non-register type, push the assignment
5978 down into the branches. This is mandatory for ADDRESSABLE types,
5979 since we cannot generate temporaries for such, but it saves a
5980 copy in other cases as well. */
5981 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
5983 /* This code should mirror the code in gimplify_cond_expr. */
5984 enum tree_code code
= TREE_CODE (*expr_p
);
5985 tree cond
= *from_p
;
5986 tree result
= *to_p
;
5988 ret
= gimplify_expr (&result
, pre_p
, post_p
,
5989 is_gimple_lvalue
, fb_lvalue
);
5990 if (ret
!= GS_ERROR
)
5993 /* If we are going to write RESULT more than once, clear
5994 TREE_READONLY flag, otherwise we might incorrectly promote
5995 the variable to static const and initialize it at compile
5996 time in one of the branches. */
5998 && TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
5999 && TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
6000 TREE_READONLY (result
) = 0;
/* Push the assignment into each non-void arm of the COND_EXPR.  */
6001 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
6002 TREE_OPERAND (cond
, 1)
6003 = build2 (code
, void_type_node
, result
,
6004 TREE_OPERAND (cond
, 1));
6005 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
6006 TREE_OPERAND (cond
, 2)
6007 = build2 (code
, void_type_node
, unshare_expr (result
),
6008 TREE_OPERAND (cond
, 2));
6010 TREE_TYPE (cond
) = void_type_node
;
6011 recalculate_side_effects (cond
);
6015 gimplify_and_add (cond
, pre_p
);
6016 *expr_p
= unshare_expr (result
);
6025 /* For calls that return in memory, give *to_p as the CALL_EXPR's
6026 return slot so that we don't generate a temporary. */
6027 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
6028 && aggregate_value_p (*from_p
, *from_p
))
6032 if (!(rhs_predicate_for (*to_p
))(*from_p
))
6033 /* If we need a temporary, *to_p isn't accurate. */
6035 /* It's OK to use the return slot directly unless it's an NRV. */
6036 else if (TREE_CODE (*to_p
) == RESULT_DECL
6037 && DECL_NAME (*to_p
) == NULL_TREE
6038 && needs_to_live_in_memory (*to_p
))
6040 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
6041 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
6042 /* Don't force regs into memory. */
6044 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
6045 /* It's OK to use the target directly if it's being
6048 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p
)))
6050 /* Always use the target and thus RSO for variable-sized types.
6051 GIMPLE cannot deal with a variable-sized assignment
6052 embedded in a call statement. */
6054 else if (TREE_CODE (*to_p
) != SSA_NAME
6055 && (!is_gimple_variable (*to_p
)
6056 || needs_to_live_in_memory (*to_p
)))
6057 /* Don't use the original target if it's already addressable;
6058 if its address escapes, and the called function uses the
6059 NRV optimization, a conforming program could see *to_p
6060 change before the called function returns; see c++/19317.
6061 When optimizing, the return_slot pass marks more functions
6062 as safe after we have escape info. */
6069 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
6070 mark_addressable (*to_p
);
6075 case WITH_SIZE_EXPR
:
6076 /* Likewise for calls that return an aggregate of non-constant size,
6077 since we would not be able to generate a temporary at all. */
6078 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
6080 *from_p
= TREE_OPERAND (*from_p
, 0);
6081 /* We don't change ret in this case because the
6082 WITH_SIZE_EXPR might have been added in
6083 gimplify_modify_expr, so returning GS_OK would lead to an
6089 /* If we're initializing from a container, push the initialization
6091 case CLEANUP_POINT_EXPR
:
6093 case STATEMENT_LIST
:
6095 tree wrap
= *from_p
;
6098 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
6100 if (ret
!= GS_ERROR
)
6103 t
= voidify_wrapper_expr (wrap
, *expr_p
);
6104 gcc_assert (t
== *expr_p
);
6108 gimplify_and_add (wrap
, pre_p
);
6109 *expr_p
= unshare_expr (*to_p
);
6117 /* Pull out compound literal expressions from a NOP_EXPR.
6118 Those are created in the C FE to drop qualifiers during
6119 lvalue conversion. */
6120 if ((TREE_CODE (TREE_OPERAND (*from_p
, 0)) == COMPOUND_LITERAL_EXPR
)
6121 && tree_ssa_useless_type_conversion (*from_p
))
6123 *from_p
= TREE_OPERAND (*from_p
, 0);
6129 case COMPOUND_LITERAL_EXPR
:
6131 tree complit
= TREE_OPERAND (*expr_p
, 1);
6132 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
6133 tree decl
= DECL_EXPR_DECL (decl_s
);
6134 tree init
= DECL_INITIAL (decl
);
6136 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
6137 into struct T x = { 0, 1, 2 } if the address of the
6138 compound literal has never been taken. */
6139 if (!TREE_ADDRESSABLE (complit
)
6140 && !TREE_ADDRESSABLE (decl
)
6143 *expr_p
= copy_node (*expr_p
);
6144 TREE_OPERAND (*expr_p
, 1) = init
;
6159 /* Return true if T looks like a valid GIMPLE statement. */
/* T is a GENERIC tree; validity is decided purely on TREE_CODE (t),
   with the two special cases commented below.  */
6162 is_gimple_stmt (tree t
)
6164 const enum tree_code code
= TREE_CODE (t
);
6169 /* The only valid NOP_EXPR is the empty statement. */
6170 return IS_EMPTY_STMT (t
);
6174 /* These are only valid if they're void. */
6175 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
6181 case CASE_LABEL_EXPR
:
6182 case TRY_CATCH_EXPR
:
6183 case TRY_FINALLY_EXPR
:
6184 case EH_FILTER_EXPR
:
6187 case STATEMENT_LIST
:
6192 case OACC_HOST_DATA
:
6195 case OACC_ENTER_DATA
:
6196 case OACC_EXIT_DATA
:
6201 case OMP_DISTRIBUTE
:
6208 case OMP_STRUCTURED_BLOCK
:
6217 case OMP_TARGET_DATA
:
6218 case OMP_TARGET_UPDATE
:
6219 case OMP_TARGET_ENTER_DATA
:
6220 case OMP_TARGET_EXIT_DATA
:
6223 /* These are always void. */
6229 /* These are valid regardless of their type. */
6238 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
6239 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
6241 IMPORTANT NOTE: This promotion is performed by introducing a load of the
6242 other, unmodified part of the complex object just before the total store.
6243 As a consequence, if the object is still uninitialized, an undefined value
6244 will be loaded into a register, which may result in a spurious exception
6245 if the register is floating-point and the value happens to be a signaling
6246 NaN for example. Then the fully-fledged complex operations lowering pass
6247 followed by a DCE pass are necessary in order to fix things up. */
6249 static enum gimplify_status
6250 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
6253 enum tree_code code
, ocode
;
6254 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
/* LHS starts as the REAL/IMAGPART_EXPR; after peeling one operand it is
   the underlying complex object.  */
6256 lhs
= TREE_OPERAND (*expr_p
, 0);
6257 rhs
= TREE_OPERAND (*expr_p
, 1);
6258 code
= TREE_CODE (lhs
);
6259 lhs
= TREE_OPERAND (lhs
, 0);
/* Load the untouched half into a formal temporary so the full complex
   value can be stored in one assignment.  */
6261 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
6262 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
6263 suppress_warning (other
);
6264 other
= get_formal_tmp_var (other
, pre_p
);
6266 realpart
= code
== REALPART_EXPR
? rhs
: other
;
6267 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
/* Fold to a COMPLEX_CST when both halves are constant.  */
6269 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
6270 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
6272 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
6274 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
6275 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
6280 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6286 PRE_P points to the list where side effects that must happen before
6287 *EXPR_P should be stored.
6289 POST_P points to the list where side effects that must happen after
6290 *EXPR_P should be stored.
6292 WANT_VALUE is nonzero iff we want to use the value of this expression
6293 in another expression. */
6295 static enum gimplify_status
6296 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
6299 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
6300 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
6301 enum gimplify_status ret
= GS_UNHANDLED
;
6303 location_t loc
= EXPR_LOCATION (*expr_p
);
6304 gimple_stmt_iterator gsi
;
/* Bail out early on erroneous operands.  */
6306 if (error_operand_p (*from_p
) || error_operand_p (*to_p
))
6309 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
6310 || TREE_CODE (*expr_p
) == INIT_EXPR
);
6312 /* Trying to simplify a clobber using normal logic doesn't work,
6313 so handle it here. */
6314 if (TREE_CLOBBER_P (*from_p
))
6316 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
6317 if (ret
== GS_ERROR
)
6319 gcc_assert (!want_value
);
6320 if (!VAR_P (*to_p
) && TREE_CODE (*to_p
) != MEM_REF
)
6322 tree addr
= get_initialized_tmp_var (build_fold_addr_expr (*to_p
),
6324 *to_p
= build_simple_mem_ref_loc (EXPR_LOCATION (*to_p
), addr
);
6326 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
6331 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6333 if (TREE_TYPE (*from_p
) != error_mark_node
6334 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p
))
6335 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p
)))
6336 && TREE_CODE (*from_p
) == CONSTRUCTOR
6337 && CONSTRUCTOR_NELTS (*from_p
) == 0)
6339 maybe_with_size_expr (from_p
);
6340 gcc_assert (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
);
6341 return gimplify_modify_expr_to_memset (expr_p
,
6342 TREE_OPERAND (*from_p
, 1),
6346 /* Insert pointer conversions required by the middle-end that are not
6347 required by the frontend. This fixes middle-end type checking for
6348 for example gcc.dg/redecl-6.c. */
6349 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
6351 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
6352 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
6353 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
6356 /* See if any simplifications can be done based on what the RHS is. */
6357 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
6359 if (ret
!= GS_UNHANDLED
)
6362 /* For empty types only gimplify the left hand side and right hand
6363 side as statements and throw away the assignment. Do this after
6364 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6366 if (is_empty_type (TREE_TYPE (*from_p
))
6368 /* Don't do this for calls that return addressable types, expand_call
6369 relies on those having a lhs. */
6370 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p
))
6371 && TREE_CODE (*from_p
) == CALL_EXPR
))
6373 gimplify_stmt (from_p
, pre_p
);
6374 gimplify_stmt (to_p
, pre_p
);
6375 *expr_p
= NULL_TREE
;
6379 /* If the value being copied is of variable width, compute the length
6380 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6381 before gimplifying any of the operands so that we can resolve any
6382 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6383 the size of the expression to be copied, not of the destination, so
6384 that is what we must do here. */
6385 maybe_with_size_expr (from_p
);
6387 /* As a special case, we have to temporarily allow for assignments
6388 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6389 a toplevel statement, when gimplifying the GENERIC expression
6390 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6391 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6393 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6394 prevent gimplify_expr from trying to create a new temporary for
6395 foo's LHS, we tell it that it should only gimplify until it
6396 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6397 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6398 and all we need to do here is set 'a' to be its LHS. */
6400 /* Gimplify the RHS first for C++17 and bug 71104. */
6401 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
6402 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
6403 if (ret
== GS_ERROR
)
6406 /* Then gimplify the LHS. */
6407 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6408 twice we have to make sure to gimplify into non-SSA as otherwise
6409 the abnormal edge added later will make those defs not dominate
6411 ??? Technically this applies only to the registers used in the
6412 resulting non-register *TO_P. */
6413 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
6415 && TREE_CODE (*from_p
) == CALL_EXPR
6416 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
6417 gimplify_ctxp
->into_ssa
= false;
6418 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
6419 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
6420 if (ret
== GS_ERROR
)
6423 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6424 guess for the predicate was wrong. */
6425 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
6426 if (final_pred
!= initial_pred
)
6428 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
6429 if (ret
== GS_ERROR
)
6433 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
6434 size as argument to the call. */
6435 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
6437 tree call
= TREE_OPERAND (*from_p
, 0);
6438 tree vlasize
= TREE_OPERAND (*from_p
, 1);
6440 if (TREE_CODE (call
) == CALL_EXPR
6441 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
6443 int nargs
= call_expr_nargs (call
);
6444 tree type
= TREE_TYPE (call
);
6445 tree ap
= CALL_EXPR_ARG (call
, 0);
6446 tree tag
= CALL_EXPR_ARG (call
, 1);
6447 tree aptag
= CALL_EXPR_ARG (call
, 2);
6448 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
6452 TREE_OPERAND (*from_p
, 0) = newcall
;
6456 /* Now see if the above changed *from_p to something we handle specially. */
6457 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
6459 if (ret
!= GS_UNHANDLED
)
6462 /* If we've got a variable sized assignment between two lvalues (i.e. does
6463 not involve a call), then we can make things a bit more straightforward
6464 by converting the assignment to memcpy or memset. */
6465 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
6467 tree from
= TREE_OPERAND (*from_p
, 0);
6468 tree size
= TREE_OPERAND (*from_p
, 1);
6470 if (TREE_CODE (from
) == CONSTRUCTOR
)
6471 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
6472 else if (is_gimple_addressable (from
)
6473 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (*to_p
)))
6474 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from
))))
6477 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
6482 /* Transform partial stores to non-addressable complex variables into
6483 total stores. This allows us to use real instead of virtual operands
6484 for these variables, which improves optimization. */
6485 if ((TREE_CODE (*to_p
) == REALPART_EXPR
6486 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
6487 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
6488 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
6490 /* Try to alleviate the effects of the gimplification creating artificial
6491 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6492 make sure not to create DECL_DEBUG_EXPR links across functions. */
6493 if (!gimplify_ctxp
->into_ssa
6495 && DECL_IGNORED_P (*from_p
)
6497 && !DECL_IGNORED_P (*to_p
)
6498 && decl_function_context (*to_p
) == current_function_decl
6499 && decl_function_context (*from_p
) == current_function_decl
)
6501 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
6503 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
6504 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
6505 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
6508 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
6509 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
6511 if (TREE_CODE (*from_p
) == CALL_EXPR
)
6513 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6514 instead of a GIMPLE_ASSIGN. */
6516 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
6518 /* Gimplify internal functions created in the FEs. */
6519 int nargs
= call_expr_nargs (*from_p
), i
;
6520 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
6521 auto_vec
<tree
> vargs (nargs
);
6523 for (i
= 0; i
< nargs
; i
++)
6525 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
6526 EXPR_LOCATION (*from_p
));
6527 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
6529 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
6530 gimple_call_set_nothrow (call_stmt
, TREE_NOTHROW (*from_p
));
6531 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
6535 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
6536 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
6537 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
6538 tree fndecl
= get_callee_fndecl (*from_p
);
/* __builtin_expect with three arguments is lowered to the internal
   function IFN_BUILTIN_EXPECT.  */
6540 && fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT
)
6541 && call_expr_nargs (*from_p
) == 3)
6542 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
6543 CALL_EXPR_ARG (*from_p
, 0),
6544 CALL_EXPR_ARG (*from_p
, 1),
6545 CALL_EXPR_ARG (*from_p
, 2));
6548 call_stmt
= gimple_build_call_from_tree (*from_p
, fnptrtype
);
6551 notice_special_calls (call_stmt
);
6552 if (!gimple_call_noreturn_p (call_stmt
) || !should_remove_lhs_p (*to_p
))
6553 gimple_call_set_lhs (call_stmt
, *to_p
);
6554 else if (TREE_CODE (*to_p
) == SSA_NAME
)
6555 /* The above is somewhat premature, avoid ICEing later for a
6556 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6557 ??? This doesn't make it a default-def. */
6558 SSA_NAME_DEF_STMT (*to_p
) = gimple_build_nop ();
6564 assign
= gimple_build_assign (*to_p
, *from_p
);
6565 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
6566 if (COMPARISON_CLASS_P (*from_p
))
6567 copy_warning (assign
, *from_p
);
6570 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
6572 /* We should have got an SSA name from the start. */
6573 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
6574 || ! gimple_in_ssa_p (cfun
))
;
6577 gimplify_seq_add_stmt (pre_p
, assign
);
6578 gsi
= gsi_last (*pre_p
);
6579 maybe_fold_stmt (&gsi
);
/* Result value: the RHS when the LHS is volatile, otherwise an unshared
   copy of the LHS (presumably to avoid re-reading a volatile location
   -- confirm against callers).  */
6583 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
6592 /* Gimplify a comparison between two variable-sized objects. Do this
6593 with a call to BUILT_IN_MEMCMP. */
/* EXPR_P is the comparison expression; its operands are the two objects.
   The size is taken from op0's TYPE_SIZE_UNIT, with any PLACEHOLDER_EXPR
   substituted from op0 itself.  */
6595 static enum gimplify_status
6596 gimplify_variable_sized_compare (tree
*expr_p
)
6598 location_t loc
= EXPR_LOCATION (*expr_p
);
6599 tree op0
= TREE_OPERAND (*expr_p
, 0);
6600 tree op1
= TREE_OPERAND (*expr_p
, 1);
6601 tree t
, arg
, dest
, src
, expr
;
6603 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
6604 arg
= unshare_expr (arg
);
6605 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
6606 src
= build_fold_addr_expr_loc (loc
, op1
);
6607 dest
= build_fold_addr_expr_loc (loc
, op0
);
/* Build memcmp (&op0, &op1, size) and compare its result against zero
   using the original comparison code.  */
6608 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
6609 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
6612 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
6613 SET_EXPR_LOCATION (expr
, loc
);
6619 /* Gimplify a comparison between two aggregate objects of integral scalar
6620 mode as a comparison between the bitwise equivalent scalar values. */
/* Both operands are VIEW_CONVERTed to the unsigned integer type that the
   language hook provides for the aggregate's machine mode, then compared
   with the original comparison code.  */
6622 static enum gimplify_status
6623 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
6625 location_t loc
= EXPR_LOCATION (*expr_p
);
6626 tree op0
= TREE_OPERAND (*expr_p
, 0);
6627 tree op1
= TREE_OPERAND (*expr_p
, 1);
6629 tree type
= TREE_TYPE (op0
);
6630 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
6632 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
6633 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
6636 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
6641 /* Gimplify an expression sequence. This function gimplifies each
6642 expression and rewrites the original expression with the last
6643 expression of the sequence in GIMPLE form.
6645 PRE_P points to the list where the side effects for all the
6646 expressions in the sequence will be emitted.
6648 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6650 static enum gimplify_status
6651 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
6657 tree
*sub_p
= &TREE_OPERAND (t
, 0);
6659 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
6660 gimplify_compound_expr (sub_p
, pre_p
, false);
6662 gimplify_stmt (sub_p
, pre_p
);
6664 t
= TREE_OPERAND (t
, 1);
6666 while (TREE_CODE (t
) == COMPOUND_EXPR
);
6673 gimplify_stmt (expr_p
, pre_p
);
6678 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6679 gimplify. After gimplification, EXPR_P will point to a new temporary
6680 that holds the original value of the SAVE_EXPR node.
6682 PRE_P points to the list where side effects that must happen before
6683 *EXPR_P should be stored. */
6685 static enum gimplify_status
6686 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6688 enum gimplify_status ret
= GS_ALL_DONE
;
6691 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
6692 val
= TREE_OPERAND (*expr_p
, 0);
6694 if (val
&& TREE_TYPE (val
) == error_mark_node
)
6697 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6698 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
6700 /* The operand may be a void-valued expression. It is
6701 being executed only for its side-effects. */
6702 if (TREE_TYPE (val
) == void_type_node
)
6704 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
6705 is_gimple_stmt
, fb_none
);
6709 /* The temporary may not be an SSA name as later abnormal and EH
6710 control flow may invalidate use/def domination. When in SSA
6711 form then assume there are no such issues and SAVE_EXPRs only
6712 appear via GENERIC foldings. */
6713 val
= get_initialized_tmp_var (val
, pre_p
, post_p
,
6714 gimple_in_ssa_p (cfun
));
6716 TREE_OPERAND (*expr_p
, 0) = val
;
6717 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
6725 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6732 PRE_P points to the list where side effects that must happen before
6733 *EXPR_P should be stored.
6735 POST_P points to the list where side effects that must happen after
6736 *EXPR_P should be stored. */
6738 static enum gimplify_status
6739 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6741 tree expr
= *expr_p
;
6742 tree op0
= TREE_OPERAND (expr
, 0);
6743 enum gimplify_status ret
;
6744 location_t loc
= EXPR_LOCATION (*expr_p
);
6746 switch (TREE_CODE (op0
))
6750 /* Check if we are dealing with an expression of the form '&*ptr'.
6751 While the front end folds away '&*ptr' into 'ptr', these
6752 expressions may be generated internally by the compiler (e.g.,
6753 builtins like __builtin_va_end). */
6754 /* Caution: the silent array decomposition semantics we allow for
6755 ADDR_EXPR means we can't always discard the pair. */
6756 /* Gimplification of the ADDR_EXPR operand may drop
6757 cv-qualification conversions, so make sure we add them if
6760 tree op00
= TREE_OPERAND (op0
, 0);
6761 tree t_expr
= TREE_TYPE (expr
);
6762 tree t_op00
= TREE_TYPE (op00
);
6764 if (!useless_type_conversion_p (t_expr
, t_op00
))
6765 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
6771 case VIEW_CONVERT_EXPR
:
6772 /* Take the address of our operand and then convert it to the type of
6775 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6776 all clear. The impact of this transformation is even less clear. */
6778 /* If the operand is a useless conversion, look through it. Doing so
6779 guarantees that the ADDR_EXPR and its operand will remain of the
6781 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
6782 op0
= TREE_OPERAND (op0
, 0);
6784 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
6785 build_fold_addr_expr_loc (loc
,
6786 TREE_OPERAND (op0
, 0)));
6791 if (integer_zerop (TREE_OPERAND (op0
, 1)))
6792 goto do_indirect_ref
;
6797 /* If we see a call to a declared builtin or see its address
6798 being taken (we can unify those cases here) then we can mark
6799 the builtin for implicit generation by GCC. */
6800 if (TREE_CODE (op0
) == FUNCTION_DECL
6801 && fndecl_built_in_p (op0
, BUILT_IN_NORMAL
)
6802 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
6803 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
6805 /* We use fb_either here because the C frontend sometimes takes
6806 the address of a call that returns a struct; see
6807 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6808 the implied temporary explicit. */
6810 /* Make the operand addressable. */
6811 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
6812 is_gimple_addressable
, fb_either
);
6813 if (ret
== GS_ERROR
)
6816 /* Then mark it. Beware that it may not be possible to do so directly
6817 if a temporary has been created by the gimplification. */
6818 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
6820 op0
= TREE_OPERAND (expr
, 0);
6822 /* For various reasons, the gimplification of the expression
6823 may have made a new INDIRECT_REF. */
6824 if (INDIRECT_REF_P (op0
)
6825 || (TREE_CODE (op0
) == MEM_REF
6826 && integer_zerop (TREE_OPERAND (op0
, 1))))
6827 goto do_indirect_ref
;
6829 mark_addressable (TREE_OPERAND (expr
, 0));
6831 /* The FEs may end up building ADDR_EXPRs early on a decl with
6832 an incomplete type. Re-build ADDR_EXPRs in canonical form
6834 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
6835 *expr_p
= build_fold_addr_expr (op0
);
6837 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6838 recompute_tree_invariant_for_addr_expr (*expr_p
);
6840 /* If we re-built the ADDR_EXPR add a conversion to the original type
6842 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
6843 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
6851 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6852 value; output operands should be a gimple lvalue. */
6854 static enum gimplify_status
6855 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6859 const char **oconstraints
;
6862 const char *constraint
;
6863 bool allows_mem
, allows_reg
, is_inout
;
6864 enum gimplify_status ret
, tret
;
6866 vec
<tree
, va_gc
> *inputs
;
6867 vec
<tree
, va_gc
> *outputs
;
6868 vec
<tree
, va_gc
> *clobbers
;
6869 vec
<tree
, va_gc
> *labels
;
6873 noutputs
= list_length (ASM_OUTPUTS (expr
));
6874 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
6882 link_next
= NULL_TREE
;
6883 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
6886 size_t constraint_len
;
6888 link_next
= TREE_CHAIN (link
);
6892 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6893 constraint_len
= strlen (constraint
);
6894 if (constraint_len
== 0)
6897 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6898 &allows_mem
, &allows_reg
, &is_inout
);
6905 /* If we can't make copies, we can only accept memory.
6906 Similarly for VLAs. */
6907 tree outtype
= TREE_TYPE (TREE_VALUE (link
));
6908 if (outtype
!= error_mark_node
6909 && (TREE_ADDRESSABLE (outtype
)
6910 || !COMPLETE_TYPE_P (outtype
)
6911 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype
))))
6917 error ("impossible constraint in %<asm%>");
6918 error ("non-memory output %d must stay in memory", i
);
6923 if (!allows_reg
&& allows_mem
)
6924 mark_addressable (TREE_VALUE (link
));
6926 tree orig
= TREE_VALUE (link
);
6927 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6928 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6929 fb_lvalue
| fb_mayfail
);
6930 if (tret
== GS_ERROR
)
6932 if (orig
!= error_mark_node
)
6933 error ("invalid lvalue in %<asm%> output %d", i
);
6937 /* If the constraint does not allow memory make sure we gimplify
6938 it to a register if it is not already but its base is. This
6939 happens for complex and vector components. */
6942 tree op
= TREE_VALUE (link
);
6943 if (! is_gimple_val (op
)
6944 && is_gimple_reg_type (TREE_TYPE (op
))
6945 && is_gimple_reg (get_base_address (op
)))
6947 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6951 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6952 tem
, unshare_expr (op
));
6953 gimplify_and_add (ass
, pre_p
);
6955 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6956 gimplify_and_add (ass
, post_p
);
6958 TREE_VALUE (link
) = tem
;
6963 vec_safe_push (outputs
, link
);
6964 TREE_CHAIN (link
) = NULL_TREE
;
6968 /* An input/output operand. To give the optimizers more
6969 flexibility, split it into separate input and output
6972 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6975 /* Turn the in/out constraint into an output constraint. */
6976 char *p
= xstrdup (constraint
);
6978 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6980 /* And add a matching input constraint. */
6983 sprintf (buf
, "%u", i
);
6985 /* If there are multiple alternatives in the constraint,
6986 handle each of them individually. Those that allow register
6987 will be replaced with operand number, the others will stay
6989 if (strchr (p
, ',') != NULL
)
6991 size_t len
= 0, buflen
= strlen (buf
);
6992 char *beg
, *end
, *str
, *dst
;
6996 end
= strchr (beg
, ',');
6998 end
= strchr (beg
, '\0');
6999 if ((size_t) (end
- beg
) < buflen
)
7002 len
+= end
- beg
+ 1;
7009 str
= (char *) alloca (len
);
7010 for (beg
= p
+ 1, dst
= str
;;)
7013 bool mem_p
, reg_p
, inout_p
;
7015 end
= strchr (beg
, ',');
7020 parse_output_constraint (&tem
, i
, 0, 0,
7021 &mem_p
, ®_p
, &inout_p
);
7026 memcpy (dst
, buf
, buflen
);
7035 memcpy (dst
, beg
, len
);
7044 input
= build_string (dst
- str
, str
);
7047 input
= build_string (strlen (buf
), buf
);
7050 input
= build_string (constraint_len
- 1, constraint
+ 1);
7054 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
7055 unshare_expr (TREE_VALUE (link
)));
7056 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
7060 link_next
= NULL_TREE
;
7061 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
7063 link_next
= TREE_CHAIN (link
);
7064 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
7065 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
7066 oconstraints
, &allows_mem
, &allows_reg
);
7068 /* If we can't make copies, we can only accept memory. */
7069 tree intype
= TREE_TYPE (TREE_VALUE (link
));
7070 if (intype
!= error_mark_node
7071 && (TREE_ADDRESSABLE (intype
)
7072 || !COMPLETE_TYPE_P (intype
)
7073 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype
))))
7079 error ("impossible constraint in %<asm%>");
7080 error ("non-memory input %d must stay in memory", i
);
7085 /* If the operand is a memory input, it should be an lvalue. */
7086 if (!allows_reg
&& allows_mem
)
7088 tree inputv
= TREE_VALUE (link
);
7089 STRIP_NOPS (inputv
);
7090 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
7091 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
7092 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
7093 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
7094 || TREE_CODE (inputv
) == MODIFY_EXPR
)
7095 TREE_VALUE (link
) = error_mark_node
;
7096 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
7097 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
7098 if (tret
!= GS_ERROR
)
7100 /* Unlike output operands, memory inputs are not guaranteed
7101 to be lvalues by the FE, and while the expressions are
7102 marked addressable there, if it is e.g. a statement
7103 expression, temporaries in it might not end up being
7104 addressable. They might be already used in the IL and thus
7105 it is too late to make them addressable now though. */
7106 tree x
= TREE_VALUE (link
);
7107 while (handled_component_p (x
))
7108 x
= TREE_OPERAND (x
, 0);
7109 if (TREE_CODE (x
) == MEM_REF
7110 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
7111 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
7113 || TREE_CODE (x
) == PARM_DECL
7114 || TREE_CODE (x
) == RESULT_DECL
)
7115 && !TREE_ADDRESSABLE (x
)
7116 && is_gimple_reg (x
))
7118 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
7120 "memory input %d is not directly addressable",
7122 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
7125 mark_addressable (TREE_VALUE (link
));
7126 if (tret
== GS_ERROR
)
7128 if (inputv
!= error_mark_node
)
7129 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
7130 "memory input %d is not directly addressable", i
);
7136 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
7137 is_gimple_asm_val
, fb_rvalue
);
7138 if (tret
== GS_ERROR
)
7142 TREE_CHAIN (link
) = NULL_TREE
;
7143 vec_safe_push (inputs
, link
);
7146 link_next
= NULL_TREE
;
7147 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
7149 link_next
= TREE_CHAIN (link
);
7150 TREE_CHAIN (link
) = NULL_TREE
;
7151 vec_safe_push (clobbers
, link
);
7154 link_next
= NULL_TREE
;
7155 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
7157 link_next
= TREE_CHAIN (link
);
7158 TREE_CHAIN (link
) = NULL_TREE
;
7159 vec_safe_push (labels
, link
);
7162 /* Do not add ASMs with errors to the gimple IL stream. */
7163 if (ret
!= GS_ERROR
)
7165 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
7166 inputs
, outputs
, clobbers
, labels
);
7168 /* asm is volatile if it was marked by the user as volatile or
7169 there are no outputs or this is an asm goto. */
7170 gimple_asm_set_volatile (stmt
,
7171 ASM_VOLATILE_P (expr
)
7174 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
7175 gimple_asm_set_inline (stmt
, ASM_INLINE_P (expr
));
7177 gimplify_seq_add_stmt (pre_p
, stmt
);
7183 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
7184 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
7185 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
7186 return to this function.
7188 FIXME should we complexify the prequeue handling instead? Or use flags
7189 for all the cleanups and let the optimizer tighten them up? The current
7190 code seems pretty fragile; it will break on a cleanup within any
7191 non-conditional nesting. But any such nesting would be broken, anyway;
7192 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
7193 and continues out of it. We can do that at the RTL level, though, so
7194 having an optimizer to tighten up try/finally regions would be a Good
7197 static enum gimplify_status
7198 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
7200 gimple_stmt_iterator iter
;
7201 gimple_seq body_sequence
= NULL
;
7203 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
7205 /* We only care about the number of conditions between the innermost
7206 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
7207 any cleanups collected outside the CLEANUP_POINT_EXPR. */
7208 int old_conds
= gimplify_ctxp
->conditions
;
7209 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
7210 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
7211 gimplify_ctxp
->conditions
= 0;
7212 gimplify_ctxp
->conditional_cleanups
= NULL
;
7213 gimplify_ctxp
->in_cleanup_point_expr
= true;
7215 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
7217 gimplify_ctxp
->conditions
= old_conds
;
7218 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
7219 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
7221 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
7223 gimple
*wce
= gsi_stmt (iter
);
7225 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
7227 if (gsi_one_before_end_p (iter
))
7229 /* Note that gsi_insert_seq_before and gsi_remove do not
7230 scan operands, unlike some other sequence mutators. */
7231 if (!gimple_wce_cleanup_eh_only (wce
))
7232 gsi_insert_seq_before_without_update (&iter
,
7233 gimple_wce_cleanup (wce
),
7235 gsi_remove (&iter
, true);
7242 enum gimple_try_flags kind
;
7244 if (gimple_wce_cleanup_eh_only (wce
))
7245 kind
= GIMPLE_TRY_CATCH
;
7247 kind
= GIMPLE_TRY_FINALLY
;
7248 seq
= gsi_split_seq_after (iter
);
7250 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
7251 /* Do not use gsi_replace here, as it may scan operands.
7252 We want to do a simple structural modification only. */
7253 gsi_set_stmt (&iter
, gtry
);
7254 iter
= gsi_start (gtry
->eval
);
7261 gimplify_seq_add_seq (pre_p
, body_sequence
);
7274 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7275 is the cleanup action required. EH_ONLY is true if the cleanup should
7276 only be executed if an exception is thrown, not on normal exit.
7277 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7278 only valid for clobbers. */
7281 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
7282 bool force_uncond
= false)
7285 gimple_seq cleanup_stmts
= NULL
;
7287 /* Errors can result in improperly nested cleanups. Which results in
7288 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7292 if (gimple_conditional_context ())
7294 /* If we're in a conditional context, this is more complex. We only
7295 want to run the cleanup if we actually ran the initialization that
7296 necessitates it, but we want to run it after the end of the
7297 conditional context. So we wrap the try/finally around the
7298 condition and use a flag to determine whether or not to actually
7299 run the destructor. Thus
7303 becomes (approximately)
7307 if (test) { A::A(temp); flag = 1; val = f(temp); }
7310 if (flag) A::~A(temp);
7316 gimplify_stmt (&cleanup
, &cleanup_stmts
);
7317 wce
= gimple_build_wce (cleanup_stmts
);
7318 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
7322 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
7323 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
7324 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
7326 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
7327 gimplify_stmt (&cleanup
, &cleanup_stmts
);
7328 wce
= gimple_build_wce (cleanup_stmts
);
7329 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
7331 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
7332 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
7333 gimplify_seq_add_stmt (pre_p
, ftrue
);
7335 /* Because of this manipulation, and the EH edges that jump
7336 threading cannot redirect, the temporary (VAR) will appear
7337 to be used uninitialized. Don't warn. */
7338 suppress_warning (var
, OPT_Wuninitialized
);
7343 gimplify_stmt (&cleanup
, &cleanup_stmts
);
7344 wce
= gimple_build_wce (cleanup_stmts
);
7345 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
7346 gimplify_seq_add_stmt (pre_p
, wce
);
7350 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7352 static enum gimplify_status
7353 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
7355 tree targ
= *expr_p
;
7356 tree temp
= TARGET_EXPR_SLOT (targ
);
7357 tree init
= TARGET_EXPR_INITIAL (targ
);
7358 enum gimplify_status ret
;
7360 bool unpoison_empty_seq
= false;
7361 gimple_stmt_iterator unpoison_it
;
7365 gimple_seq init_pre_p
= NULL
;
7367 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
7368 to the temps list. Handle also variable length TARGET_EXPRs. */
7369 if (!poly_int_tree_p (DECL_SIZE (temp
)))
7371 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
7372 gimplify_type_sizes (TREE_TYPE (temp
), &init_pre_p
);
7373 /* FIXME: this is correct only when the size of the type does
7374 not depend on expressions evaluated in init. */
7375 gimplify_vla_decl (temp
, &init_pre_p
);
7379 /* Save location where we need to place unpoisoning. It's possible
7380 that a variable will be converted to needs_to_live_in_memory. */
7381 unpoison_it
= gsi_last (*pre_p
);
7382 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
7384 gimple_add_tmp_var (temp
);
7387 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7388 expression is supposed to initialize the slot. */
7389 if (VOID_TYPE_P (TREE_TYPE (init
)))
7390 ret
= gimplify_expr (&init
, &init_pre_p
, post_p
, is_gimple_stmt
,
7394 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
7396 ret
= gimplify_expr (&init
, &init_pre_p
, post_p
, is_gimple_stmt
,
7399 ggc_free (init_expr
);
7401 if (ret
== GS_ERROR
)
7403 /* PR c++/28266 Make sure this is expanded only once. */
7404 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
7409 gimplify_and_add (init
, &init_pre_p
);
7411 /* Add a clobber for the temporary going out of scope, like
7412 gimplify_bind_expr. But only if we did not promote the
7413 temporary to static storage. */
7414 if (gimplify_ctxp
->in_cleanup_point_expr
7415 && !TREE_STATIC (temp
)
7416 && needs_to_live_in_memory (temp
))
7418 if (flag_stack_reuse
== SR_ALL
)
7420 tree clobber
= build_clobber (TREE_TYPE (temp
),
7421 CLOBBER_STORAGE_END
);
7422 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
7423 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
7425 if (asan_poisoned_variables
7426 && DECL_ALIGN (temp
) <= MAX_SUPPORTED_STACK_ALIGNMENT
7427 && !TREE_STATIC (temp
)
7428 && dbg_cnt (asan_use_after_scope
)
7429 && !gimplify_omp_ctxp
)
7431 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
7434 if (unpoison_empty_seq
)
7435 unpoison_it
= gsi_start (*pre_p
);
7437 asan_poison_variable (temp
, false, &unpoison_it
,
7438 unpoison_empty_seq
);
7439 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
7444 gimple_seq_add_seq (pre_p
, init_pre_p
);
7446 /* If needed, push the cleanup for the temp. */
7447 if (TARGET_EXPR_CLEANUP (targ
))
7448 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
7449 CLEANUP_EH_ONLY (targ
), pre_p
);
7451 /* Only expand this once. */
7452 TREE_OPERAND (targ
, 3) = init
;
7453 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
7456 /* We should have expanded this before. */
7457 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
7463 /* Gimplification of expression trees. */
7465 /* Gimplify an expression which appears at statement context. The
7466 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7467 NULL, a new sequence is allocated.
7469 Return true if we actually added a statement to the queue. */
7472 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
7474 gimple_seq_node last
;
7476 last
= gimple_seq_last (*seq_p
);
7477 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
7478 return last
!= gimple_seq_last (*seq_p
);
7481 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
7482 to CTX. If entries already exist, force them to be some flavor of private.
7483 If there is no enclosing parallel, do nothing. */
7486 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
7490 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
7495 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7498 if (n
->value
& GOVD_SHARED
)
7499 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
7500 else if (n
->value
& GOVD_MAP
)
7501 n
->value
|= GOVD_MAP_TO_ONLY
;
7505 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
7507 if (ctx
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
7508 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
7510 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
7512 else if (ctx
->region_type
!= ORT_WORKSHARE
7513 && ctx
->region_type
!= ORT_TASKGROUP
7514 && ctx
->region_type
!= ORT_SIMD
7515 && ctx
->region_type
!= ORT_ACC
7516 && !(ctx
->region_type
& ORT_TARGET_DATA
))
7517 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
7519 ctx
= ctx
->outer_context
;
7524 /* Similarly for each of the type sizes of TYPE. */
7527 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
7529 if (type
== NULL
|| type
== error_mark_node
)
7531 type
= TYPE_MAIN_VARIANT (type
);
7533 if (ctx
->privatized_types
->add (type
))
7536 switch (TREE_CODE (type
))
7542 case FIXED_POINT_TYPE
:
7543 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
7544 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
7548 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
7549 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
7554 case QUAL_UNION_TYPE
:
7557 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
7558 if (TREE_CODE (field
) == FIELD_DECL
)
7560 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
7561 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
7567 case REFERENCE_TYPE
:
7568 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
7575 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
7576 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
7577 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
7580 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7583 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
7586 unsigned int nflags
;
7589 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
7592 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7593 there are constructors involved somewhere. Exception is a shared clause,
7594 there is nothing privatized in that case. */
7595 if ((flags
& GOVD_SHARED
) == 0
7596 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
7597 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
7600 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7601 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7603 /* We shouldn't be re-adding the decl with the same data
7605 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
7606 nflags
= n
->value
| flags
;
7607 /* The only combination of data sharing classes we should see is
7608 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7609 reduction variables to be used in data sharing clauses. */
7610 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
7611 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
7612 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
7613 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
7618 /* When adding a variable-sized variable, we have to handle all sorts
7619 of additional bits of data: the pointer replacement variable, and
7620 the parameters of the type. */
7621 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7623 /* Add the pointer replacement variable as PRIVATE if the variable
7624 replacement is private, else FIRSTPRIVATE since we'll need the
7625 address of the original variable either for SHARED, or for the
7626 copy into or out of the context. */
7627 if (!(flags
& GOVD_LOCAL
) && ctx
->region_type
!= ORT_TASKGROUP
)
7629 if (flags
& GOVD_MAP
)
7630 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
7631 else if (flags
& GOVD_PRIVATE
)
7632 nflags
= GOVD_PRIVATE
;
7633 else if (((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7634 && (flags
& GOVD_FIRSTPRIVATE
))
7635 || (ctx
->region_type
== ORT_TARGET_DATA
7636 && (flags
& GOVD_DATA_SHARE_CLASS
) == 0))
7637 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
7639 nflags
= GOVD_FIRSTPRIVATE
;
7640 nflags
|= flags
& GOVD_SEEN
;
7641 t
= DECL_VALUE_EXPR (decl
);
7642 gcc_assert (INDIRECT_REF_P (t
));
7643 t
= TREE_OPERAND (t
, 0);
7644 gcc_assert (DECL_P (t
));
7645 omp_add_variable (ctx
, t
, nflags
);
7648 /* Add all of the variable and type parameters (which should have
7649 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7650 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
7651 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
7652 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7654 /* The variable-sized variable itself is never SHARED, only some form
7655 of PRIVATE. The sharing would take place via the pointer variable
7656 which we remapped above. */
7657 if (flags
& GOVD_SHARED
)
7658 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
7659 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
7661 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7662 alloca statement we generate for the variable, so make sure it
7663 is available. This isn't automatically needed for the SHARED
7664 case, since we won't be allocating local storage then.
7665 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7666 in this case omp_notice_variable will be called later
7667 on when it is gimplified. */
7668 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
7669 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
7670 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
7672 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
7673 && omp_privatize_by_reference (decl
))
7675 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7677 /* Similar to the direct variable sized case above, we'll need the
7678 size of references being privatized. */
7679 if ((flags
& GOVD_SHARED
) == 0)
7681 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7682 if (t
&& DECL_P (t
))
7683 omp_notice_variable (ctx
, t
, true);
7690 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
7692 /* For reductions clauses in OpenACC loop directives, by default create a
7693 copy clause on the enclosing parallel construct for carrying back the
7695 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
7697 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
7700 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
7703 /* Ignore local variables and explicitly declared clauses. */
7704 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
7706 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
7708 /* According to the OpenACC spec, such a reduction variable
7709 should already have a copy map on a kernels construct,
7710 verify that here. */
7711 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
7712 && (n
->value
& GOVD_MAP
));
7714 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7716 /* Remove firstprivate and make it a copy map. */
7717 n
->value
&= ~GOVD_FIRSTPRIVATE
;
7718 n
->value
|= GOVD_MAP
;
7721 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7723 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
7724 GOVD_MAP
| GOVD_SEEN
);
7727 outer_ctx
= outer_ctx
->outer_context
;
7732 /* Notice a threadprivate variable DECL used in OMP context CTX.
7733 This just prints out diagnostics about threadprivate variable uses
7734 in untied tasks. If DECL2 is non-NULL, prevent this warning
7735 on that variable. */
/* NOTE(review): this chunk is a lossy extraction -- each statement is split
   across lines and several original lines (the return type, the DECL2
   parameter line, braces, return statements) are missing.  Only comments
   were added; the remaining code text is byte-identical.  */
7738 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
7742 struct gimplify_omp_ctx
*octx
;
/* Walk from CTX outward through the enclosing OMP contexts, diagnosing a
   threadprivate variable used inside a target region or inside a region
   with an order(concurrent) clause.  */
7744 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
7745 if ((octx
->region_type
& ORT_TARGET
) != 0
7746 || octx
->order_concurrent
)
/* Look the decl up in this context's splay tree of known variables;
   presumably a hit suppresses a repeat diagnostic (the guard line is
   missing from this extraction -- TODO confirm against upstream).  */
7748 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
7751 if (octx
->order_concurrent
)
7753 error ("threadprivate variable %qE used in a region with"
7754 " %<order(concurrent)%> clause", DECL_NAME (decl
));
7755 inform (octx
->location
, "enclosing region");
7759 error ("threadprivate variable %qE used in target region",
7761 inform (octx
->location
, "enclosing target region");
/* Record DECL (and DECL2, when given) so the diagnostic is emitted at
   most once per context.  */
7763 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
7766 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
/* Presumably returns early here unless CTX is an untied task -- the
   intervening lines are missing from this extraction.  */
7769 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
7771 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7774 error ("threadprivate variable %qE used in untied task",
7776 inform (ctx
->location
, "enclosing task");
7777 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
7780 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
7784 /* Return true if global var DECL is device resident. */
/* NOTE(review): lossy extraction -- the early-return when the attribute is
   absent, the braces, and the final return statements are missing here.
   Only comments were added; code text left byte-identical.  */
7787 device_resident_p (tree decl
)
7789 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
/* Scan the attribute's clause list for a GOMP_MAP_DEVICE_RESIDENT map;
   note the unusual iteration: TREE_VALUE holds the list, TREE_PURPOSE
   chains to the next element.  */
7794 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
7796 tree c
= TREE_VALUE (t
);
7797 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
7804 /* Return true if DECL has an ACC DECLARE attribute. */
/* NOTE(review): lossy extraction -- the return type line and braces are
   missing; only comments were added, code text left byte-identical.  */
7807 is_oacc_declared (tree decl
)
/* Look through a MEM_REF to the underlying base before checking for the
   "oacc declare target" attribute.  */
7809 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
7810 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
7811 return declared
!= NULL_TREE
;
7814 /* Determine outer default flags for DECL mentioned in an OMP region
7815 but not declared in an enclosing clause.
7817 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7818 remapped firstprivate instead of shared. To some extent this is
7819 addressed in omp_firstprivatize_type_sizes, but not
7823 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
7824 bool in_code
, unsigned flags
)
7826 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
7827 enum omp_clause_default_kind kind
;
7829 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
7830 if (ctx
->region_type
& ORT_TASK
)
7832 tree detach_clause
= omp_find_clause (ctx
->clauses
, OMP_CLAUSE_DETACH
);
7834 /* The event-handle specified by a detach clause should always be firstprivate,
7835 regardless of the current default. */
7836 if (detach_clause
&& OMP_CLAUSE_DECL (detach_clause
) == decl
)
7837 kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
7839 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
7840 default_kind
= kind
;
7841 else if (VAR_P (decl
) && TREE_STATIC (decl
) && DECL_IN_CONSTANT_POOL (decl
))
7842 default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
7843 /* For C/C++ default({,first}private), variables with static storage duration
7844 declared in a namespace or global scope and referenced in construct
7845 must be explicitly specified, i.e. acts as default(none). */
7846 else if ((default_kind
== OMP_CLAUSE_DEFAULT_PRIVATE
7847 || default_kind
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
7849 && is_global_var (decl
)
7850 && (DECL_FILE_SCOPE_P (decl
)
7851 || (DECL_CONTEXT (decl
)
7852 && TREE_CODE (DECL_CONTEXT (decl
)) == NAMESPACE_DECL
))
7853 && !lang_GNU_Fortran ())
7854 default_kind
= OMP_CLAUSE_DEFAULT_NONE
;
7856 switch (default_kind
)
7858 case OMP_CLAUSE_DEFAULT_NONE
:
7862 if (ctx
->region_type
& ORT_PARALLEL
)
7864 else if ((ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
)
7866 else if (ctx
->region_type
& ORT_TASK
)
7868 else if (ctx
->region_type
& ORT_TEAMS
)
7873 error ("%qE not specified in enclosing %qs",
7874 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
7875 inform (ctx
->location
, "enclosing %qs", rtype
);
7878 case OMP_CLAUSE_DEFAULT_SHARED
:
7879 flags
|= GOVD_SHARED
;
7881 case OMP_CLAUSE_DEFAULT_PRIVATE
:
7882 flags
|= GOVD_PRIVATE
;
7884 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
7885 flags
|= GOVD_FIRSTPRIVATE
;
7887 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
7888 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7889 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
7890 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
7892 omp_notice_variable (octx
, decl
, in_code
);
7893 for (; octx
; octx
= octx
->outer_context
)
7897 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
7898 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
7899 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7901 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
7903 flags
|= GOVD_FIRSTPRIVATE
;
7906 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
7908 flags
|= GOVD_SHARED
;
7914 if (TREE_CODE (decl
) == PARM_DECL
7915 || (!is_global_var (decl
)
7916 && DECL_CONTEXT (decl
) == current_function_decl
))
7917 flags
|= GOVD_FIRSTPRIVATE
;
7919 flags
|= GOVD_SHARED
;
7930 /* Return string name for types of OpenACC constructs from ORT_* values. */
/* NOTE(review): lossy extraction -- the return type, the strings returned
   by each case, the default case, and the braces are missing.  Only
   comments were added; code text left byte-identical.  */
7933 oacc_region_type_name (enum omp_region_type region_type
)
7935 switch (region_type
)
7939 case ORT_ACC_PARALLEL
:
7941 case ORT_ACC_KERNELS
:
7943 case ORT_ACC_SERIAL
:
7950 /* Determine outer default flags for DECL mentioned in an OACC region
7951 but not declared in an enclosing clause. */
7954 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
7956 struct gimplify_omp_ctx
*ctx_default
= ctx
;
7957 /* If no 'default' clause appears on this compute construct... */
7958 if (ctx_default
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
)
7960 /* ..., see if one appears on a lexically containing 'data'
7962 while ((ctx_default
= ctx_default
->outer_context
))
7964 if (ctx_default
->region_type
== ORT_ACC_DATA
7965 && ctx_default
->default_kind
!= OMP_CLAUSE_DEFAULT_SHARED
)
7968 /* If not, reset. */
7973 bool on_device
= false;
7974 bool is_private
= false;
7975 bool declared
= is_oacc_declared (decl
);
7976 tree type
= TREE_TYPE (decl
);
7978 if (omp_privatize_by_reference (decl
))
7979 type
= TREE_TYPE (type
);
7981 /* For Fortran COMMON blocks, only used variables in those blocks are
7982 transferred and remapped. The block itself will have a private clause to
7983 avoid transferring the data twice.
7984 The hook evaluates to false by default. For a variable in Fortran's COMMON
7985 or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
7986 the variables in such a COMMON/EQUIVALENCE block shall be privatized not
7987 the whole block. For C++ and Fortran, it can also be true under certain
7988 other conditions, if DECL_HAS_VALUE_EXPR. */
7989 if (RECORD_OR_UNION_TYPE_P (type
))
7990 is_private
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7992 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
7993 && is_global_var (decl
)
7994 && device_resident_p (decl
)
7998 flags
|= GOVD_MAP_TO_ONLY
;
8001 switch (ctx
->region_type
)
8003 case ORT_ACC_KERNELS
:
8005 flags
|= GOVD_FIRSTPRIVATE
;
8006 else if (AGGREGATE_TYPE_P (type
))
8008 /* Aggregates default to 'present_or_copy', or 'present'. */
8009 if (ctx_default
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
8012 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
8015 /* Scalars default to 'copy'. */
8016 flags
|= GOVD_MAP
| GOVD_MAP_FORCE
;
8020 case ORT_ACC_PARALLEL
:
8021 case ORT_ACC_SERIAL
:
8023 flags
|= GOVD_FIRSTPRIVATE
;
8024 else if (on_device
|| declared
)
8026 else if (AGGREGATE_TYPE_P (type
))
8028 /* Aggregates default to 'present_or_copy', or 'present'. */
8029 if (ctx_default
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
8032 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
8035 /* Scalars default to 'firstprivate'. */
8036 flags
|= GOVD_FIRSTPRIVATE
;
8044 if (DECL_ARTIFICIAL (decl
))
8045 ; /* We can get compiler-generated decls, and should not complain
8047 else if (ctx_default
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
8049 error ("%qE not specified in enclosing OpenACC %qs construct",
8050 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)),
8051 oacc_region_type_name (ctx
->region_type
));
8052 if (ctx_default
!= ctx
)
8053 inform (ctx
->location
, "enclosing OpenACC %qs construct and",
8054 oacc_region_type_name (ctx
->region_type
));
8055 inform (ctx_default
->location
,
8056 "enclosing OpenACC %qs construct with %qs clause",
8057 oacc_region_type_name (ctx_default
->region_type
),
8060 else if (ctx_default
->default_kind
== OMP_CLAUSE_DEFAULT_PRESENT
)
8061 ; /* Handled above. */
8063 gcc_checking_assert (ctx_default
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
8068 /* Record the fact that DECL was used within the OMP context CTX.
8069 IN_CODE is true when real code uses DECL, and false when we should
8070 merely emit default(none) errors. Return true if DECL is going to
8071 be remapped and thus DECL shouldn't be gimplified into its
8072 DECL_VALUE_EXPR (if any). */
8075 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
8078 unsigned flags
= in_code
? GOVD_SEEN
: 0;
8079 bool ret
= false, shared
;
8081 if (error_operand_p (decl
))
8084 if (DECL_ARTIFICIAL (decl
))
8086 tree attr
= lookup_attribute ("omp allocate var", DECL_ATTRIBUTES (decl
));
8088 decl
= TREE_VALUE (TREE_VALUE (attr
));
8091 if (ctx
->region_type
== ORT_NONE
)
8092 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
8094 if (is_global_var (decl
))
8096 /* Threadprivate variables are predetermined. */
8097 if (DECL_THREAD_LOCAL_P (decl
))
8098 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
8100 if (DECL_HAS_VALUE_EXPR_P (decl
))
8102 if (ctx
->region_type
& ORT_ACC
)
8103 /* For OpenACC, defer expansion of value to avoid transfering
8104 privatized common block data instead of im-/explicitly transferred
8105 variables which are in common blocks. */
8109 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
8111 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
8112 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
8116 if (gimplify_omp_ctxp
->outer_context
== NULL
8118 && oacc_get_fn_attrib (current_function_decl
))
8120 location_t loc
= DECL_SOURCE_LOCATION (decl
);
8122 if (lookup_attribute ("omp declare target link",
8123 DECL_ATTRIBUTES (decl
)))
8126 "%qE with %<link%> clause used in %<routine%> function",
8130 else if (!lookup_attribute ("omp declare target",
8131 DECL_ATTRIBUTES (decl
)))
8134 "%qE requires a %<declare%> directive for use "
8135 "in a %<routine%> function", DECL_NAME (decl
));
8141 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
8142 if ((ctx
->region_type
& ORT_TARGET
) != 0)
8144 if (ctx
->region_type
& ORT_ACC
)
8145 /* For OpenACC, as remarked above, defer expansion. */
8150 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
8153 unsigned nflags
= flags
;
8154 if ((ctx
->region_type
& ORT_ACC
) == 0)
8156 bool is_declare_target
= false;
8157 if (is_global_var (decl
)
8158 && varpool_node::get_create (decl
)->offloadable
)
8160 struct gimplify_omp_ctx
*octx
;
8161 for (octx
= ctx
->outer_context
;
8162 octx
; octx
= octx
->outer_context
)
8164 n
= splay_tree_lookup (octx
->variables
,
8165 (splay_tree_key
)decl
);
8167 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
8168 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
8171 is_declare_target
= octx
== NULL
;
8173 if (!is_declare_target
)
8176 enum omp_clause_defaultmap_kind kind
;
8177 if (lang_hooks
.decls
.omp_allocatable_p (decl
))
8178 gdmk
= GDMK_ALLOCATABLE
;
8179 else if (lang_hooks
.decls
.omp_scalar_target_p (decl
))
8180 gdmk
= GDMK_SCALAR_TARGET
;
8181 else if (lang_hooks
.decls
.omp_scalar_p (decl
, false))
8183 else if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
8184 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
8185 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
8187 gdmk
= GDMK_POINTER
;
8189 gdmk
= GDMK_AGGREGATE
;
8190 kind
= lang_hooks
.decls
.omp_predetermined_mapping (decl
);
8191 if (kind
!= OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
)
8193 if (kind
== OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
)
8194 nflags
|= GOVD_FIRSTPRIVATE
;
8195 else if (kind
== OMP_CLAUSE_DEFAULTMAP_TO
)
8196 nflags
|= GOVD_MAP
| GOVD_MAP_TO_ONLY
;
8200 else if (ctx
->defaultmap
[gdmk
] == 0)
8202 tree d
= lang_hooks
.decls
.omp_report_decl (decl
);
8203 error ("%qE not specified in enclosing %<target%>",
8205 inform (ctx
->location
, "enclosing %<target%>");
8207 else if (ctx
->defaultmap
[gdmk
]
8208 & (GOVD_MAP_0LEN_ARRAY
| GOVD_FIRSTPRIVATE
))
8209 nflags
|= ctx
->defaultmap
[gdmk
];
8210 else if (ctx
->defaultmap
[gdmk
] & GOVD_MAP_FORCE_PRESENT
)
8212 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
8213 nflags
|= ctx
->defaultmap
[gdmk
] | GOVD_MAP_ALLOC_ONLY
;
8217 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
8218 nflags
|= ctx
->defaultmap
[gdmk
] & ~GOVD_MAP
;
8223 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
8224 if ((ctx
->region_type
& ORT_ACC
) && octx
)
8226 /* Look in outer OpenACC contexts, to see if there's a
8227 data attribute for this variable. */
8228 omp_notice_variable (octx
, decl
, in_code
);
8230 for (; octx
; octx
= octx
->outer_context
)
8232 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
8235 = splay_tree_lookup (octx
->variables
,
8236 (splay_tree_key
) decl
);
8239 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
8240 error ("variable %qE declared in enclosing "
8241 "%<host_data%> region", DECL_NAME (decl
));
8243 if (octx
->region_type
== ORT_ACC_DATA
8244 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
8245 nflags
|= GOVD_MAP_0LEN_ARRAY
;
8251 if ((nflags
& ~(GOVD_MAP_TO_ONLY
| GOVD_MAP_FROM_ONLY
8252 | GOVD_MAP_ALLOC_ONLY
)) == flags
)
8254 tree type
= TREE_TYPE (decl
);
8256 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
8257 && omp_privatize_by_reference (decl
))
8258 type
= TREE_TYPE (type
);
8259 if (!omp_mappable_type (type
))
8261 error ("%qD referenced in target region does not have "
8262 "a mappable type", decl
);
8263 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
8267 if ((ctx
->region_type
& ORT_ACC
) != 0)
8268 nflags
= oacc_default_clause (ctx
, decl
, flags
);
8274 omp_add_variable (ctx
, decl
, nflags
);
8278 /* If nothing changed, there's nothing left to do. */
8279 if ((n
->value
& flags
) == flags
)
8289 if (ctx
->region_type
== ORT_WORKSHARE
8290 || ctx
->region_type
== ORT_TASKGROUP
8291 || ctx
->region_type
== ORT_SIMD
8292 || ctx
->region_type
== ORT_ACC
8293 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
8296 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
8298 if ((flags
& GOVD_PRIVATE
)
8299 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
8300 flags
|= GOVD_PRIVATE_OUTER_REF
;
8302 omp_add_variable (ctx
, decl
, flags
);
8304 shared
= (flags
& GOVD_SHARED
) != 0;
8305 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
8309 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
8310 lb, b or incr expressions, those shouldn't be turned into simd arrays. */
8311 if (ctx
->region_type
== ORT_SIMD
8312 && ctx
->in_for_exprs
8313 && ((n
->value
& (GOVD_PRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
))
8315 flags
&= ~GOVD_SEEN
;
8317 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
8318 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
8319 && DECL_SIZE (decl
))
8321 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
8324 tree t
= DECL_VALUE_EXPR (decl
);
8325 gcc_assert (INDIRECT_REF_P (t
));
8326 t
= TREE_OPERAND (t
, 0);
8327 gcc_assert (DECL_P (t
));
8328 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
8329 n2
->value
|= GOVD_SEEN
;
8331 else if (omp_privatize_by_reference (decl
)
8332 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
8333 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
8337 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
8338 gcc_assert (DECL_P (t
));
8339 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
8341 omp_notice_variable (ctx
, t
, true);
8345 if (ctx
->region_type
& ORT_ACC
)
8346 /* For OpenACC, as remarked above, defer expansion. */
8349 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
8350 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
8352 /* If nothing changed, there's nothing left to do. */
8353 if ((n
->value
& flags
) == flags
)
8359 /* If the variable is private in the current context, then we don't
8360 need to propagate anything to an outer context. */
8361 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
8363 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8364 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8366 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
8367 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8368 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8370 if (ctx
->outer_context
8371 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
8376 /* Verify that DECL is private within CTX. If there's specific information
8377 to the contrary in the innermost scope, generate an error. */
/* NOTE(review): lossy extraction -- the return type, braces, several
   return statements and the DECL_NAME arguments of the error calls are
   missing.  Only comments were added; code text left byte-identical.  */
8380 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
8384 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
/* A shared data-sharing class contradicts privateness of an iteration
   variable; diagnose when CTX is the innermost context.  */
8387 if (n
->value
& GOVD_SHARED
)
8389 if (ctx
== gimplify_omp_ctxp
)
8392 error ("iteration variable %qE is predetermined linear",
8395 error ("iteration variable %qE should be private",
/* Downgrade to private so the error is not cascaded.  */
8397 n
->value
= GOVD_PRIVATE
;
/* Explicit clauses on the innermost (or combined-parallel outer) context
   that conflict with the iteration variable's required privateness.  */
8403 else if ((n
->value
& GOVD_EXPLICIT
) != 0
8404 && (ctx
== gimplify_omp_ctxp
8405 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
8406 && gimplify_omp_ctxp
->outer_context
== ctx
)))
8408 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
8409 error ("iteration variable %qE should not be firstprivate",
8411 else if ((n
->value
& GOVD_REDUCTION
) != 0)
8412 error ("iteration variable %qE should not be reduction",
8414 else if (simd
!= 1 && (n
->value
& GOVD_LINEAR
) != 0)
8415 error ("iteration variable %qE should not be linear",
8418 return (ctx
== gimplify_omp_ctxp
8419 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
8420 && gimplify_omp_ctxp
->outer_context
== ctx
));
/* No entry for DECL in this context: for region types that do not bind
   data-sharing, recurse into the outer context.  */
8423 if (ctx
->region_type
!= ORT_WORKSHARE
8424 && ctx
->region_type
!= ORT_TASKGROUP
8425 && ctx
->region_type
!= ORT_SIMD
8426 && ctx
->region_type
!= ORT_ACC
)
8428 else if (ctx
->outer_context
)
8429 return omp_is_private (ctx
->outer_context
, decl
, simd
);
8433 /* Return true if DECL is private within a parallel region
8434 that binds to the current construct's context or in parallel
8435 region's REDUCTION clause. */
/* NOTE(review): lossy extraction -- the return type, the enclosing do/while
   loop structure, braces and several return statements are missing.  Only
   comments were added; code text left byte-identical.  */
8438 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
/* Walk outward from CTX (the loop header line is missing from this
   extraction -- presumably a do/while over outer contexts).  */
8444 ctx
= ctx
->outer_context
;
8447 if (is_global_var (decl
))
8450 /* References might be private, but might be shared too,
8451 when checking for copyprivate, assume they might be
8452 private, otherwise assume they might be shared. */
8456 if (omp_privatize_by_reference (decl
))
8459 /* Treat C++ privatized non-static data members outside
8460 of the privatization the same. */
8461 if (omp_member_access_dummy_var (decl
))
8467 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
/* Inside target/target-data regions, a decl with no data-sharing class
   is governed by its map (or absence thereof).  */
8469 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
8470 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
8472 if ((ctx
->region_type
& ORT_TARGET_DATA
) != 0
8474 || (n
->value
& GOVD_MAP
) == 0)
8481 if ((n
->value
& GOVD_LOCAL
) != 0
8482 && omp_member_access_dummy_var (decl
))
/* Private iff not shared in this context.  */
8484 return (n
->value
& GOVD_SHARED
) == 0;
/* Region types that do not bind data-sharing: keep walking outward.  */
8487 if (ctx
->region_type
== ORT_WORKSHARE
8488 || ctx
->region_type
== ORT_TASKGROUP
8489 || ctx
->region_type
== ORT_SIMD
8490 || ctx
->region_type
== ORT_ACC
)
8499 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
/* NOTE(review): lossy extraction -- the return type, the line fetching
   `tree t = *tp', and the return statements are missing.  Only comments
   were added; code text left byte-identical.  */
8502 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
8506 /* If this node has been visited, unmark it and keep looking. */
/* DATA is the decl being searched for, passed through walk_tree's
   opaque pointer and cast back to a tree here.  */
8507 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
/* Types and decls cannot contain the DECL_EXPR; presumably sets
   *walk_subtrees = 0 on the missing line -- TODO confirm.  */
8510 if (IS_TYPE_OR_DECL_P (t
))
8516 /* Gimplify the affinity clause but effectively ignore it.
8519 if ((step > 1) ? var <= end : var > end)
8520 locatator_var_expr; */
8523 gimplify_omp_affinity (tree
*list_p
, gimple_seq
*pre_p
)
8525 tree last_iter
= NULL_TREE
;
8526 tree last_bind
= NULL_TREE
;
8527 tree label
= NULL_TREE
;
8528 tree
*last_body
= NULL
;
8529 for (tree c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8530 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_AFFINITY
)
8532 tree t
= OMP_CLAUSE_DECL (c
);
8533 if (TREE_CODE (t
) == TREE_LIST
8535 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8537 if (TREE_VALUE (t
) == null_pointer_node
)
8539 if (TREE_PURPOSE (t
) != last_iter
)
8543 append_to_statement_list (label
, last_body
);
8544 gimplify_and_add (last_bind
, pre_p
);
8545 last_bind
= NULL_TREE
;
8547 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8549 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
8550 is_gimple_val
, fb_rvalue
) == GS_ERROR
8551 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
8552 is_gimple_val
, fb_rvalue
) == GS_ERROR
8553 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
8554 is_gimple_val
, fb_rvalue
) == GS_ERROR
8555 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
8556 is_gimple_val
, fb_rvalue
)
8560 last_iter
= TREE_PURPOSE (t
);
8561 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8562 last_bind
= build3 (BIND_EXPR
, void_type_node
, BLOCK_VARS (block
),
8564 last_body
= &BIND_EXPR_BODY (last_bind
);
8565 tree cond
= NULL_TREE
;
8566 location_t loc
= OMP_CLAUSE_LOCATION (c
);
8567 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8569 tree var
= TREE_VEC_ELT (it
, 0);
8570 tree begin
= TREE_VEC_ELT (it
, 1);
8571 tree end
= TREE_VEC_ELT (it
, 2);
8572 tree step
= TREE_VEC_ELT (it
, 3);
8573 loc
= DECL_SOURCE_LOCATION (var
);
8574 tree tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8576 append_to_statement_list_force (tem
, last_body
);
8578 tree cond1
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8579 step
, build_zero_cst (TREE_TYPE (step
)));
8580 tree cond2
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
,
8582 tree cond3
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8584 cond1
= fold_build3_loc (loc
, COND_EXPR
, boolean_type_node
,
8585 cond1
, cond2
, cond3
);
8587 cond
= fold_build2_loc (loc
, TRUTH_AND_EXPR
,
8588 boolean_type_node
, cond
, cond1
);
8592 tree cont_label
= create_artificial_label (loc
);
8593 label
= build1 (LABEL_EXPR
, void_type_node
, cont_label
);
8594 tree tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, cond
,
8596 build_and_jump (&cont_label
));
8597 append_to_statement_list_force (tem
, last_body
);
8599 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8601 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
), 0),
8603 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8605 if (error_operand_p (TREE_VALUE (t
)))
8607 append_to_statement_list_force (TREE_VALUE (t
), last_body
);
8608 TREE_VALUE (t
) = null_pointer_node
;
8614 append_to_statement_list (label
, last_body
);
8615 gimplify_and_add (last_bind
, pre_p
);
8616 last_bind
= NULL_TREE
;
8618 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8620 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8621 NULL
, is_gimple_val
, fb_rvalue
);
8622 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8624 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8626 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8627 is_gimple_lvalue
, fb_lvalue
) == GS_ERROR
)
8629 gimplify_and_add (OMP_CLAUSE_DECL (c
), pre_p
);
8634 append_to_statement_list (label
, last_body
);
8635 gimplify_and_add (last_bind
, pre_p
);
8640 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8641 lower all the depend clauses by populating corresponding depend
8642 array. Returns 0 if there are no such depend clauses, or
8643 2 if all depend clauses should be removed, 1 otherwise. */
8646 gimplify_omp_depend (tree
*list_p
, gimple_seq
*pre_p
)
8650 size_t n
[5] = { 0, 0, 0, 0, 0 };
8652 tree counts
[5] = { NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
};
8653 tree last_iter
= NULL_TREE
, last_count
= NULL_TREE
;
8655 location_t first_loc
= UNKNOWN_LOCATION
;
8657 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8658 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8660 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8662 case OMP_CLAUSE_DEPEND_IN
:
8665 case OMP_CLAUSE_DEPEND_OUT
:
8666 case OMP_CLAUSE_DEPEND_INOUT
:
8669 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8672 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8675 case OMP_CLAUSE_DEPEND_INOUTSET
:
8681 tree t
= OMP_CLAUSE_DECL (c
);
8682 if (first_loc
== UNKNOWN_LOCATION
)
8683 first_loc
= OMP_CLAUSE_LOCATION (c
);
8684 if (TREE_CODE (t
) == TREE_LIST
8686 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8688 if (TREE_PURPOSE (t
) != last_iter
)
8690 tree tcnt
= size_one_node
;
8691 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8693 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
8694 is_gimple_val
, fb_rvalue
) == GS_ERROR
8695 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
8696 is_gimple_val
, fb_rvalue
) == GS_ERROR
8697 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
8698 is_gimple_val
, fb_rvalue
) == GS_ERROR
8699 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
8700 is_gimple_val
, fb_rvalue
)
8703 tree var
= TREE_VEC_ELT (it
, 0);
8704 tree begin
= TREE_VEC_ELT (it
, 1);
8705 tree end
= TREE_VEC_ELT (it
, 2);
8706 tree step
= TREE_VEC_ELT (it
, 3);
8707 tree orig_step
= TREE_VEC_ELT (it
, 4);
8708 tree type
= TREE_TYPE (var
);
8709 tree stype
= TREE_TYPE (step
);
8710 location_t loc
= DECL_SOURCE_LOCATION (var
);
8712 /* Compute count for this iterator as
8714 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8715 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8716 and compute product of those for the entire depend
8718 if (POINTER_TYPE_P (type
))
8719 endmbegin
= fold_build2_loc (loc
, POINTER_DIFF_EXPR
,
8722 endmbegin
= fold_build2_loc (loc
, MINUS_EXPR
, type
,
8724 tree stepm1
= fold_build2_loc (loc
, MINUS_EXPR
, stype
,
8726 build_int_cst (stype
, 1));
8727 tree stepp1
= fold_build2_loc (loc
, PLUS_EXPR
, stype
, step
,
8728 build_int_cst (stype
, 1));
8729 tree pos
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
8730 unshare_expr (endmbegin
),
8732 pos
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
8734 tree neg
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
8736 if (TYPE_UNSIGNED (stype
))
8738 neg
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, neg
);
8739 step
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, step
);
8741 neg
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
8744 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8747 pos
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, pos
,
8748 build_int_cst (stype
, 0));
8749 cond
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
,
8751 neg
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, neg
,
8752 build_int_cst (stype
, 0));
8753 tree osteptype
= TREE_TYPE (orig_step
);
8754 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8756 build_int_cst (osteptype
, 0));
8757 tree cnt
= fold_build3_loc (loc
, COND_EXPR
, stype
,
8759 cnt
= fold_convert_loc (loc
, sizetype
, cnt
);
8760 if (gimplify_expr (&cnt
, pre_p
, NULL
, is_gimple_val
,
8761 fb_rvalue
) == GS_ERROR
)
8763 tcnt
= size_binop_loc (loc
, MULT_EXPR
, tcnt
, cnt
);
8765 if (gimplify_expr (&tcnt
, pre_p
, NULL
, is_gimple_val
,
8766 fb_rvalue
) == GS_ERROR
)
8768 last_iter
= TREE_PURPOSE (t
);
8771 if (counts
[i
] == NULL_TREE
)
8772 counts
[i
] = last_count
;
8774 counts
[i
] = size_binop_loc (OMP_CLAUSE_LOCATION (c
),
8775 PLUS_EXPR
, counts
[i
], last_count
);
8780 for (i
= 0; i
< 5; i
++)
8786 tree total
= size_zero_node
;
8787 for (i
= 0; i
< 5; i
++)
8789 unused
[i
] = counts
[i
] == NULL_TREE
&& n
[i
] == 0;
8790 if (counts
[i
] == NULL_TREE
)
8791 counts
[i
] = size_zero_node
;
8793 counts
[i
] = size_binop (PLUS_EXPR
, counts
[i
], size_int (n
[i
]));
8794 if (gimplify_expr (&counts
[i
], pre_p
, NULL
, is_gimple_val
,
8795 fb_rvalue
) == GS_ERROR
)
8797 total
= size_binop (PLUS_EXPR
, total
, counts
[i
]);
8800 if (gimplify_expr (&total
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8803 bool is_old
= unused
[1] && unused
[3] && unused
[4];
8804 tree totalpx
= size_binop (PLUS_EXPR
, unshare_expr (total
),
8805 size_int (is_old
? 1 : 4));
8807 totalpx
= size_binop (PLUS_EXPR
, totalpx
,
8808 size_binop (MULT_EXPR
, counts
[4], size_int (2)));
8809 tree type
= build_array_type (ptr_type_node
, build_index_type (totalpx
));
8810 tree array
= create_tmp_var_raw (type
);
8811 TREE_ADDRESSABLE (array
) = 1;
8812 if (!poly_int_tree_p (totalpx
))
8814 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array
)))
8815 gimplify_type_sizes (TREE_TYPE (array
), pre_p
);
8816 if (gimplify_omp_ctxp
)
8818 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8820 && (ctx
->region_type
== ORT_WORKSHARE
8821 || ctx
->region_type
== ORT_TASKGROUP
8822 || ctx
->region_type
== ORT_SIMD
8823 || ctx
->region_type
== ORT_ACC
))
8824 ctx
= ctx
->outer_context
;
8826 omp_add_variable (ctx
, array
, GOVD_LOCAL
| GOVD_SEEN
);
8828 gimplify_vla_decl (array
, pre_p
);
8831 gimple_add_tmp_var (array
);
8832 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
8837 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8838 build_int_cst (ptr_type_node
, 0));
8839 gimplify_and_add (tem
, pre_p
);
8840 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
8843 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8844 fold_convert (ptr_type_node
, total
));
8845 gimplify_and_add (tem
, pre_p
);
8846 for (i
= 1; i
< (is_old
? 2 : 4); i
++)
8848 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (i
+ !is_old
),
8849 NULL_TREE
, NULL_TREE
);
8850 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, counts
[i
- 1]);
8851 gimplify_and_add (tem
, pre_p
);
8858 for (i
= 0; i
< 5; i
++)
8860 if (i
&& (i
>= j
|| unused
[i
- 1]))
8862 cnts
[i
] = cnts
[i
- 1];
8865 cnts
[i
] = create_tmp_var (sizetype
);
8867 g
= gimple_build_assign (cnts
[i
], size_int (is_old
? 2 : 5));
8872 t
= size_binop (PLUS_EXPR
, counts
[0], size_int (2));
8874 t
= size_binop (PLUS_EXPR
, cnts
[i
- 1], counts
[i
- 1]);
8875 if (gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8878 g
= gimple_build_assign (cnts
[i
], t
);
8880 gimple_seq_add_stmt (pre_p
, g
);
8883 cnts
[5] = NULL_TREE
;
8886 tree t
= size_binop (PLUS_EXPR
, total
, size_int (5));
8887 cnts
[5] = create_tmp_var (sizetype
);
8888 g
= gimple_build_assign (cnts
[i
], t
);
8889 gimple_seq_add_stmt (pre_p
, g
);
8892 last_iter
= NULL_TREE
;
8893 tree last_bind
= NULL_TREE
;
8894 tree
*last_body
= NULL
;
8895 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8896 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8898 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8900 case OMP_CLAUSE_DEPEND_IN
:
8903 case OMP_CLAUSE_DEPEND_OUT
:
8904 case OMP_CLAUSE_DEPEND_INOUT
:
8907 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8910 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8913 case OMP_CLAUSE_DEPEND_INOUTSET
:
8919 tree t
= OMP_CLAUSE_DECL (c
);
8920 if (TREE_CODE (t
) == TREE_LIST
8922 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8924 if (TREE_PURPOSE (t
) != last_iter
)
8927 gimplify_and_add (last_bind
, pre_p
);
8928 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8929 last_bind
= build3 (BIND_EXPR
, void_type_node
,
8930 BLOCK_VARS (block
), NULL
, block
);
8931 TREE_SIDE_EFFECTS (last_bind
) = 1;
8932 SET_EXPR_LOCATION (last_bind
, OMP_CLAUSE_LOCATION (c
));
8933 tree
*p
= &BIND_EXPR_BODY (last_bind
);
8934 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8936 tree var
= TREE_VEC_ELT (it
, 0);
8937 tree begin
= TREE_VEC_ELT (it
, 1);
8938 tree end
= TREE_VEC_ELT (it
, 2);
8939 tree step
= TREE_VEC_ELT (it
, 3);
8940 tree orig_step
= TREE_VEC_ELT (it
, 4);
8941 tree type
= TREE_TYPE (var
);
8942 location_t loc
= DECL_SOURCE_LOCATION (var
);
8950 if (orig_step > 0) {
8951 if (var < end) goto beg_label;
8953 if (var > end) goto beg_label;
8955 for each iterator, with inner iterators added to
8957 tree beg_label
= create_artificial_label (loc
);
8958 tree cond_label
= NULL_TREE
;
8959 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8961 append_to_statement_list_force (tem
, p
);
8962 tem
= build_and_jump (&cond_label
);
8963 append_to_statement_list_force (tem
, p
);
8964 tem
= build1 (LABEL_EXPR
, void_type_node
, beg_label
);
8965 append_to_statement_list (tem
, p
);
8966 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
8967 NULL_TREE
, NULL_TREE
);
8968 TREE_SIDE_EFFECTS (bind
) = 1;
8969 SET_EXPR_LOCATION (bind
, loc
);
8970 append_to_statement_list_force (bind
, p
);
8971 if (POINTER_TYPE_P (type
))
8972 tem
= build2_loc (loc
, POINTER_PLUS_EXPR
, type
,
8973 var
, fold_convert_loc (loc
, sizetype
,
8976 tem
= build2_loc (loc
, PLUS_EXPR
, type
, var
, step
);
8977 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8979 append_to_statement_list_force (tem
, p
);
8980 tem
= build1 (LABEL_EXPR
, void_type_node
, cond_label
);
8981 append_to_statement_list (tem
, p
);
8982 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8986 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8987 cond
, build_and_jump (&beg_label
),
8989 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8992 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8993 cond
, build_and_jump (&beg_label
),
8995 tree osteptype
= TREE_TYPE (orig_step
);
8996 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8998 build_int_cst (osteptype
, 0));
8999 tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
9001 append_to_statement_list_force (tem
, p
);
9002 p
= &BIND_EXPR_BODY (bind
);
9006 last_iter
= TREE_PURPOSE (t
);
9007 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
9009 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
),
9011 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
9013 if (error_operand_p (TREE_VALUE (t
)))
9015 if (TREE_VALUE (t
) != null_pointer_node
)
9016 TREE_VALUE (t
) = build_fold_addr_expr (TREE_VALUE (t
));
9019 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
9020 NULL_TREE
, NULL_TREE
);
9021 tree r2
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[5],
9022 NULL_TREE
, NULL_TREE
);
9023 r2
= build_fold_addr_expr_with_type (r2
, ptr_type_node
);
9024 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
9025 void_type_node
, r
, r2
);
9026 append_to_statement_list_force (tem
, last_body
);
9027 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
9028 void_type_node
, cnts
[i
],
9029 size_binop (PLUS_EXPR
, cnts
[i
],
9031 append_to_statement_list_force (tem
, last_body
);
9034 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
9035 NULL_TREE
, NULL_TREE
);
9036 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
9037 void_type_node
, r
, TREE_VALUE (t
));
9038 append_to_statement_list_force (tem
, last_body
);
9041 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
9042 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)),
9043 NULL_TREE
, NULL_TREE
);
9044 tem
= build_int_cst (ptr_type_node
, GOMP_DEPEND_INOUTSET
);
9045 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
9046 void_type_node
, r
, tem
);
9047 append_to_statement_list_force (tem
, last_body
);
9049 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
9050 void_type_node
, cnts
[i
],
9051 size_binop (PLUS_EXPR
, cnts
[i
],
9052 size_int (1 + (i
== 5))));
9053 append_to_statement_list_force (tem
, last_body
);
9054 TREE_VALUE (t
) = null_pointer_node
;
9060 gimplify_and_add (last_bind
, pre_p
);
9061 last_bind
= NULL_TREE
;
9063 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
9065 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
9066 NULL
, is_gimple_val
, fb_rvalue
);
9067 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
9069 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
9071 if (OMP_CLAUSE_DECL (c
) != null_pointer_node
)
9072 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
9073 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
9074 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9078 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
9079 NULL_TREE
, NULL_TREE
);
9080 tree r2
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[5],
9081 NULL_TREE
, NULL_TREE
);
9082 r2
= build_fold_addr_expr_with_type (r2
, ptr_type_node
);
9083 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, r2
);
9084 gimplify_and_add (tem
, pre_p
);
9085 g
= gimple_build_assign (cnts
[i
], size_binop (PLUS_EXPR
,
9088 gimple_seq_add_stmt (pre_p
, g
);
9091 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
9092 NULL_TREE
, NULL_TREE
);
9093 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, OMP_CLAUSE_DECL (c
));
9094 gimplify_and_add (tem
, pre_p
);
9097 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
9098 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)),
9099 NULL_TREE
, NULL_TREE
);
9100 tem
= build_int_cst (ptr_type_node
, GOMP_DEPEND_INOUTSET
);
9101 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, tem
);
9102 append_to_statement_list_force (tem
, last_body
);
9103 gimplify_and_add (tem
, pre_p
);
9105 g
= gimple_build_assign (cnts
[i
],
9106 size_binop (PLUS_EXPR
, cnts
[i
],
9107 size_int (1 + (i
== 5))));
9108 gimple_seq_add_stmt (pre_p
, g
);
9112 gimplify_and_add (last_bind
, pre_p
);
9113 tree cond
= boolean_false_node
;
9117 cond
= build2_loc (first_loc
, NE_EXPR
, boolean_type_node
, cnts
[0],
9118 size_binop_loc (first_loc
, PLUS_EXPR
, counts
[0],
9121 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
9122 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
9124 size_binop_loc (first_loc
, PLUS_EXPR
,
9130 tree prev
= size_int (5);
9131 for (i
= 0; i
< 5; i
++)
9135 prev
= size_binop_loc (first_loc
, PLUS_EXPR
, counts
[i
], prev
);
9136 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
9137 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
9138 cnts
[i
], unshare_expr (prev
)));
9141 tem
= build3_loc (first_loc
, COND_EXPR
, void_type_node
, cond
,
9142 build_call_expr_loc (first_loc
,
9143 builtin_decl_explicit (BUILT_IN_TRAP
),
9145 gimplify_and_add (tem
, pre_p
);
9146 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
9147 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
9148 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
9149 OMP_CLAUSE_CHAIN (c
) = *list_p
;
9154 /* For a set of mappings describing an array section pointed to by a struct
9155 (or derived type, etc.) component, create an "alloc" or "release" node to
9156 insert into a list following a GOMP_MAP_STRUCT node. For some types of
9157 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
9158 be created that is inserted into the list of mapping nodes attached to the
9159 directive being processed -- not part of the sorted list of nodes after
9162 CODE is the code of the directive being processed. GRP_START and GRP_END
9163 are the first and last of two or three nodes representing this array section
9164 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
9165 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
9166 filled with the additional node described above, if needed.
9168 This function does not add the new nodes to any lists itself. It is the
9169 responsibility of the caller to do that. */
9172 build_omp_struct_comp_nodes (enum tree_code code
, tree grp_start
, tree grp_end
,
9175 enum gomp_map_kind mkind
9176 = (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
)
9177 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
9179 gcc_assert (grp_start
!= grp_end
);
9181 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
), OMP_CLAUSE_MAP
);
9182 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
9183 OMP_CLAUSE_DECL (c2
) = unshare_expr (OMP_CLAUSE_DECL (grp_end
));
9184 OMP_CLAUSE_CHAIN (c2
) = NULL_TREE
;
9185 tree grp_mid
= NULL_TREE
;
9186 if (OMP_CLAUSE_CHAIN (grp_start
) != grp_end
)
9187 grp_mid
= OMP_CLAUSE_CHAIN (grp_start
);
9190 && OMP_CLAUSE_CODE (grp_mid
) == OMP_CLAUSE_MAP
9191 && OMP_CLAUSE_MAP_KIND (grp_mid
) == GOMP_MAP_TO_PSET
)
9192 OMP_CLAUSE_SIZE (c2
) = OMP_CLAUSE_SIZE (grp_mid
);
9194 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (ptr_type_node
);
9197 && OMP_CLAUSE_CODE (grp_mid
) == OMP_CLAUSE_MAP
9198 && (OMP_CLAUSE_MAP_KIND (grp_mid
) == GOMP_MAP_ALWAYS_POINTER
9199 || OMP_CLAUSE_MAP_KIND (grp_mid
) == GOMP_MAP_ATTACH_DETACH
))
9202 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
), OMP_CLAUSE_MAP
);
9203 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
9204 OMP_CLAUSE_DECL (c3
) = unshare_expr (OMP_CLAUSE_DECL (grp_mid
));
9205 OMP_CLAUSE_SIZE (c3
) = TYPE_SIZE_UNIT (ptr_type_node
);
9206 OMP_CLAUSE_CHAIN (c3
) = NULL_TREE
;
9211 *extra_node
= NULL_TREE
;
9216 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
9217 and set *BITPOSP and *POFFSETP to the bit offset of the access.
9218 If BASE_REF is non-NULL and the containing object is a reference, set
9219 *BASE_REF to that reference before dereferencing the object.
9220 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
9221 has array type, else return NULL. */
9224 extract_base_bit_offset (tree base
, poly_int64
*bitposp
,
9225 poly_offset_int
*poffsetp
)
9228 poly_int64 bitsize
, bitpos
;
9230 int unsignedp
, reversep
, volatilep
= 0;
9231 poly_offset_int poffset
;
9235 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
, &mode
,
9236 &unsignedp
, &reversep
, &volatilep
);
9240 if (offset
&& poly_int_tree_p (offset
))
9242 poffset
= wi::to_poly_offset (offset
);
9248 if (maybe_ne (bitpos
, 0))
9249 poffset
+= bits_to_bytes_round_down (bitpos
);
9252 *poffsetp
= poffset
;
/* Used for topological sorting of mapping groups.  UNVISITED means we haven't
   started processing the group yet.  The TEMPORARY mark is used when we first
   encounter a group on a depth-first traversal, and the PERMANENT mark is used
   when we have processed all the group's children (i.e. all the base pointers
   referred to by the group's mapping nodes, recursively).  */

enum omp_tsort_mark {
  UNVISITED,
  TEMPORARY,
  PERMANENT
};
9269 /* Hash for trees based on operand_equal_p. Like tree_operand_hash
9270 but ignores side effects in the equality comparisons. */
9272 struct tree_operand_hash_no_se
: tree_operand_hash
9274 static inline bool equal (const value_type
&,
9275 const compare_type
&);
9279 tree_operand_hash_no_se::equal (const value_type
&t1
,
9280 const compare_type
&t2
)
9282 return operand_equal_p (t1
, t2
, OEP_MATCH_SIDE_EFFECTS
);
9285 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
9288 struct omp_mapping_group
{
9291 omp_tsort_mark mark
;
9292 /* If we've removed the group but need to reindex, mark the group as
9295 struct omp_mapping_group
*sibling
;
9296 struct omp_mapping_group
*next
;
9300 debug_mapping_group (omp_mapping_group
*grp
)
9302 tree tmp
= OMP_CLAUSE_CHAIN (grp
->grp_end
);
9303 OMP_CLAUSE_CHAIN (grp
->grp_end
) = NULL
;
9304 debug_generic_expr (*grp
->grp_start
);
9305 OMP_CLAUSE_CHAIN (grp
->grp_end
) = tmp
;
9308 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9312 omp_get_base_pointer (tree expr
)
9314 while (TREE_CODE (expr
) == ARRAY_REF
9315 || TREE_CODE (expr
) == COMPONENT_REF
)
9316 expr
= TREE_OPERAND (expr
, 0);
9318 if (INDIRECT_REF_P (expr
)
9319 || (TREE_CODE (expr
) == MEM_REF
9320 && integer_zerop (TREE_OPERAND (expr
, 1))))
9322 expr
= TREE_OPERAND (expr
, 0);
9323 while (TREE_CODE (expr
) == COMPOUND_EXPR
)
9324 expr
= TREE_OPERAND (expr
, 1);
9325 if (TREE_CODE (expr
) == POINTER_PLUS_EXPR
)
9326 expr
= TREE_OPERAND (expr
, 0);
9327 if (TREE_CODE (expr
) == SAVE_EXPR
)
9328 expr
= TREE_OPERAND (expr
, 0);
9336 /* Remove COMPONENT_REFS and indirections from EXPR. */
9339 omp_strip_components_and_deref (tree expr
)
9341 while (TREE_CODE (expr
) == COMPONENT_REF
9342 || INDIRECT_REF_P (expr
)
9343 || (TREE_CODE (expr
) == MEM_REF
9344 && integer_zerop (TREE_OPERAND (expr
, 1)))
9345 || TREE_CODE (expr
) == POINTER_PLUS_EXPR
9346 || TREE_CODE (expr
) == COMPOUND_EXPR
)
9347 if (TREE_CODE (expr
) == COMPOUND_EXPR
)
9348 expr
= TREE_OPERAND (expr
, 1);
9350 expr
= TREE_OPERAND (expr
, 0);
9358 omp_strip_indirections (tree expr
)
9360 while (INDIRECT_REF_P (expr
)
9361 || (TREE_CODE (expr
) == MEM_REF
9362 && integer_zerop (TREE_OPERAND (expr
, 1))))
9363 expr
= TREE_OPERAND (expr
, 0);
9368 /* An attach or detach operation depends directly on the address being
9369 attached/detached. Return that address, or none if there are no
9370 attachments/detachments. */
9373 omp_get_attachment (omp_mapping_group
*grp
)
9375 tree node
= *grp
->grp_start
;
9377 switch (OMP_CLAUSE_MAP_KIND (node
))
9381 case GOMP_MAP_TOFROM
:
9382 case GOMP_MAP_ALWAYS_FROM
:
9383 case GOMP_MAP_ALWAYS_TO
:
9384 case GOMP_MAP_ALWAYS_TOFROM
:
9385 case GOMP_MAP_FORCE_FROM
:
9386 case GOMP_MAP_FORCE_TO
:
9387 case GOMP_MAP_FORCE_TOFROM
:
9388 case GOMP_MAP_FORCE_PRESENT
:
9389 case GOMP_MAP_PRESENT_ALLOC
:
9390 case GOMP_MAP_PRESENT_FROM
:
9391 case GOMP_MAP_PRESENT_TO
:
9392 case GOMP_MAP_PRESENT_TOFROM
:
9393 case GOMP_MAP_ALWAYS_PRESENT_FROM
:
9394 case GOMP_MAP_ALWAYS_PRESENT_TO
:
9395 case GOMP_MAP_ALWAYS_PRESENT_TOFROM
:
9396 case GOMP_MAP_ALLOC
:
9397 case GOMP_MAP_RELEASE
:
9398 case GOMP_MAP_DELETE
:
9399 case GOMP_MAP_FORCE_ALLOC
:
9400 if (node
== grp
->grp_end
)
9403 node
= OMP_CLAUSE_CHAIN (node
);
9404 if (node
&& OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_TO_PSET
)
9406 gcc_assert (node
!= grp
->grp_end
);
9407 node
= OMP_CLAUSE_CHAIN (node
);
9410 switch (OMP_CLAUSE_MAP_KIND (node
))
9412 case GOMP_MAP_POINTER
:
9413 case GOMP_MAP_ALWAYS_POINTER
:
9414 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9415 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9416 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
9419 case GOMP_MAP_ATTACH_DETACH
:
9420 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9421 return OMP_CLAUSE_DECL (node
);
9424 internal_error ("unexpected mapping node");
9426 return error_mark_node
;
9428 case GOMP_MAP_TO_PSET
:
9429 gcc_assert (node
!= grp
->grp_end
);
9430 node
= OMP_CLAUSE_CHAIN (node
);
9431 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_ATTACH
9432 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DETACH
)
9433 return OMP_CLAUSE_DECL (node
);
9435 internal_error ("unexpected mapping node");
9436 return error_mark_node
;
9438 case GOMP_MAP_ATTACH
:
9439 case GOMP_MAP_DETACH
:
9440 node
= OMP_CLAUSE_CHAIN (node
);
9441 if (!node
|| *grp
->grp_start
== grp
->grp_end
)
9442 return OMP_CLAUSE_DECL (*grp
->grp_start
);
9443 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9444 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9445 return OMP_CLAUSE_DECL (*grp
->grp_start
);
9447 internal_error ("unexpected mapping node");
9448 return error_mark_node
;
9450 case GOMP_MAP_STRUCT
:
9451 case GOMP_MAP_FORCE_DEVICEPTR
:
9452 case GOMP_MAP_DEVICE_RESIDENT
:
9454 case GOMP_MAP_IF_PRESENT
:
9455 case GOMP_MAP_FIRSTPRIVATE
:
9456 case GOMP_MAP_FIRSTPRIVATE_INT
:
9457 case GOMP_MAP_USE_DEVICE_PTR
:
9458 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9462 internal_error ("unexpected mapping node");
9465 return error_mark_node
;
9468 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9469 mappings, return the chain pointer to the end of that group in the list. */
9472 omp_group_last (tree
*start_p
)
9474 tree c
= *start_p
, nc
, *grp_last_p
= start_p
;
9476 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
);
9478 nc
= OMP_CLAUSE_CHAIN (c
);
9480 if (!nc
|| OMP_CLAUSE_CODE (nc
) != OMP_CLAUSE_MAP
)
9483 switch (OMP_CLAUSE_MAP_KIND (c
))
9487 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
9488 && (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9489 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9490 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ATTACH_DETACH
9491 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
9492 || (OMP_CLAUSE_MAP_KIND (nc
)
9493 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
)
9494 || (OMP_CLAUSE_MAP_KIND (nc
)
9495 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
)
9496 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ALWAYS_POINTER
9497 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_TO_PSET
))
9499 grp_last_p
= &OMP_CLAUSE_CHAIN (c
);
9501 tree nc2
= OMP_CLAUSE_CHAIN (nc
);
9503 && OMP_CLAUSE_CODE (nc2
) == OMP_CLAUSE_MAP
9504 && (OMP_CLAUSE_MAP_KIND (nc
)
9505 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
)
9506 && OMP_CLAUSE_MAP_KIND (nc2
) == GOMP_MAP_ATTACH
)
9508 grp_last_p
= &OMP_CLAUSE_CHAIN (nc
);
9510 nc2
= OMP_CLAUSE_CHAIN (nc2
);
9516 case GOMP_MAP_ATTACH
:
9517 case GOMP_MAP_DETACH
:
9518 /* This is a weird artifact of how directives are parsed: bare attach or
9519 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9520 FIRSTPRIVATE_REFERENCE node. FIXME. */
9522 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
9523 && (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9524 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
))
9525 grp_last_p
= &OMP_CLAUSE_CHAIN (c
);
9528 case GOMP_MAP_TO_PSET
:
9529 if (OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
9530 && (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ATTACH
9531 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_DETACH
))
9532 grp_last_p
= &OMP_CLAUSE_CHAIN (c
);
9535 case GOMP_MAP_STRUCT
:
9537 unsigned HOST_WIDE_INT num_mappings
9538 = tree_to_uhwi (OMP_CLAUSE_SIZE (c
));
9539 if (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9540 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9541 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ATTACH_DETACH
)
9542 grp_last_p
= &OMP_CLAUSE_CHAIN (*grp_last_p
);
9543 for (unsigned i
= 0; i
< num_mappings
; i
++)
9544 grp_last_p
= &OMP_CLAUSE_CHAIN (*grp_last_p
);
9552 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9553 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9554 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9555 if we have more than one such group, else return NULL. */
9558 omp_gather_mapping_groups_1 (tree
*list_p
, vec
<omp_mapping_group
> *groups
,
9559 tree gather_sentinel
)
9561 for (tree
*cp
= list_p
;
9562 *cp
&& *cp
!= gather_sentinel
;
9563 cp
= &OMP_CLAUSE_CHAIN (*cp
))
9565 if (OMP_CLAUSE_CODE (*cp
) != OMP_CLAUSE_MAP
)
9568 tree
*grp_last_p
= omp_group_last (cp
);
9569 omp_mapping_group grp
;
9572 grp
.grp_end
= *grp_last_p
;
9573 grp
.mark
= UNVISITED
;
9575 grp
.deleted
= false;
9577 groups
->safe_push (grp
);
9583 static vec
<omp_mapping_group
> *
9584 omp_gather_mapping_groups (tree
*list_p
)
9586 vec
<omp_mapping_group
> *groups
= new vec
<omp_mapping_group
> ();
9588 omp_gather_mapping_groups_1 (list_p
, groups
, NULL_TREE
);
9590 if (groups
->length () > 0)
9599 /* A pointer mapping group GRP may define a block of memory starting at some
9600 base address, and maybe also define a firstprivate pointer or firstprivate
9601 reference that points to that block. The return value is a node containing
9602 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9603 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9604 return the number of consecutive chained nodes in CHAINED. */
9607 omp_group_base (omp_mapping_group
*grp
, unsigned int *chained
,
9610 tree node
= *grp
->grp_start
;
9612 *firstprivate
= NULL_TREE
;
9615 switch (OMP_CLAUSE_MAP_KIND (node
))
9619 case GOMP_MAP_TOFROM
:
9620 case GOMP_MAP_ALWAYS_FROM
:
9621 case GOMP_MAP_ALWAYS_TO
:
9622 case GOMP_MAP_ALWAYS_TOFROM
:
9623 case GOMP_MAP_FORCE_FROM
:
9624 case GOMP_MAP_FORCE_TO
:
9625 case GOMP_MAP_FORCE_TOFROM
:
9626 case GOMP_MAP_FORCE_PRESENT
:
9627 case GOMP_MAP_PRESENT_ALLOC
:
9628 case GOMP_MAP_PRESENT_FROM
:
9629 case GOMP_MAP_PRESENT_TO
:
9630 case GOMP_MAP_PRESENT_TOFROM
:
9631 case GOMP_MAP_ALWAYS_PRESENT_FROM
:
9632 case GOMP_MAP_ALWAYS_PRESENT_TO
:
9633 case GOMP_MAP_ALWAYS_PRESENT_TOFROM
:
9634 case GOMP_MAP_ALLOC
:
9635 case GOMP_MAP_RELEASE
:
9636 case GOMP_MAP_DELETE
:
9637 case GOMP_MAP_FORCE_ALLOC
:
9638 case GOMP_MAP_IF_PRESENT
:
9639 if (node
== grp
->grp_end
)
9642 node
= OMP_CLAUSE_CHAIN (node
);
9643 if (node
&& OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_TO_PSET
)
9645 if (node
== grp
->grp_end
)
9646 return *grp
->grp_start
;
9647 node
= OMP_CLAUSE_CHAIN (node
);
9650 switch (OMP_CLAUSE_MAP_KIND (node
))
9652 case GOMP_MAP_POINTER
:
9653 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9654 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9655 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
9656 *firstprivate
= OMP_CLAUSE_DECL (node
);
9657 return *grp
->grp_start
;
9659 case GOMP_MAP_ALWAYS_POINTER
:
9660 case GOMP_MAP_ATTACH_DETACH
:
9661 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9662 return *grp
->grp_start
;
9665 internal_error ("unexpected mapping node");
9668 internal_error ("unexpected mapping node");
9669 return error_mark_node
;
9671 case GOMP_MAP_TO_PSET
:
9672 gcc_assert (node
!= grp
->grp_end
);
9673 node
= OMP_CLAUSE_CHAIN (node
);
9674 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_ATTACH
9675 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DETACH
)
9678 internal_error ("unexpected mapping node");
9679 return error_mark_node
;
9681 case GOMP_MAP_ATTACH
:
9682 case GOMP_MAP_DETACH
:
9683 node
= OMP_CLAUSE_CHAIN (node
);
9684 if (!node
|| *grp
->grp_start
== grp
->grp_end
)
9686 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9687 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9689 /* We're mapping the base pointer itself in a bare attach or detach
9690 node. This is a side effect of how parsing works, and the mapping
9691 will be removed anyway (at least for enter/exit data directives).
9692 We should ignore the mapping here. FIXME. */
9696 internal_error ("unexpected mapping node");
9697 return error_mark_node
;
9699 case GOMP_MAP_STRUCT
:
9701 unsigned HOST_WIDE_INT num_mappings
9702 = tree_to_uhwi (OMP_CLAUSE_SIZE (node
));
9703 node
= OMP_CLAUSE_CHAIN (node
);
9704 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9705 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9707 *firstprivate
= OMP_CLAUSE_DECL (node
);
9708 node
= OMP_CLAUSE_CHAIN (node
);
9710 *chained
= num_mappings
;
9714 case GOMP_MAP_FORCE_DEVICEPTR
:
9715 case GOMP_MAP_DEVICE_RESIDENT
:
9717 case GOMP_MAP_FIRSTPRIVATE
:
9718 case GOMP_MAP_FIRSTPRIVATE_INT
:
9719 case GOMP_MAP_USE_DEVICE_PTR
:
9720 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9723 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9724 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9725 case GOMP_MAP_POINTER
:
9726 case GOMP_MAP_ALWAYS_POINTER
:
9727 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
9728 /* These shouldn't appear by themselves. */
9730 internal_error ("unexpected pointer mapping node");
9731 return error_mark_node
;
9737 return error_mark_node
;
9740 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9741 nodes by tree_operand_hash_no_se. */
9744 omp_index_mapping_groups_1 (hash_map
<tree_operand_hash_no_se
,
9745 omp_mapping_group
*> *grpmap
,
9746 vec
<omp_mapping_group
> *groups
,
9747 tree reindex_sentinel
)
9749 omp_mapping_group
*grp
;
9751 bool reindexing
= reindex_sentinel
!= NULL_TREE
, above_hwm
= false;
9753 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
9755 if (reindexing
&& *grp
->grp_start
== reindex_sentinel
)
9758 if (reindexing
&& !above_hwm
)
9762 unsigned int chained
;
9763 tree node
= omp_group_base (grp
, &chained
, &fpp
);
9765 if (node
== error_mark_node
|| (!node
&& !fpp
))
9768 for (unsigned j
= 0;
9769 node
&& j
< chained
;
9770 node
= OMP_CLAUSE_CHAIN (node
), j
++)
9772 tree decl
= OMP_CLAUSE_DECL (node
);
9773 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9774 meaning node-hash lookups don't work. This is a workaround for
9775 that, but ideally we should just create the INDIRECT_REF at
9776 source instead. FIXME. */
9777 if (TREE_CODE (decl
) == MEM_REF
9778 && integer_zerop (TREE_OPERAND (decl
, 1)))
9779 decl
= build_fold_indirect_ref (TREE_OPERAND (decl
, 0));
9781 omp_mapping_group
**prev
= grpmap
->get (decl
);
9783 if (prev
&& *prev
== grp
)
9787 /* Mapping the same thing twice is normally diagnosed as an error,
9788 but can happen under some circumstances, e.g. in pr99928-16.c,
9791 #pragma omp target simd reduction(+:a[:3]) \
9792 map(always, tofrom: a[:6])
9795 will result in two "a[0]" mappings (of different sizes). */
9797 grp
->sibling
= (*prev
)->sibling
;
9798 (*prev
)->sibling
= grp
;
9801 grpmap
->put (decl
, grp
);
9807 omp_mapping_group
**prev
= grpmap
->get (fpp
);
9808 if (prev
&& *prev
!= grp
)
9810 grp
->sibling
= (*prev
)->sibling
;
9811 (*prev
)->sibling
= grp
;
9814 grpmap
->put (fpp
, grp
);
9818 static hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *
9819 omp_index_mapping_groups (vec
<omp_mapping_group
> *groups
)
9821 hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *grpmap
9822 = new hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*>;
9824 omp_index_mapping_groups_1 (grpmap
, groups
, NULL_TREE
);
9829 /* Rebuild group map from partially-processed clause list (during
9830 omp_build_struct_sibling_lists). We have already processed nodes up until
9831 a high-water mark (HWM). This is a bit tricky because the list is being
9832 reordered as it is scanned, but we know:
9834 1. The list after HWM has not been touched yet, so we can reindex it safely.
9836 2. The list before and including HWM has been altered, but remains
9837 well-formed throughout the sibling-list building operation.
9839 so, we can do the reindex operation in two parts, on the processed and
9840 then the unprocessed halves of the list. */
9842 static hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *
9843 omp_reindex_mapping_groups (tree
*list_p
,
9844 vec
<omp_mapping_group
> *groups
,
9845 vec
<omp_mapping_group
> *processed_groups
,
9848 hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *grpmap
9849 = new hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*>;
9851 processed_groups
->truncate (0);
9853 omp_gather_mapping_groups_1 (list_p
, processed_groups
, sentinel
);
9854 omp_index_mapping_groups_1 (grpmap
, processed_groups
, NULL_TREE
);
9856 omp_index_mapping_groups_1 (grpmap
, groups
, sentinel
);
9861 /* Find the immediately-containing struct for a component ref (etc.)
9865 omp_containing_struct (tree expr
)
9871 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9873 if (TREE_CODE (expr
) != ARRAY_REF
&& TREE_CODE (expr
) != COMPONENT_REF
)
9876 while (TREE_CODE (expr
) == ARRAY_REF
)
9877 expr
= TREE_OPERAND (expr
, 0);
9879 if (TREE_CODE (expr
) == COMPONENT_REF
)
9880 expr
= TREE_OPERAND (expr
, 0);
9885 /* Return TRUE if DECL describes a component that is part of a whole structure
9886 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9887 that maps that structure, if present. */
9890 omp_mapped_by_containing_struct (hash_map
<tree_operand_hash_no_se
,
9891 omp_mapping_group
*> *grpmap
,
9893 omp_mapping_group
**mapped_by_group
)
9895 tree wsdecl
= NULL_TREE
;
9897 *mapped_by_group
= NULL
;
9901 wsdecl
= omp_containing_struct (decl
);
9904 omp_mapping_group
**wholestruct
= grpmap
->get (wsdecl
);
9906 && TREE_CODE (wsdecl
) == MEM_REF
9907 && integer_zerop (TREE_OPERAND (wsdecl
, 1)))
9909 tree deref
= TREE_OPERAND (wsdecl
, 0);
9910 deref
= build_fold_indirect_ref (deref
);
9911 wholestruct
= grpmap
->get (deref
);
9915 *mapped_by_group
= *wholestruct
;
9924 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9928 omp_tsort_mapping_groups_1 (omp_mapping_group
***outlist
,
9929 vec
<omp_mapping_group
> *groups
,
9930 hash_map
<tree_operand_hash_no_se
,
9931 omp_mapping_group
*> *grpmap
,
9932 omp_mapping_group
*grp
)
9934 if (grp
->mark
== PERMANENT
)
9936 if (grp
->mark
== TEMPORARY
)
9938 fprintf (stderr
, "when processing group:\n");
9939 debug_mapping_group (grp
);
9940 internal_error ("base pointer cycle detected");
9943 grp
->mark
= TEMPORARY
;
9945 tree attaches_to
= omp_get_attachment (grp
);
9949 omp_mapping_group
**basep
= grpmap
->get (attaches_to
);
9951 if (basep
&& *basep
!= grp
)
9953 for (omp_mapping_group
*w
= *basep
; w
; w
= w
->sibling
)
9954 if (!omp_tsort_mapping_groups_1 (outlist
, groups
, grpmap
, w
))
9959 tree decl
= OMP_CLAUSE_DECL (*grp
->grp_start
);
9963 tree base
= omp_get_base_pointer (decl
);
9968 omp_mapping_group
**innerp
= grpmap
->get (base
);
9969 omp_mapping_group
*wholestruct
;
9971 /* We should treat whole-structure mappings as if all (pointer, in this
9972 case) members are mapped as individual list items. Check if we have
9973 such a whole-structure mapping, if we don't have an explicit reference
9974 to the pointer member itself. */
9976 && TREE_CODE (base
) == COMPONENT_REF
9977 && omp_mapped_by_containing_struct (grpmap
, base
, &wholestruct
))
9978 innerp
= &wholestruct
;
9980 if (innerp
&& *innerp
!= grp
)
9982 for (omp_mapping_group
*w
= *innerp
; w
; w
= w
->sibling
)
9983 if (!omp_tsort_mapping_groups_1 (outlist
, groups
, grpmap
, w
))
9991 grp
->mark
= PERMANENT
;
9993 /* Emit grp to output list. */
9996 *outlist
= &grp
->next
;
10001 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
10002 before mappings that use those pointers. This is an implementation of the
10003 depth-first search algorithm, described e.g. at:
10005 https://en.wikipedia.org/wiki/Topological_sorting
10008 static omp_mapping_group
*
10009 omp_tsort_mapping_groups (vec
<omp_mapping_group
> *groups
,
10010 hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*>
10013 omp_mapping_group
*grp
, *outlist
= NULL
, **cursor
;
10018 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10020 if (grp
->mark
!= PERMANENT
)
10021 if (!omp_tsort_mapping_groups_1 (&cursor
, groups
, grpmap
, grp
))
10028 /* Split INLIST into two parts, moving groups corresponding to
10029 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
10030 The former list is then appended to the latter. Each sub-list retains the
10031 order of the original list.
10032 Note that ATTACH nodes are later moved to the end of the list in
10033 gimplify_adjust_omp_clauses, for target regions. */
10035 static omp_mapping_group
*
10036 omp_segregate_mapping_groups (omp_mapping_group
*inlist
)
10038 omp_mapping_group
*ard_groups
= NULL
, *tf_groups
= NULL
;
10039 omp_mapping_group
**ard_tail
= &ard_groups
, **tf_tail
= &tf_groups
;
10041 for (omp_mapping_group
*w
= inlist
; w
;)
10043 tree c
= *w
->grp_start
;
10044 omp_mapping_group
*next
= w
->next
;
10046 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
);
10048 switch (OMP_CLAUSE_MAP_KIND (c
))
10050 case GOMP_MAP_ALLOC
:
10051 case GOMP_MAP_RELEASE
:
10052 case GOMP_MAP_DELETE
:
10055 ard_tail
= &w
->next
;
10061 tf_tail
= &w
->next
;
10067 /* Now splice the lists together... */
10068 *tf_tail
= ard_groups
;
10073 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
10074 those groups based on the output list of omp_tsort_mapping_groups --
10075 singly-linked, threaded through each element's NEXT pointer starting at
10076 HEAD. Each list element appears exactly once in that linked list.
10078 Each element of GROUPS may correspond to one or several mapping nodes.
10079 Node groups are kept together, and in the reordered list, the positions of
10080 the original groups are reused for the positions of the reordered list.
10081 Hence if we have e.g.
10083 {to ptr ptr} firstprivate {tofrom ptr} ...
10085 first group non-"map" second group
10087 and say the second group contains a base pointer for the first so must be
10088 moved before it, the resulting list will contain:
10090 {tofrom ptr} firstprivate {to ptr ptr} ...
10091 ^ prev. second group ^ prev. first group
10095 omp_reorder_mapping_groups (vec
<omp_mapping_group
> *groups
,
10096 omp_mapping_group
*head
,
10099 omp_mapping_group
*grp
;
10101 unsigned numgroups
= groups
->length ();
10102 auto_vec
<tree
> old_heads (numgroups
);
10103 auto_vec
<tree
*> old_headps (numgroups
);
10104 auto_vec
<tree
> new_heads (numgroups
);
10105 auto_vec
<tree
> old_succs (numgroups
);
10106 bool map_at_start
= (list_p
== (*groups
)[0].grp_start
);
10108 tree
*new_grp_tail
= NULL
;
10110 /* Stash the start & end nodes of each mapping group before we start
10111 modifying the list. */
10112 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10114 old_headps
.quick_push (grp
->grp_start
);
10115 old_heads
.quick_push (*grp
->grp_start
);
10116 old_succs
.quick_push (OMP_CLAUSE_CHAIN (grp
->grp_end
));
10119 /* And similarly, the heads of the groups in the order we want to rearrange
10121 for (omp_mapping_group
*w
= head
; w
; w
= w
->next
)
10122 new_heads
.quick_push (*w
->grp_start
);
10124 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10128 if (new_grp_tail
&& old_succs
[i
- 1] == old_heads
[i
])
10130 /* a {b c d} {e f g} h i j (original)
10132 a {k l m} {e f g} h i j (inserted new group on last iter)
10134 a {k l m} {n o p} h i j (this time, chain last group to new one)
10137 *new_grp_tail
= new_heads
[i
];
10139 else if (new_grp_tail
)
10141 /* a {b c d} e {f g h} i j k (original)
10143 a {l m n} e {f g h} i j k (gap after last iter's group)
10145 a {l m n} e {o p q} h i j (chain last group to old successor)
10148 *new_grp_tail
= old_succs
[i
- 1];
10149 *old_headps
[i
] = new_heads
[i
];
10153 /* The first inserted group -- point to new group, and leave end
10159 *grp
->grp_start
= new_heads
[i
];
10162 new_grp_tail
= &OMP_CLAUSE_CHAIN (head
->grp_end
);
10168 *new_grp_tail
= old_succs
[numgroups
- 1];
10170 gcc_assert (!head
);
10172 return map_at_start
? (*groups
)[0].grp_start
: list_p
;
10175 /* DECL is supposed to have lastprivate semantics in the outer contexts
10176 of combined/composite constructs, starting with OCTX.
10177 Add needed lastprivate, shared or map clause if no data sharing or
10178 mapping clause are present. IMPLICIT_P is true if it is an implicit
10179 clause (IV on simd), in which case the lastprivate will not be
10180 copied to some constructs. */
10183 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx
*octx
,
10184 tree decl
, bool implicit_p
)
10186 struct gimplify_omp_ctx
*orig_octx
= octx
;
10187 for (; octx
; octx
= octx
->outer_context
)
10189 if ((octx
->region_type
== ORT_COMBINED_PARALLEL
10190 || (octx
->region_type
& ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
)
10191 && splay_tree_lookup (octx
->variables
,
10192 (splay_tree_key
) decl
) == NULL
)
10194 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
10197 if ((octx
->region_type
& ORT_TASK
) != 0
10198 && octx
->combined_loop
10199 && splay_tree_lookup (octx
->variables
,
10200 (splay_tree_key
) decl
) == NULL
)
10202 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
10206 && octx
->region_type
== ORT_WORKSHARE
10207 && octx
->combined_loop
10208 && splay_tree_lookup (octx
->variables
,
10209 (splay_tree_key
) decl
) == NULL
10210 && octx
->outer_context
10211 && octx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
10212 && splay_tree_lookup (octx
->outer_context
->variables
,
10213 (splay_tree_key
) decl
) == NULL
)
10215 octx
= octx
->outer_context
;
10216 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
10219 if ((octx
->region_type
== ORT_WORKSHARE
|| octx
->region_type
== ORT_ACC
)
10220 && octx
->combined_loop
10221 && splay_tree_lookup (octx
->variables
,
10222 (splay_tree_key
) decl
) == NULL
10223 && !omp_check_private (octx
, decl
, false))
10225 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
10228 if (octx
->region_type
== ORT_COMBINED_TARGET
)
10230 splay_tree_node n
= splay_tree_lookup (octx
->variables
,
10231 (splay_tree_key
) decl
);
10234 omp_add_variable (octx
, decl
, GOVD_MAP
| GOVD_SEEN
);
10235 octx
= octx
->outer_context
;
10237 else if (!implicit_p
10238 && (n
->value
& GOVD_FIRSTPRIVATE_IMPLICIT
))
10240 n
->value
&= ~(GOVD_FIRSTPRIVATE
10241 | GOVD_FIRSTPRIVATE_IMPLICIT
10243 omp_add_variable (octx
, decl
, GOVD_MAP
| GOVD_SEEN
);
10244 octx
= octx
->outer_context
;
10249 if (octx
&& (implicit_p
|| octx
!= orig_octx
))
10250 omp_notice_variable (octx
, decl
, true);
10253 /* If we have mappings INNER and OUTER, where INNER is a component access and
10254 OUTER is a mapping of the whole containing struct, check that the mappings
10255 are compatible. We'll be deleting the inner mapping, so we need to make
10256 sure the outer mapping does (at least) the same transfers to/from the device
10257 as the inner mapping. */
10260 omp_check_mapping_compatibility (location_t loc
,
10261 omp_mapping_group
*outer
,
10262 omp_mapping_group
*inner
)
10264 tree first_outer
= *outer
->grp_start
, first_inner
= *inner
->grp_start
;
10266 gcc_assert (OMP_CLAUSE_CODE (first_outer
) == OMP_CLAUSE_MAP
);
10267 gcc_assert (OMP_CLAUSE_CODE (first_inner
) == OMP_CLAUSE_MAP
);
10269 enum gomp_map_kind outer_kind
= OMP_CLAUSE_MAP_KIND (first_outer
);
10270 enum gomp_map_kind inner_kind
= OMP_CLAUSE_MAP_KIND (first_inner
);
10272 if (outer_kind
== inner_kind
)
10275 switch (outer_kind
)
10277 case GOMP_MAP_ALWAYS_TO
:
10278 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
10279 || inner_kind
== GOMP_MAP_ALLOC
10280 || inner_kind
== GOMP_MAP_TO
)
10284 case GOMP_MAP_ALWAYS_FROM
:
10285 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
10286 || inner_kind
== GOMP_MAP_ALLOC
10287 || inner_kind
== GOMP_MAP_FROM
)
10292 case GOMP_MAP_FROM
:
10293 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
10294 || inner_kind
== GOMP_MAP_ALLOC
)
10298 case GOMP_MAP_ALWAYS_TOFROM
:
10299 case GOMP_MAP_TOFROM
:
10300 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
10301 || inner_kind
== GOMP_MAP_ALLOC
10302 || inner_kind
== GOMP_MAP_TO
10303 || inner_kind
== GOMP_MAP_FROM
10304 || inner_kind
== GOMP_MAP_TOFROM
)
10312 error_at (loc
, "data movement for component %qE is not compatible with "
10313 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner
),
10314 OMP_CLAUSE_DECL (first_outer
));
10319 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
10320 clause dependencies we handle for now are struct element mappings and
10321 whole-struct mappings on the same directive, and duplicate clause
10325 oacc_resolve_clause_dependencies (vec
<omp_mapping_group
> *groups
,
10326 hash_map
<tree_operand_hash_no_se
,
10327 omp_mapping_group
*> *grpmap
)
10330 omp_mapping_group
*grp
;
10331 hash_set
<tree_operand_hash
> *seen_components
= NULL
;
10332 hash_set
<tree_operand_hash
> *shown_error
= NULL
;
10334 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10336 tree grp_end
= grp
->grp_end
;
10337 tree decl
= OMP_CLAUSE_DECL (grp_end
);
10339 gcc_assert (OMP_CLAUSE_CODE (grp_end
) == OMP_CLAUSE_MAP
);
10341 if (DECL_P (grp_end
))
10344 tree c
= OMP_CLAUSE_DECL (*grp
->grp_start
);
10345 while (TREE_CODE (c
) == ARRAY_REF
)
10346 c
= TREE_OPERAND (c
, 0);
10347 if (TREE_CODE (c
) != COMPONENT_REF
)
10349 if (!seen_components
)
10350 seen_components
= new hash_set
<tree_operand_hash
> ();
10352 shown_error
= new hash_set
<tree_operand_hash
> ();
10353 if (seen_components
->contains (c
)
10354 && !shown_error
->contains (c
))
10356 error_at (OMP_CLAUSE_LOCATION (grp_end
),
10357 "%qE appears more than once in map clauses",
10358 OMP_CLAUSE_DECL (grp_end
));
10359 shown_error
->add (c
);
10362 seen_components
->add (c
);
10364 omp_mapping_group
*struct_group
;
10365 if (omp_mapped_by_containing_struct (grpmap
, decl
, &struct_group
)
10366 && *grp
->grp_start
== grp_end
)
10368 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end
),
10369 struct_group
, grp
);
10370 /* Remove the whole of this mapping -- redundant. */
10371 grp
->deleted
= true;
10375 if (seen_components
)
10376 delete seen_components
;
10378 delete shown_error
;
10381 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10382 is linked to the previous node pointed to by INSERT_AT. */
10385 omp_siblist_insert_node_after (tree newnode
, tree
*insert_at
)
10387 OMP_CLAUSE_CHAIN (newnode
) = *insert_at
;
10388 *insert_at
= newnode
;
10389 return &OMP_CLAUSE_CHAIN (newnode
);
10392 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10393 pointed to by chain MOVE_AFTER instead. */
10396 omp_siblist_move_node_after (tree node
, tree
*old_pos
, tree
*move_after
)
10398 gcc_assert (node
== *old_pos
);
10399 *old_pos
= OMP_CLAUSE_CHAIN (node
);
10400 OMP_CLAUSE_CHAIN (node
) = *move_after
;
10401 *move_after
= node
;
10404 /* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
10405 LAST_NODE to after MOVE_AFTER chain. Similar to below function, but no
10406 new nodes are prepended to the list before splicing into the new position.
10407 Return the position we should continue scanning the list at, or NULL to
10408 stay where we were. */
10411 omp_siblist_move_nodes_after (tree
*first_ptr
, tree last_node
,
10414 if (first_ptr
== move_after
)
10417 tree tmp
= *first_ptr
;
10418 *first_ptr
= OMP_CLAUSE_CHAIN (last_node
);
10419 OMP_CLAUSE_CHAIN (last_node
) = *move_after
;
10425 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10426 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10427 pointer MOVE_AFTER.
10429 The latter list was previously part of the OMP clause list, and the former
10430 (prepended) part is comprised of new nodes.
10432 We start with a list of nodes starting with a struct mapping node. We
10433 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10434 node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
10435 the group of mapping nodes we are currently processing (from the chain
10436 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10437 we should continue processing from, or NULL to stay where we were.
10439 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10440 different) is worked through below. Here we are processing LAST_NODE, and
10441 FIRST_PTR points at the preceding mapping clause:
10443 #. mapping node chain
10444 ---------------------------------------------------
10445 A. struct_node [->B]
10447 C. comp_2 [->D (move_after)]
10449 E. attach_3 [->F (first_ptr)]
10450 F. map_to_4 [->G (continue_at)]
10451 G. attach_4 (last_node) [->H]
10454 *last_new_tail = *first_ptr;
10456 I. new_node (first_new) [->F (last_new_tail)]
10458 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10460 #. mapping node chain
10461 ----------------------------------------------------
10462 A. struct_node [->B]
10464 C. comp_2 [->D (move_after)]
10466 E. attach_3 [->H (first_ptr)]
10467 F. map_to_4 [->G (continue_at)]
10468 G. attach_4 (last_node) [->H]
10471 I. new_node (first_new) [->F (last_new_tail)]
10473 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10475 #. mapping node chain
10476 ---------------------------------------------------
10477 A. struct_node [->B]
10479 C. comp_2 [->D (move_after)]
10481 E. attach_3 [->H (continue_at)]
10483 G. attach_4 (last_node) [->D]
10486 I. new_node (first_new) [->F (last_new_tail)]
10488 *move_after = first_new;
10490 #. mapping node chain
10491 ---------------------------------------------------
10492 A. struct_node [->B]
10494 C. comp_2 [->I (move_after)]
10496 E. attach_3 [->H (continue_at)]
10498 G. attach_4 (last_node) [->D]
10500 I. new_node (first_new) [->F (last_new_tail)]
10504 #. mapping node chain
10505 ---------------------------------------------------
10506 A. struct_node [->B]
10508 C. comp_2 [->I (move_after)]
10509 I. new_node (first_new) [->F (last_new_tail)]
10511 G. attach_4 (last_node) [->D]
10513 E. attach_3 [->H (continue_at)]
10518 omp_siblist_move_concat_nodes_after (tree first_new
, tree
*last_new_tail
,
10519 tree
*first_ptr
, tree last_node
,
10522 tree
*continue_at
= NULL
;
10523 *last_new_tail
= *first_ptr
;
10524 if (first_ptr
== move_after
)
10525 *move_after
= first_new
;
10528 *first_ptr
= OMP_CLAUSE_CHAIN (last_node
);
10529 continue_at
= first_ptr
;
10530 OMP_CLAUSE_CHAIN (last_node
) = *move_after
;
10531 *move_after
= first_new
;
10533 return continue_at
;
10536 /* Mapping struct members causes an additional set of nodes to be created,
10537 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10538 number of members being mapped, in order of ascending position (address or
10541 We scan through the list of mapping clauses, calling this function for each
10542 struct member mapping we find, and build up the list of mappings after the
10543 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10544 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10545 moved into place in the sorted list.
10554 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10557 GOMP_MAP_STRUCT (4)
10558 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10559 GOMP_MAP_ALLOC (struct.a)
10560 GOMP_MAP_ALLOC (struct.b)
10561 GOMP_MAP_TO (struct.c)
10562 GOMP_MAP_ALLOC (struct.d)
10565 In the case where we are mapping references to pointers, or in Fortran if
10566 we are mapping an array with a descriptor, additional nodes may be created
10567 after the struct node list also.
10569 The return code is either a pointer to the next node to process (if the
10570 list has been rearranged), else NULL to continue with the next node in the
10574 omp_accumulate_sibling_list (enum omp_region_type region_type
,
10575 enum tree_code code
,
10576 hash_map
<tree_operand_hash
, tree
>
10577 *&struct_map_to_clause
, tree
*grp_start_p
,
10578 tree grp_end
, tree
*inner
)
10580 poly_offset_int coffset
;
10581 poly_int64 cbitpos
;
10582 tree ocd
= OMP_CLAUSE_DECL (grp_end
);
10583 bool openmp
= !(region_type
& ORT_ACC
);
10584 tree
*continue_at
= NULL
;
10586 while (TREE_CODE (ocd
) == ARRAY_REF
)
10587 ocd
= TREE_OPERAND (ocd
, 0);
10589 if (INDIRECT_REF_P (ocd
))
10590 ocd
= TREE_OPERAND (ocd
, 0);
10592 tree base
= extract_base_bit_offset (ocd
, &cbitpos
, &coffset
);
10594 bool ptr
= (OMP_CLAUSE_MAP_KIND (grp_end
) == GOMP_MAP_ALWAYS_POINTER
);
10595 bool attach_detach
= ((OMP_CLAUSE_MAP_KIND (grp_end
)
10596 == GOMP_MAP_ATTACH_DETACH
)
10597 || (OMP_CLAUSE_MAP_KIND (grp_end
)
10598 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
));
10599 bool attach
= (OMP_CLAUSE_MAP_KIND (grp_end
) == GOMP_MAP_ATTACH
10600 || OMP_CLAUSE_MAP_KIND (grp_end
) == GOMP_MAP_DETACH
);
10602 /* FIXME: If we're not mapping the base pointer in some other clause on this
10603 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10605 if (openmp
&& attach_detach
)
10608 if (!struct_map_to_clause
|| struct_map_to_clause
->get (base
) == NULL
)
10610 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
), OMP_CLAUSE_MAP
);
10611 gomp_map_kind k
= attach
? GOMP_MAP_FORCE_PRESENT
: GOMP_MAP_STRUCT
;
10613 OMP_CLAUSE_SET_MAP_KIND (l
, k
);
10615 OMP_CLAUSE_DECL (l
) = unshare_expr (base
);
10617 OMP_CLAUSE_SIZE (l
)
10618 = (!attach
? size_int (1)
10619 : (DECL_P (OMP_CLAUSE_DECL (l
))
10620 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l
))
10621 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l
)))));
10622 if (struct_map_to_clause
== NULL
)
10623 struct_map_to_clause
= new hash_map
<tree_operand_hash
, tree
>;
10624 struct_map_to_clause
->put (base
, l
);
10626 if (ptr
|| attach_detach
)
10630 = build_omp_struct_comp_nodes (code
, *grp_start_p
, grp_end
,
10632 OMP_CLAUSE_CHAIN (l
) = alloc_node
;
10634 tree
*insert_node_pos
= grp_start_p
;
10638 OMP_CLAUSE_CHAIN (extra_node
) = *insert_node_pos
;
10639 OMP_CLAUSE_CHAIN (alloc_node
) = extra_node
;
10642 OMP_CLAUSE_CHAIN (alloc_node
) = *insert_node_pos
;
10644 *insert_node_pos
= l
;
10648 gcc_assert (*grp_start_p
== grp_end
);
10649 grp_start_p
= omp_siblist_insert_node_after (l
, grp_start_p
);
10652 tree noind
= omp_strip_indirections (base
);
10655 && (region_type
& ORT_TARGET
)
10656 && TREE_CODE (noind
) == COMPONENT_REF
)
10658 /* The base for this component access is a struct component access
10659 itself. Insert a node to be processed on the next iteration of
10660 our caller's loop, which will subsequently be turned into a new,
10661 inner GOMP_MAP_STRUCT mapping.
10663 We need to do this else the non-DECL_P base won't be
10664 rewritten correctly in the offloaded region. */
10665 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
),
10667 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_FORCE_PRESENT
);
10668 OMP_CLAUSE_DECL (c2
) = unshare_expr (noind
);
10669 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (TREE_TYPE (noind
));
10674 tree sdecl
= omp_strip_components_and_deref (base
);
10676 if (POINTER_TYPE_P (TREE_TYPE (sdecl
)) && (region_type
& ORT_TARGET
))
10678 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
),
10681 = (INDIRECT_REF_P (base
)
10682 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
10684 || (INDIRECT_REF_P (TREE_OPERAND (base
, 0))
10685 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10686 (TREE_OPERAND (base
, 0), 0)))
10687 == REFERENCE_TYPE
))));
10688 enum gomp_map_kind mkind
= base_ref
? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10689 : GOMP_MAP_FIRSTPRIVATE_POINTER
;
10690 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
10691 OMP_CLAUSE_DECL (c2
) = sdecl
;
10692 tree baddr
= build_fold_addr_expr (base
);
10693 baddr
= fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end
),
10694 ptrdiff_type_node
, baddr
);
10695 /* This isn't going to be good enough when we add support for more
10696 complicated lvalue expressions. FIXME. */
10697 if (TREE_CODE (TREE_TYPE (sdecl
)) == REFERENCE_TYPE
10698 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl
))) == POINTER_TYPE
)
10699 sdecl
= build_simple_mem_ref (sdecl
);
10700 tree decladdr
= fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end
),
10701 ptrdiff_type_node
, sdecl
);
10702 OMP_CLAUSE_SIZE (c2
)
10703 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end
), MINUS_EXPR
,
10704 ptrdiff_type_node
, baddr
, decladdr
);
10705 /* Insert after struct node. */
10706 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
10707 OMP_CLAUSE_CHAIN (l
) = c2
;
10712 else if (struct_map_to_clause
)
10714 tree
*osc
= struct_map_to_clause
->get (base
);
10715 tree
*sc
= NULL
, *scp
= NULL
;
10716 sc
= &OMP_CLAUSE_CHAIN (*osc
);
10717 /* The struct mapping might be immediately followed by a
10718 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10719 indirect access or a reference, or both. (This added node is removed
10720 in omp-low.c after it has been processed there.) */
10722 && (OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
10723 || OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10724 sc
= &OMP_CLAUSE_CHAIN (*sc
);
10725 for (; *sc
!= grp_end
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
10726 if ((ptr
|| attach_detach
) && sc
== grp_start_p
)
10728 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != COMPONENT_REF
10729 && TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != INDIRECT_REF
10730 && TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != ARRAY_REF
)
10734 tree sc_decl
= OMP_CLAUSE_DECL (*sc
);
10735 poly_offset_int offset
;
10738 if (TREE_CODE (sc_decl
) == ARRAY_REF
)
10740 while (TREE_CODE (sc_decl
) == ARRAY_REF
)
10741 sc_decl
= TREE_OPERAND (sc_decl
, 0);
10742 if (TREE_CODE (sc_decl
) != COMPONENT_REF
10743 || TREE_CODE (TREE_TYPE (sc_decl
)) != ARRAY_TYPE
)
10746 else if (INDIRECT_REF_P (sc_decl
)
10747 && TREE_CODE (TREE_OPERAND (sc_decl
, 0)) == COMPONENT_REF
10748 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl
, 0)))
10749 == REFERENCE_TYPE
))
10750 sc_decl
= TREE_OPERAND (sc_decl
, 0);
10752 tree base2
= extract_base_bit_offset (sc_decl
, &bitpos
, &offset
);
10753 if (!base2
|| !operand_equal_p (base2
, base
, 0))
10757 if (maybe_lt (coffset
, offset
)
10758 || (known_eq (coffset
, offset
)
10759 && maybe_lt (cbitpos
, bitpos
)))
10761 if (ptr
|| attach_detach
)
10769 OMP_CLAUSE_SIZE (*osc
)
10770 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
), size_one_node
);
10771 if (ptr
|| attach_detach
)
10773 tree cl
= NULL_TREE
, extra_node
;
10774 tree alloc_node
= build_omp_struct_comp_nodes (code
, *grp_start_p
,
10775 grp_end
, &extra_node
);
10776 tree
*tail_chain
= NULL
;
10780 grp_end : the last (or only) node in this group.
10781 grp_start_p : pointer to the first node in a pointer mapping group
10782 up to and including GRP_END.
10783 sc : pointer to the chain for the end of the struct component
10785 scp : pointer to the chain for the sorted position at which we
10786 should insert in the middle of the struct component list
10787 (else NULL to insert at end).
10788 alloc_node : the "alloc" node for the structure (pointer-type)
10789 component. We insert at SCP (if present), else SC
10790 (the end of the struct component list).
10791 extra_node : a newly-synthesized node for an additional indirect
10792 pointer mapping or a Fortran pointer set, if needed.
10793 cl : first node to prepend before grp_start_p.
10794 tail_chain : pointer to chain of last prepended node.
10796 The general idea is we move the nodes for this struct mapping
10797 together: the alloc node goes into the sorted list directly after
10798 the struct mapping, and any extra nodes (together with the nodes
10799 mapping arrays pointed to by struct components) get moved after
10800 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10801 the end of the struct component mapping list. It's important that
10802 the alloc_node comes first in that case because it's part of the
10803 sorted component mapping list (but subsequent nodes are not!). */
10806 omp_siblist_insert_node_after (alloc_node
, scp
);
10808 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10809 already inserted it) and the extra_node (if it is present). The
10810 list can be empty if we added alloc_node above and there is no
10812 if (scp
&& extra_node
)
10815 tail_chain
= &OMP_CLAUSE_CHAIN (extra_node
);
10817 else if (extra_node
)
10819 OMP_CLAUSE_CHAIN (alloc_node
) = extra_node
;
10821 tail_chain
= &OMP_CLAUSE_CHAIN (extra_node
);
10826 tail_chain
= &OMP_CLAUSE_CHAIN (alloc_node
);
10830 = cl
? omp_siblist_move_concat_nodes_after (cl
, tail_chain
,
10831 grp_start_p
, grp_end
,
10833 : omp_siblist_move_nodes_after (grp_start_p
, grp_end
, sc
);
10835 else if (*sc
!= grp_end
)
10837 gcc_assert (*grp_start_p
== grp_end
);
10839 /* We are moving the current node back to a previous struct node:
10840 the node that used to point to the current node will now point to
10842 continue_at
= grp_start_p
;
10843 /* In the non-pointer case, the mapping clause itself is moved into
10844 the correct position in the struct component list, which in this
10845 case is just SC. */
10846 omp_siblist_move_node_after (*grp_start_p
, grp_start_p
, sc
);
10849 return continue_at
;
10852 /* Scan through GROUPS, and create sorted structure sibling lists without
10856 omp_build_struct_sibling_lists (enum tree_code code
,
10857 enum omp_region_type region_type
,
10858 vec
<omp_mapping_group
> *groups
,
10859 hash_map
<tree_operand_hash_no_se
,
10860 omp_mapping_group
*> **grpmap
,
10864 omp_mapping_group
*grp
;
10865 hash_map
<tree_operand_hash
, tree
> *struct_map_to_clause
= NULL
;
10866 bool success
= true;
10867 tree
*new_next
= NULL
;
10868 tree
*tail
= &OMP_CLAUSE_CHAIN ((*groups
)[groups
->length () - 1].grp_end
);
10869 auto_vec
<omp_mapping_group
> pre_hwm_groups
;
10871 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10873 tree c
= grp
->grp_end
;
10874 tree decl
= OMP_CLAUSE_DECL (c
);
10875 tree grp_end
= grp
->grp_end
;
10876 tree sentinel
= OMP_CLAUSE_CHAIN (grp_end
);
10879 grp
->grp_start
= new_next
;
10883 tree
*grp_start_p
= grp
->grp_start
;
10888 /* Skip groups we marked for deletion in
10889 oacc_resolve_clause_dependencies. */
10893 if (OMP_CLAUSE_CHAIN (*grp_start_p
)
10894 && OMP_CLAUSE_CHAIN (*grp_start_p
) != grp_end
)
10896 /* Don't process an array descriptor that isn't inside a derived type
10897 as a struct (the GOMP_MAP_POINTER following will have the form
10898 "var.data", but such mappings are handled specially). */
10899 tree grpmid
= OMP_CLAUSE_CHAIN (*grp_start_p
);
10900 if (OMP_CLAUSE_CODE (grpmid
) == OMP_CLAUSE_MAP
10901 && OMP_CLAUSE_MAP_KIND (grpmid
) == GOMP_MAP_TO_PSET
10902 && DECL_P (OMP_CLAUSE_DECL (grpmid
)))
10907 if (TREE_CODE (d
) == ARRAY_REF
)
10909 while (TREE_CODE (d
) == ARRAY_REF
)
10910 d
= TREE_OPERAND (d
, 0);
10911 if (TREE_CODE (d
) == COMPONENT_REF
10912 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
10916 && INDIRECT_REF_P (decl
)
10917 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
10918 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
10920 && (OMP_CLAUSE_MAP_KIND (c
)
10921 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
))
10922 decl
= TREE_OPERAND (decl
, 0);
10926 if (TREE_CODE (decl
) != COMPONENT_REF
)
10929 /* If we're mapping the whole struct in another node, skip adding this
10930 node to a sibling list. */
10931 omp_mapping_group
*wholestruct
;
10932 if (omp_mapped_by_containing_struct (*grpmap
, OMP_CLAUSE_DECL (c
),
10935 if (!(region_type
& ORT_ACC
)
10936 && *grp_start_p
== grp_end
)
10937 /* Remove the whole of this mapping -- redundant. */
10938 grp
->deleted
= true;
10943 if (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
10944 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
10945 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
10946 && code
!= OACC_UPDATE
10947 && code
!= OMP_TARGET_UPDATE
)
10949 if (error_operand_p (decl
))
10955 tree stype
= TREE_TYPE (decl
);
10956 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
10957 stype
= TREE_TYPE (stype
);
10958 if (TYPE_SIZE_UNIT (stype
) == NULL
10959 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
10961 error_at (OMP_CLAUSE_LOCATION (c
),
10962 "mapping field %qE of variable length "
10963 "structure", OMP_CLAUSE_DECL (c
));
10968 tree inner
= NULL_TREE
;
10971 = omp_accumulate_sibling_list (region_type
, code
,
10972 struct_map_to_clause
, grp_start_p
,
10977 if (new_next
&& *new_next
== NULL_TREE
)
10982 OMP_CLAUSE_CHAIN (inner
) = NULL_TREE
;
10983 omp_mapping_group newgrp
;
10984 newgrp
.grp_start
= new_next
? new_next
: tail
;
10985 newgrp
.grp_end
= inner
;
10986 newgrp
.mark
= UNVISITED
;
10987 newgrp
.sibling
= NULL
;
10988 newgrp
.deleted
= false;
10989 newgrp
.next
= NULL
;
10990 groups
->safe_push (newgrp
);
10992 /* !!! Growing GROUPS might invalidate the pointers in the group
10993 map. Rebuild it here. This is a bit inefficient, but
10994 shouldn't happen very often. */
10997 = omp_reindex_mapping_groups (list_p
, groups
, &pre_hwm_groups
,
11000 tail
= &OMP_CLAUSE_CHAIN (inner
);
11005 /* Delete groups marked for deletion above. At this point the order of the
11006 groups may no longer correspond to the order of the underlying list,
11007 which complicates this a little. First clear out OMP_CLAUSE_DECL for
11008 deleted nodes... */
11010 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
11012 for (tree d
= *grp
->grp_start
;
11013 d
!= OMP_CLAUSE_CHAIN (grp
->grp_end
);
11014 d
= OMP_CLAUSE_CHAIN (d
))
11015 OMP_CLAUSE_DECL (d
) = NULL_TREE
;
11017 /* ...then sweep through the list removing the now-empty nodes. */
11022 if (OMP_CLAUSE_CODE (*tail
) == OMP_CLAUSE_MAP
11023 && OMP_CLAUSE_DECL (*tail
) == NULL_TREE
)
11024 *tail
= OMP_CLAUSE_CHAIN (*tail
);
11026 tail
= &OMP_CLAUSE_CHAIN (*tail
);
11030 if (struct_map_to_clause
)
11031 delete struct_map_to_clause
;
11036 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
11037 and previous omp contexts. */
11040 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
11041 enum omp_region_type region_type
,
11042 enum tree_code code
)
11044 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
11046 tree
*orig_list_p
= list_p
;
11047 int handled_depend_iterators
= -1;
11050 ctx
= new_omp_context (region_type
);
11052 outer_ctx
= ctx
->outer_context
;
11053 if (code
== OMP_TARGET
)
11055 if (!lang_GNU_Fortran ())
11056 ctx
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
11057 ctx
->defaultmap
[GDMK_SCALAR
] = GOVD_FIRSTPRIVATE
;
11058 ctx
->defaultmap
[GDMK_SCALAR_TARGET
] = (lang_GNU_Fortran ()
11059 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
11061 if (!lang_GNU_Fortran ())
11065 case OMP_TARGET_DATA
:
11066 case OMP_TARGET_ENTER_DATA
:
11067 case OMP_TARGET_EXIT_DATA
:
11069 case OACC_HOST_DATA
:
11070 case OACC_PARALLEL
:
11072 ctx
->target_firstprivatize_array_bases
= true;
11077 if (code
== OMP_TARGET
11078 || code
== OMP_TARGET_DATA
11079 || code
== OMP_TARGET_ENTER_DATA
11080 || code
== OMP_TARGET_EXIT_DATA
)
11082 vec
<omp_mapping_group
> *groups
;
11083 groups
= omp_gather_mapping_groups (list_p
);
11086 hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *grpmap
;
11087 grpmap
= omp_index_mapping_groups (groups
);
11089 omp_build_struct_sibling_lists (code
, region_type
, groups
, &grpmap
,
11092 omp_mapping_group
*outlist
= NULL
;
11094 /* Topological sorting may fail if we have duplicate nodes, which
11095 we should have detected and shown an error for already. Skip
11096 sorting in that case. */
11103 /* Rebuild now we have struct sibling lists. */
11104 groups
= omp_gather_mapping_groups (list_p
);
11105 grpmap
= omp_index_mapping_groups (groups
);
11107 outlist
= omp_tsort_mapping_groups (groups
, grpmap
);
11108 outlist
= omp_segregate_mapping_groups (outlist
);
11109 list_p
= omp_reorder_mapping_groups (groups
, outlist
, list_p
);
11116 /* OpenMP map clauses with 'present' need to go in front of those
11118 tree present_map_head
= NULL
;
11119 tree
*present_map_tail_p
= &present_map_head
;
11120 tree
*first_map_clause_p
= NULL
;
11122 for (tree
*c_p
= list_p
; *c_p
; )
11125 tree
*next_c_p
= &OMP_CLAUSE_CHAIN (c
);
11127 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
11129 if (!first_map_clause_p
)
11130 first_map_clause_p
= c_p
;
11131 switch (OMP_CLAUSE_MAP_KIND (c
))
11133 case GOMP_MAP_PRESENT_ALLOC
:
11134 case GOMP_MAP_PRESENT_FROM
:
11135 case GOMP_MAP_PRESENT_TO
:
11136 case GOMP_MAP_PRESENT_TOFROM
:
11138 *c_p
= OMP_CLAUSE_CHAIN (c
);
11140 OMP_CLAUSE_CHAIN (c
) = NULL
;
11141 *present_map_tail_p
= c
;
11142 present_map_tail_p
= &OMP_CLAUSE_CHAIN (c
);
11153 if (first_map_clause_p
&& present_map_head
)
11155 tree next
= *first_map_clause_p
;
11156 *first_map_clause_p
= present_map_head
;
11157 *present_map_tail_p
= next
;
11160 else if (region_type
& ORT_ACC
)
11162 vec
<omp_mapping_group
> *groups
;
11163 groups
= omp_gather_mapping_groups (list_p
);
11166 hash_map
<tree_operand_hash_no_se
, omp_mapping_group
*> *grpmap
;
11167 grpmap
= omp_index_mapping_groups (groups
);
11169 oacc_resolve_clause_dependencies (groups
, grpmap
);
11170 omp_build_struct_sibling_lists (code
, region_type
, groups
, &grpmap
,
11178 while ((c
= *list_p
) != NULL
)
11180 bool remove
= false;
11181 bool notice_outer
= true;
11182 const char *check_non_private
= NULL
;
11183 unsigned int flags
;
11186 switch (OMP_CLAUSE_CODE (c
))
11188 case OMP_CLAUSE_PRIVATE
:
11189 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
11190 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
11192 flags
|= GOVD_PRIVATE_OUTER_REF
;
11193 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
11196 notice_outer
= false;
11198 case OMP_CLAUSE_SHARED
:
11199 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
11201 case OMP_CLAUSE_FIRSTPRIVATE
:
11202 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
11203 check_non_private
= "firstprivate";
11204 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11206 gcc_assert (code
== OMP_TARGET
);
11207 flags
|= GOVD_FIRSTPRIVATE_IMPLICIT
;
11210 case OMP_CLAUSE_LASTPRIVATE
:
11211 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
11214 case OMP_DISTRIBUTE
:
11215 error_at (OMP_CLAUSE_LOCATION (c
),
11216 "conditional %<lastprivate%> clause on "
11217 "%qs construct", "distribute");
11218 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
11221 error_at (OMP_CLAUSE_LOCATION (c
),
11222 "conditional %<lastprivate%> clause on "
11223 "%qs construct", "taskloop");
11224 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
11229 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
11230 if (code
!= OMP_LOOP
)
11231 check_non_private
= "lastprivate";
11232 decl
= OMP_CLAUSE_DECL (c
);
11233 if (error_operand_p (decl
))
11235 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
11236 && !lang_hooks
.decls
.omp_scalar_p (decl
, true))
11238 error_at (OMP_CLAUSE_LOCATION (c
),
11239 "non-scalar variable %qD in conditional "
11240 "%<lastprivate%> clause", decl
);
11241 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
11243 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
11244 flags
|= GOVD_LASTPRIVATE_CONDITIONAL
;
11245 omp_lastprivate_for_combined_outer_constructs (outer_ctx
, decl
,
11248 case OMP_CLAUSE_REDUCTION
:
11249 if (OMP_CLAUSE_REDUCTION_TASK (c
))
11251 if (region_type
== ORT_WORKSHARE
|| code
== OMP_SCOPE
)
11254 nowait
= omp_find_clause (*list_p
,
11255 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
11257 && (outer_ctx
== NULL
11258 || outer_ctx
->region_type
!= ORT_COMBINED_PARALLEL
))
11260 error_at (OMP_CLAUSE_LOCATION (c
),
11261 "%<task%> reduction modifier on a construct "
11262 "with a %<nowait%> clause");
11263 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
11266 else if ((region_type
& ORT_PARALLEL
) != ORT_PARALLEL
)
11268 error_at (OMP_CLAUSE_LOCATION (c
),
11269 "invalid %<task%> reduction modifier on construct "
11270 "other than %<parallel%>, %qs, %<sections%> or "
11271 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
11272 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
11275 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
11279 error_at (OMP_CLAUSE_LOCATION (c
),
11280 "%<inscan%> %<reduction%> clause on "
11281 "%qs construct", "sections");
11282 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
11285 error_at (OMP_CLAUSE_LOCATION (c
),
11286 "%<inscan%> %<reduction%> clause on "
11287 "%qs construct", "parallel");
11288 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
11291 error_at (OMP_CLAUSE_LOCATION (c
),
11292 "%<inscan%> %<reduction%> clause on "
11293 "%qs construct", "teams");
11294 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
11297 error_at (OMP_CLAUSE_LOCATION (c
),
11298 "%<inscan%> %<reduction%> clause on "
11299 "%qs construct", "taskloop");
11300 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
11303 error_at (OMP_CLAUSE_LOCATION (c
),
11304 "%<inscan%> %<reduction%> clause on "
11305 "%qs construct", "scope");
11306 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
11312 case OMP_CLAUSE_IN_REDUCTION
:
11313 case OMP_CLAUSE_TASK_REDUCTION
:
11314 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
11315 /* OpenACC permits reductions on private variables. */
11316 if (!(region_type
& ORT_ACC
)
11317 /* taskgroup is actually not a worksharing region. */
11318 && code
!= OMP_TASKGROUP
)
11319 check_non_private
= omp_clause_code_name
[OMP_CLAUSE_CODE (c
)];
11320 decl
= OMP_CLAUSE_DECL (c
);
11321 if (TREE_CODE (decl
) == MEM_REF
)
11323 tree type
= TREE_TYPE (decl
);
11324 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
11325 gimplify_ctxp
->into_ssa
= false;
11326 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
11327 NULL
, is_gimple_val
, fb_rvalue
, false)
11330 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
11334 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
11335 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
11338 omp_firstprivatize_variable (ctx
, v
);
11339 omp_notice_variable (ctx
, v
, true);
11341 decl
= TREE_OPERAND (decl
, 0);
11342 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
11344 gimplify_ctxp
->into_ssa
= false;
11345 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
11346 NULL
, is_gimple_val
, fb_rvalue
, false)
11349 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
11353 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
11354 v
= TREE_OPERAND (decl
, 1);
11357 omp_firstprivatize_variable (ctx
, v
);
11358 omp_notice_variable (ctx
, v
, true);
11360 decl
= TREE_OPERAND (decl
, 0);
11362 if (TREE_CODE (decl
) == ADDR_EXPR
11363 || TREE_CODE (decl
) == INDIRECT_REF
)
11364 decl
= TREE_OPERAND (decl
, 0);
11367 case OMP_CLAUSE_LINEAR
:
11368 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
11369 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11376 if (code
== OMP_SIMD
11377 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11379 struct gimplify_omp_ctx
*octx
= outer_ctx
;
11381 && octx
->region_type
== ORT_WORKSHARE
11382 && octx
->combined_loop
11383 && !octx
->distribute
)
11385 if (octx
->outer_context
11386 && (octx
->outer_context
->region_type
11387 == ORT_COMBINED_PARALLEL
))
11388 octx
= octx
->outer_context
->outer_context
;
11390 octx
= octx
->outer_context
;
11393 && octx
->region_type
== ORT_WORKSHARE
11394 && octx
->combined_loop
11395 && octx
->distribute
)
11397 error_at (OMP_CLAUSE_LOCATION (c
),
11398 "%<linear%> clause for variable other than "
11399 "loop iterator specified on construct "
11400 "combined with %<distribute%>");
11405 /* For combined #pragma omp parallel for simd, need to put
11406 lastprivate and perhaps firstprivate too on the
11407 parallel. Similarly for #pragma omp for simd. */
11408 struct gimplify_omp_ctx
*octx
= outer_ctx
;
11409 bool taskloop_seen
= false;
11413 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
11414 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11416 decl
= OMP_CLAUSE_DECL (c
);
11417 if (error_operand_p (decl
))
11423 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11424 flags
|= GOVD_FIRSTPRIVATE
;
11425 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11426 flags
|= GOVD_LASTPRIVATE
;
11428 && octx
->region_type
== ORT_WORKSHARE
11429 && octx
->combined_loop
)
11431 if (octx
->outer_context
11432 && (octx
->outer_context
->region_type
11433 == ORT_COMBINED_PARALLEL
))
11434 octx
= octx
->outer_context
;
11435 else if (omp_check_private (octx
, decl
, false))
11439 && (octx
->region_type
& ORT_TASK
) != 0
11440 && octx
->combined_loop
)
11441 taskloop_seen
= true;
11443 && octx
->region_type
== ORT_COMBINED_PARALLEL
11444 && ((ctx
->region_type
== ORT_WORKSHARE
11445 && octx
== outer_ctx
)
11447 flags
= GOVD_SEEN
| GOVD_SHARED
;
11449 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
11450 == ORT_COMBINED_TEAMS
))
11451 flags
= GOVD_SEEN
| GOVD_SHARED
;
11453 && octx
->region_type
== ORT_COMBINED_TARGET
)
11455 if (flags
& GOVD_LASTPRIVATE
)
11456 flags
= GOVD_SEEN
| GOVD_MAP
;
11461 = splay_tree_lookup (octx
->variables
,
11462 (splay_tree_key
) decl
);
11463 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
11468 omp_add_variable (octx
, decl
, flags
);
11469 if (octx
->outer_context
== NULL
)
11471 octx
= octx
->outer_context
;
11476 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
11477 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
11478 omp_notice_variable (octx
, decl
, true);
11480 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
11481 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
11482 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11484 notice_outer
= false;
11485 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11489 case OMP_CLAUSE_MAP
:
11490 decl
= OMP_CLAUSE_DECL (c
);
11491 if (error_operand_p (decl
))
11498 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
11501 case OMP_TARGET_DATA
:
11502 case OMP_TARGET_ENTER_DATA
:
11503 case OMP_TARGET_EXIT_DATA
:
11504 case OACC_ENTER_DATA
:
11505 case OACC_EXIT_DATA
:
11506 case OACC_HOST_DATA
:
11507 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11508 || (OMP_CLAUSE_MAP_KIND (c
)
11509 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11510 /* For target {,enter ,exit }data only the array slice is
11511 mapped, but not the pointer to it. */
11519 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
11521 struct gimplify_omp_ctx
*octx
;
11522 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
11524 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
11527 = splay_tree_lookup (octx
->variables
,
11528 (splay_tree_key
) decl
);
11530 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
11531 "declared in enclosing %<host_data%> region",
11535 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
11536 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
11537 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
11538 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
11539 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11544 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11545 || (OMP_CLAUSE_MAP_KIND (c
)
11546 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
11547 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11548 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
11550 OMP_CLAUSE_SIZE (c
)
11551 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
11553 if ((region_type
& ORT_TARGET
) != 0)
11554 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
11555 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
11558 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
11560 tree base
= omp_strip_components_and_deref (decl
);
11565 = splay_tree_lookup (ctx
->variables
,
11566 (splay_tree_key
) decl
);
11569 && (n
->value
& (GOVD_MAP
| GOVD_FIRSTPRIVATE
)) != 0)
11574 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
11580 if (TREE_CODE (decl
) == TARGET_EXPR
)
11582 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
11583 is_gimple_lvalue
, fb_lvalue
)
11587 else if (!DECL_P (decl
))
11589 tree d
= decl
, *pd
;
11590 if (TREE_CODE (d
) == ARRAY_REF
)
11592 while (TREE_CODE (d
) == ARRAY_REF
)
11593 d
= TREE_OPERAND (d
, 0);
11594 if (TREE_CODE (d
) == COMPONENT_REF
11595 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
11598 pd
= &OMP_CLAUSE_DECL (c
);
11600 && TREE_CODE (decl
) == INDIRECT_REF
11601 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
11602 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
11604 && (OMP_CLAUSE_MAP_KIND (c
)
11605 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
))
11607 pd
= &TREE_OPERAND (decl
, 0);
11608 decl
= TREE_OPERAND (decl
, 0);
11610 /* An "attach/detach" operation on an update directive should
11611 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11612 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11613 depends on the previous mapping. */
11614 if (code
== OACC_UPDATE
11615 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11616 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_ALWAYS_POINTER
);
11618 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11620 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
11625 gomp_map_kind k
= ((code
== OACC_EXIT_DATA
11626 || code
== OMP_TARGET_EXIT_DATA
)
11627 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
11628 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
11634 while (TREE_CODE (cref
) == ARRAY_REF
)
11635 cref
= TREE_OPERAND (cref
, 0);
11637 if (TREE_CODE (cref
) == INDIRECT_REF
)
11638 cref
= TREE_OPERAND (cref
, 0);
11640 if (TREE_CODE (cref
) == COMPONENT_REF
)
11643 while (base
&& !DECL_P (base
))
11645 tree innerbase
= omp_get_base_pointer (base
);
11652 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
11653 && POINTER_TYPE_P (TREE_TYPE (base
)))
11656 = splay_tree_lookup (ctx
->variables
,
11657 (splay_tree_key
) base
);
11658 n
->value
|= GOVD_SEEN
;
11662 if (code
== OMP_TARGET
&& OMP_CLAUSE_MAP_IN_REDUCTION (c
))
11664 /* Don't gimplify *pd fully at this point, as the base
11665 will need to be adjusted during omp lowering. */
11666 auto_vec
<tree
, 10> expr_stack
;
11668 while (handled_component_p (*p
)
11669 || TREE_CODE (*p
) == INDIRECT_REF
11670 || TREE_CODE (*p
) == ADDR_EXPR
11671 || TREE_CODE (*p
) == MEM_REF
11672 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
11674 expr_stack
.safe_push (*p
);
11675 p
= &TREE_OPERAND (*p
, 0);
11677 for (int i
= expr_stack
.length () - 1; i
>= 0; i
--)
11679 tree t
= expr_stack
[i
];
11680 if (TREE_CODE (t
) == ARRAY_REF
11681 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
11683 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
11685 tree low
= unshare_expr (array_ref_low_bound (t
));
11686 if (!is_gimple_min_invariant (low
))
11688 TREE_OPERAND (t
, 2) = low
;
11689 if (gimplify_expr (&TREE_OPERAND (t
, 2),
11692 fb_rvalue
) == GS_ERROR
)
11696 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
11697 NULL
, is_gimple_reg
,
11698 fb_rvalue
) == GS_ERROR
)
11700 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
11702 tree elmt_size
= array_ref_element_size (t
);
11703 if (!is_gimple_min_invariant (elmt_size
))
11705 elmt_size
= unshare_expr (elmt_size
);
11707 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
,
11710 = size_int (TYPE_ALIGN_UNIT (elmt_type
));
11712 = size_binop (EXACT_DIV_EXPR
, elmt_size
,
11714 TREE_OPERAND (t
, 3) = elmt_size
;
11715 if (gimplify_expr (&TREE_OPERAND (t
, 3),
11718 fb_rvalue
) == GS_ERROR
)
11722 else if (gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
11723 NULL
, is_gimple_reg
,
11724 fb_rvalue
) == GS_ERROR
)
11727 else if (TREE_CODE (t
) == COMPONENT_REF
)
11729 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
11731 tree offset
= component_ref_field_offset (t
);
11732 if (!is_gimple_min_invariant (offset
))
11734 offset
= unshare_expr (offset
);
11735 tree field
= TREE_OPERAND (t
, 1);
11737 = size_int (DECL_OFFSET_ALIGN (field
)
11739 offset
= size_binop (EXACT_DIV_EXPR
, offset
,
11741 TREE_OPERAND (t
, 2) = offset
;
11742 if (gimplify_expr (&TREE_OPERAND (t
, 2),
11745 fb_rvalue
) == GS_ERROR
)
11749 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
11750 NULL
, is_gimple_reg
,
11751 fb_rvalue
) == GS_ERROR
)
11755 for (; expr_stack
.length () > 0; )
11757 tree t
= expr_stack
.pop ();
11759 if (TREE_CODE (t
) == ARRAY_REF
11760 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
11762 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1))
11763 && gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
,
11764 NULL
, is_gimple_val
,
11765 fb_rvalue
) == GS_ERROR
)
11770 else if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
,
11771 fb_lvalue
) == GS_ERROR
)
11778 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
11779 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
11780 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
11781 flags
|= GOVD_MAP_ALWAYS_TO
;
11783 if ((code
== OMP_TARGET
11784 || code
== OMP_TARGET_DATA
11785 || code
== OMP_TARGET_ENTER_DATA
11786 || code
== OMP_TARGET_EXIT_DATA
)
11787 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11789 for (struct gimplify_omp_ctx
*octx
= outer_ctx
; octx
;
11790 octx
= octx
->outer_context
)
11793 = splay_tree_lookup (octx
->variables
,
11794 (splay_tree_key
) OMP_CLAUSE_DECL (c
));
11795 /* If this is contained in an outer OpenMP region as a
11796 firstprivate value, remove the attach/detach. */
11797 if (n
&& (n
->value
& GOVD_FIRSTPRIVATE
))
11799 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
11804 enum gomp_map_kind map_kind
= (code
== OMP_TARGET_EXIT_DATA
11806 : GOMP_MAP_ATTACH
);
11807 OMP_CLAUSE_SET_MAP_KIND (c
, map_kind
);
11812 case OMP_CLAUSE_AFFINITY
:
11813 gimplify_omp_affinity (list_p
, pre_p
);
11816 case OMP_CLAUSE_DOACROSS
:
11817 if (OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SINK
)
11819 tree deps
= OMP_CLAUSE_DECL (c
);
11820 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
11822 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
11823 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
11824 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
11825 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
11826 deps
= TREE_CHAIN (deps
);
11830 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c
)
11831 == OMP_CLAUSE_DOACROSS_SOURCE
);
11833 case OMP_CLAUSE_DEPEND
:
11834 if (handled_depend_iterators
== -1)
11835 handled_depend_iterators
= gimplify_omp_depend (list_p
, pre_p
);
11836 if (handled_depend_iterators
)
11838 if (handled_depend_iterators
== 2)
11842 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
11844 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
11845 NULL
, is_gimple_val
, fb_rvalue
);
11846 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
11848 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
11853 if (OMP_CLAUSE_DECL (c
) != null_pointer_node
)
11855 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
11856 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
11857 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11863 if (code
== OMP_TASK
)
11864 ctx
->has_depend
= true;
11867 case OMP_CLAUSE_TO
:
11868 case OMP_CLAUSE_FROM
:
11869 case OMP_CLAUSE__CACHE_
:
11870 decl
= OMP_CLAUSE_DECL (c
);
11871 if (error_operand_p (decl
))
11876 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
11877 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
11878 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
11879 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
11880 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11885 if (!DECL_P (decl
))
11887 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
11888 NULL
, is_gimple_lvalue
, fb_lvalue
)
11898 case OMP_CLAUSE_USE_DEVICE_PTR
:
11899 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11900 flags
= GOVD_EXPLICIT
;
11903 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
11904 decl
= OMP_CLAUSE_DECL (c
);
11905 while (TREE_CODE (decl
) == INDIRECT_REF
11906 || TREE_CODE (decl
) == ARRAY_REF
)
11907 decl
= TREE_OPERAND (decl
, 0);
11908 flags
= GOVD_EXPLICIT
;
11911 case OMP_CLAUSE_IS_DEVICE_PTR
:
11912 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
11916 decl
= OMP_CLAUSE_DECL (c
);
11918 if (error_operand_p (decl
))
11923 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
11925 tree t
= omp_member_access_dummy_var (decl
);
11928 tree v
= DECL_VALUE_EXPR (decl
);
11929 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
11931 omp_notice_variable (outer_ctx
, t
, true);
11934 if (code
== OACC_DATA
11935 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11936 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
11937 flags
|= GOVD_MAP_0LEN_ARRAY
;
11938 omp_add_variable (ctx
, decl
, flags
);
11939 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11940 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
11941 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
11942 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11944 struct gimplify_omp_ctx
*pctx
11945 = code
== OMP_TARGET
? outer_ctx
: ctx
;
11947 omp_add_variable (pctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
11948 GOVD_LOCAL
| GOVD_SEEN
);
11950 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
11951 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
11953 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
11954 NULL
) == NULL_TREE
)
11955 omp_add_variable (pctx
,
11956 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
11957 GOVD_LOCAL
| GOVD_SEEN
);
11958 gimplify_omp_ctxp
= pctx
;
11959 push_gimplify_context ();
11961 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
11962 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
11964 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
11965 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
11966 pop_gimplify_context
11967 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
11968 push_gimplify_context ();
11969 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
11970 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
11971 pop_gimplify_context
11972 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
11973 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
11974 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
11976 gimplify_omp_ctxp
= outer_ctx
;
11978 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11979 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
11981 gimplify_omp_ctxp
= ctx
;
11982 push_gimplify_context ();
11983 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
11985 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
11987 TREE_SIDE_EFFECTS (bind
) = 1;
11988 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
11989 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
11991 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
11992 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
11993 pop_gimplify_context
11994 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
11995 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
11997 gimplify_omp_ctxp
= outer_ctx
;
11999 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
12000 && OMP_CLAUSE_LINEAR_STMT (c
))
12002 gimplify_omp_ctxp
= ctx
;
12003 push_gimplify_context ();
12004 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
12006 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
12008 TREE_SIDE_EFFECTS (bind
) = 1;
12009 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
12010 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
12012 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
12013 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
12014 pop_gimplify_context
12015 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
12016 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
12018 gimplify_omp_ctxp
= outer_ctx
;
12024 case OMP_CLAUSE_COPYIN
:
12025 case OMP_CLAUSE_COPYPRIVATE
:
12026 decl
= OMP_CLAUSE_DECL (c
);
12027 if (error_operand_p (decl
))
12032 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
12034 && !omp_check_private (ctx
, decl
, true))
12037 if (is_global_var (decl
))
12039 if (DECL_THREAD_LOCAL_P (decl
))
12041 else if (DECL_HAS_VALUE_EXPR_P (decl
))
12043 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
12047 && DECL_THREAD_LOCAL_P (value
))
12052 error_at (OMP_CLAUSE_LOCATION (c
),
12053 "copyprivate variable %qE is not threadprivate"
12054 " or private in outer context", DECL_NAME (decl
));
12057 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
12058 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
12059 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
12061 && ((region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
12062 || (region_type
== ORT_WORKSHARE
12063 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
12064 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
12065 || code
== OMP_LOOP
)))
12066 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
12067 || (code
== OMP_LOOP
12068 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
12069 && ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
12070 == ORT_COMBINED_TEAMS
))))
12073 = splay_tree_lookup (outer_ctx
->variables
,
12074 (splay_tree_key
)decl
);
12075 if (on
== NULL
|| (on
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
12077 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
12078 && TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
12079 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
12080 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
12081 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
12082 == POINTER_TYPE
))))
12083 omp_firstprivatize_variable (outer_ctx
, decl
);
12086 omp_add_variable (outer_ctx
, decl
,
12087 GOVD_SEEN
| GOVD_SHARED
);
12088 if (outer_ctx
->outer_context
)
12089 omp_notice_variable (outer_ctx
->outer_context
, decl
,
12095 omp_notice_variable (outer_ctx
, decl
, true);
12096 if (check_non_private
12097 && (region_type
== ORT_WORKSHARE
|| code
== OMP_SCOPE
)
12098 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
12099 || decl
== OMP_CLAUSE_DECL (c
)
12100 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
12101 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
12103 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
12104 == POINTER_PLUS_EXPR
12105 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
12106 (OMP_CLAUSE_DECL (c
), 0), 0))
12108 && omp_check_private (ctx
, decl
, false))
12110 error ("%s variable %qE is private in outer context",
12111 check_non_private
, DECL_NAME (decl
));
12116 case OMP_CLAUSE_DETACH
:
12117 flags
= GOVD_FIRSTPRIVATE
| GOVD_SEEN
;
12120 case OMP_CLAUSE_IF
:
12121 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
12122 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
12125 for (int i
= 0; i
< 2; i
++)
12126 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
12128 case VOID_CST
: p
[i
] = "cancel"; break;
12129 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
12130 case OMP_SIMD
: p
[i
] = "simd"; break;
12131 case OMP_TASK
: p
[i
] = "task"; break;
12132 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
12133 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
12134 case OMP_TARGET
: p
[i
] = "target"; break;
12135 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
12136 case OMP_TARGET_ENTER_DATA
:
12137 p
[i
] = "target enter data"; break;
12138 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
12139 default: gcc_unreachable ();
12141 error_at (OMP_CLAUSE_LOCATION (c
),
12142 "expected %qs %<if%> clause modifier rather than %qs",
12146 /* Fall through. */
12148 case OMP_CLAUSE_SELF
:
12149 case OMP_CLAUSE_FINAL
:
12150 OMP_CLAUSE_OPERAND (c
, 0)
12151 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
12152 /* Fall through. */
12154 case OMP_CLAUSE_NUM_TEAMS
:
12155 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
12156 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
12157 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)))
12159 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)))
12164 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
12165 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
),
12166 pre_p
, NULL
, true);
12168 /* Fall through. */
12170 case OMP_CLAUSE_SCHEDULE
:
12171 case OMP_CLAUSE_NUM_THREADS
:
12172 case OMP_CLAUSE_THREAD_LIMIT
:
12173 case OMP_CLAUSE_DIST_SCHEDULE
:
12174 case OMP_CLAUSE_DEVICE
:
12175 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEVICE
12176 && OMP_CLAUSE_DEVICE_ANCESTOR (c
))
12178 if (code
!= OMP_TARGET
)
12180 error_at (OMP_CLAUSE_LOCATION (c
),
12181 "%<device%> clause with %<ancestor%> is only "
12182 "allowed on %<target%> construct");
12187 tree clauses
= *orig_list_p
;
12188 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
12189 if (OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_DEVICE
12190 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_FIRSTPRIVATE
12191 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_PRIVATE
12192 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_DEFAULTMAP
12193 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_MAP
12196 error_at (OMP_CLAUSE_LOCATION (c
),
12197 "with %<ancestor%>, only the %<device%>, "
12198 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
12199 "and %<map%> clauses may appear on the "
12205 /* Fall through. */
12207 case OMP_CLAUSE_PRIORITY
:
12208 case OMP_CLAUSE_GRAINSIZE
:
12209 case OMP_CLAUSE_NUM_TASKS
:
12210 case OMP_CLAUSE_FILTER
:
12211 case OMP_CLAUSE_HINT
:
12212 case OMP_CLAUSE_ASYNC
:
12213 case OMP_CLAUSE_WAIT
:
12214 case OMP_CLAUSE_NUM_GANGS
:
12215 case OMP_CLAUSE_NUM_WORKERS
:
12216 case OMP_CLAUSE_VECTOR_LENGTH
:
12217 case OMP_CLAUSE_WORKER
:
12218 case OMP_CLAUSE_VECTOR
:
12219 if (OMP_CLAUSE_OPERAND (c
, 0)
12220 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c
, 0)))
12222 if (error_operand_p (OMP_CLAUSE_OPERAND (c
, 0)))
12227 /* All these clauses care about value, not a particular decl,
12228 so try to force it into a SSA_NAME or fresh temporary. */
12229 OMP_CLAUSE_OPERAND (c
, 0)
12230 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c
, 0),
12231 pre_p
, NULL
, true);
12235 case OMP_CLAUSE_GANG
:
12236 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
12237 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
12239 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
12240 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
12244 case OMP_CLAUSE_NOWAIT
:
12248 case OMP_CLAUSE_ORDERED
:
12249 case OMP_CLAUSE_UNTIED
:
12250 case OMP_CLAUSE_COLLAPSE
:
12251 case OMP_CLAUSE_TILE
:
12252 case OMP_CLAUSE_AUTO
:
12253 case OMP_CLAUSE_SEQ
:
12254 case OMP_CLAUSE_INDEPENDENT
:
12255 case OMP_CLAUSE_MERGEABLE
:
12256 case OMP_CLAUSE_PROC_BIND
:
12257 case OMP_CLAUSE_SAFELEN
:
12258 case OMP_CLAUSE_SIMDLEN
:
12259 case OMP_CLAUSE_NOGROUP
:
12260 case OMP_CLAUSE_THREADS
:
12261 case OMP_CLAUSE_SIMD
:
12262 case OMP_CLAUSE_BIND
:
12263 case OMP_CLAUSE_IF_PRESENT
:
12264 case OMP_CLAUSE_FINALIZE
:
12267 case OMP_CLAUSE_ORDER
:
12268 ctx
->order_concurrent
= true;
12271 case OMP_CLAUSE_DEFAULTMAP
:
12272 enum gimplify_defaultmap_kind gdmkmin
, gdmkmax
;
12273 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c
))
12275 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
:
12276 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL
:
12277 gdmkmin
= GDMK_SCALAR
;
12278 gdmkmax
= GDMK_POINTER
;
12280 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
:
12281 gdmkmin
= GDMK_SCALAR
;
12282 gdmkmax
= GDMK_SCALAR_TARGET
;
12284 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
:
12285 gdmkmin
= gdmkmax
= GDMK_AGGREGATE
;
12287 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
:
12288 gdmkmin
= gdmkmax
= GDMK_ALLOCATABLE
;
12290 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
:
12291 gdmkmin
= gdmkmax
= GDMK_POINTER
;
12294 gcc_unreachable ();
12296 for (int gdmk
= gdmkmin
; gdmk
<= gdmkmax
; gdmk
++)
12297 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c
))
12299 case OMP_CLAUSE_DEFAULTMAP_ALLOC
:
12300 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_ALLOC_ONLY
;
12302 case OMP_CLAUSE_DEFAULTMAP_TO
:
12303 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_TO_ONLY
;
12305 case OMP_CLAUSE_DEFAULTMAP_FROM
:
12306 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FROM_ONLY
;
12308 case OMP_CLAUSE_DEFAULTMAP_TOFROM
:
12309 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
12311 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
:
12312 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
12314 case OMP_CLAUSE_DEFAULTMAP_NONE
:
12315 ctx
->defaultmap
[gdmk
] = 0;
12317 case OMP_CLAUSE_DEFAULTMAP_PRESENT
:
12318 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
12320 case OMP_CLAUSE_DEFAULTMAP_DEFAULT
:
12324 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
12326 case GDMK_SCALAR_TARGET
:
12327 ctx
->defaultmap
[gdmk
] = (lang_GNU_Fortran ()
12328 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
12330 case GDMK_AGGREGATE
:
12331 case GDMK_ALLOCATABLE
:
12332 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
12335 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
12336 if (!lang_GNU_Fortran ())
12337 ctx
->defaultmap
[gdmk
] |= GOVD_MAP_0LEN_ARRAY
;
12340 gcc_unreachable ();
12344 gcc_unreachable ();
12348 case OMP_CLAUSE_ALIGNED
:
12349 decl
= OMP_CLAUSE_DECL (c
);
12350 if (error_operand_p (decl
))
12355 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
12356 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
12361 if (!is_global_var (decl
)
12362 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
12363 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
12366 case OMP_CLAUSE_NONTEMPORAL
:
12367 decl
= OMP_CLAUSE_DECL (c
);
12368 if (error_operand_p (decl
))
12373 omp_add_variable (ctx
, decl
, GOVD_NONTEMPORAL
);
12376 case OMP_CLAUSE_ALLOCATE
:
12377 decl
= OMP_CLAUSE_DECL (c
);
12378 if (error_operand_p (decl
))
12383 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
), pre_p
, NULL
,
12384 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
12389 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
12390 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
12393 else if (code
== OMP_TASKLOOP
12394 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
12395 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
12396 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
12397 pre_p
, NULL
, false);
12400 case OMP_CLAUSE_DEFAULT
:
12401 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
12404 case OMP_CLAUSE_INCLUSIVE
:
12405 case OMP_CLAUSE_EXCLUSIVE
:
12406 decl
= OMP_CLAUSE_DECL (c
);
12408 splay_tree_node n
= splay_tree_lookup (outer_ctx
->variables
,
12409 (splay_tree_key
) decl
);
12410 if (n
== NULL
|| (n
->value
& GOVD_REDUCTION
) == 0)
12412 error_at (OMP_CLAUSE_LOCATION (c
),
12413 "%qD specified in %qs clause but not in %<inscan%> "
12414 "%<reduction%> clause on the containing construct",
12415 decl
, omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
12420 n
->value
|= GOVD_REDUCTION_INSCAN
;
12421 if (outer_ctx
->region_type
== ORT_SIMD
12422 && outer_ctx
->outer_context
12423 && outer_ctx
->outer_context
->region_type
== ORT_WORKSHARE
)
12425 n
= splay_tree_lookup (outer_ctx
->outer_context
->variables
,
12426 (splay_tree_key
) decl
);
12427 if (n
&& (n
->value
& GOVD_REDUCTION
) != 0)
12428 n
->value
|= GOVD_REDUCTION_INSCAN
;
12434 case OMP_CLAUSE_NOHOST
:
12436 gcc_unreachable ();
12439 if (code
== OACC_DATA
12440 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12441 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12442 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12445 *list_p
= OMP_CLAUSE_CHAIN (c
);
12447 list_p
= &OMP_CLAUSE_CHAIN (c
);
12450 ctx
->clauses
= *orig_list_p
;
12451 gimplify_omp_ctxp
= ctx
;
12454 /* Return true if DECL is a candidate for shared to firstprivate
12455 optimization. We only consider non-addressable scalars, not
12456 too big, and not references. */
/* NOTE(review): the text below is line-mangled and the embedded
   original line numbers jump (12457-12458, 12460, 12462, 12467,
   12469, 12472, 12474-12477 are absent), so the return type,
   braces and the early "return false;" statements are not visible
   here.  Only comments were added; verify against upstream.  */
12459 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
/* An addressable DECL can be reached through pointers, so it is not
   safe to turn shared into firstprivate -- presumably an early
   bail-out follows on the missing line.  */
12461 if (TREE_ADDRESSABLE (decl
))
12463 tree type
= TREE_TYPE (decl
);
/* Only plain register-type scalars qualify: reject reference types
   and addressable types (per the header comment above).  */
12464 if (!is_gimple_reg_type (type
)
12465 || TREE_CODE (type
) == REFERENCE_TYPE
12466 || TREE_ADDRESSABLE (type
))
12468 /* Don't optimize too large decls, as each thread/task will have
12470 HOST_WIDE_INT len
= int_size_in_bytes (type
);
/* Size cap: at most four pointers' worth of bytes.  A -1 result
   from int_size_in_bytes indicates a non-constant size -- TODO
   confirm against the GCC internals documentation.  */
12471 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
/* Variables privatized by reference are excluded as well.  */
12473 if (omp_privatize_by_reference (decl
))
12478 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12479 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
12480 GOVD_WRITTEN in outer contexts. */
/* NOTE(review): mangled extraction -- original lines 12481-12482,
   12484, 12486, 12489-12490, 12492, 12494-12495 and 12497-12500
   are absent, so the return type, braces and the loop's
   continue/break statements are not visible here.  */
12483 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
/* Walk outward from CTX through every enclosing OMP context.  */
12485 for (; ctx
; ctx
= ctx
->outer_context
)
/* Look DECL up in this context's variable splay tree.  */
12487 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
12488 (splay_tree_key
) decl
);
/* DECL is shared in this context: record that it is written so the
   shared->firstprivate optimization is suppressed for it.  */
12491 else if (n
->value
& GOVD_SHARED
)
12493 n
->value
|= GOVD_WRITTEN
;
/* Any other data-sharing classification -- presumably the outward
   walk stops here (continuation not visible in this extract).  */
12496 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
12501 /* Helper callback for walk_gimple_seq to discover possible stores
12502 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12503 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12507 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
12509 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
12511 *walk_subtrees
= 0;
12518 if (handled_component_p (op
))
12519 op
= TREE_OPERAND (op
, 0);
12520 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
12521 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
12522 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
12527 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
12530 omp_mark_stores (gimplify_omp_ctxp
, op
);
12534 /* Helper callback for walk_gimple_seq to discover possible stores
12535 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12536 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12540 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
12541 bool *handled_ops_p
,
12542 struct walk_stmt_info
*wi
)
12544 gimple
*stmt
= gsi_stmt (*gsi_p
);
12545 switch (gimple_code (stmt
))
12547 /* Don't recurse on OpenMP constructs for which
12548 gimplify_adjust_omp_clauses already handled the bodies,
12549 except handle gimple_omp_for_pre_body. */
12550 case GIMPLE_OMP_FOR
:
12551 *handled_ops_p
= true;
12552 if (gimple_omp_for_pre_body (stmt
))
12553 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
12554 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
12556 case GIMPLE_OMP_PARALLEL
:
12557 case GIMPLE_OMP_TASK
:
12558 case GIMPLE_OMP_SECTIONS
:
12559 case GIMPLE_OMP_SINGLE
:
12560 case GIMPLE_OMP_SCOPE
:
12561 case GIMPLE_OMP_TARGET
:
12562 case GIMPLE_OMP_TEAMS
:
12563 case GIMPLE_OMP_CRITICAL
:
12564 *handled_ops_p
= true;
12572 struct gimplify_adjust_omp_clauses_data
12578 /* For all variables that were not actually used within the context,
12579 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
12582 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
12584 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
12586 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
12587 tree decl
= (tree
) n
->key
;
12588 unsigned flags
= n
->value
;
12589 enum omp_clause_code code
;
12591 bool private_debug
;
12593 if (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
12594 && (flags
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0)
12595 flags
= GOVD_SHARED
| GOVD_SEEN
| GOVD_WRITTEN
;
12596 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
12598 if ((flags
& GOVD_SEEN
) == 0)
12600 if (flags
& GOVD_DEBUG_PRIVATE
)
12602 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
12603 private_debug
= true;
12605 else if (flags
& GOVD_MAP
)
12606 private_debug
= false;
12609 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
12610 !!(flags
& GOVD_SHARED
));
12612 code
= OMP_CLAUSE_PRIVATE
;
12613 else if (flags
& GOVD_MAP
)
12615 code
= OMP_CLAUSE_MAP
;
12616 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
12617 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
12619 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
12623 && DECL_IN_CONSTANT_POOL (decl
)
12624 && !lookup_attribute ("omp declare target",
12625 DECL_ATTRIBUTES (decl
)))
12627 tree id
= get_identifier ("omp declare target");
12628 DECL_ATTRIBUTES (decl
)
12629 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
12630 varpool_node
*node
= varpool_node::get (decl
);
12633 node
->offloadable
= 1;
12634 if (ENABLE_OFFLOADING
)
12635 g
->have_offload
= true;
12639 else if (flags
& GOVD_SHARED
)
12641 if (is_global_var (decl
))
12643 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
12644 while (ctx
!= NULL
)
12647 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12648 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
12649 | GOVD_PRIVATE
| GOVD_REDUCTION
12650 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
12652 ctx
= ctx
->outer_context
;
12657 code
= OMP_CLAUSE_SHARED
;
12658 /* Don't optimize shared into firstprivate for read-only vars
12659 on tasks with depend clause, we shouldn't try to copy them
12660 until the dependencies are satisfied. */
12661 if (gimplify_omp_ctxp
->has_depend
)
12662 flags
|= GOVD_WRITTEN
;
12664 else if (flags
& GOVD_PRIVATE
)
12665 code
= OMP_CLAUSE_PRIVATE
;
12666 else if (flags
& GOVD_FIRSTPRIVATE
)
12668 code
= OMP_CLAUSE_FIRSTPRIVATE
;
12669 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
12670 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
12671 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
12673 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12674 "%<target%> construct", decl
);
12678 else if (flags
& GOVD_LASTPRIVATE
)
12679 code
= OMP_CLAUSE_LASTPRIVATE
;
12680 else if (flags
& (GOVD_ALIGNED
| GOVD_NONTEMPORAL
))
12682 else if (flags
& GOVD_CONDTEMP
)
12684 code
= OMP_CLAUSE__CONDTEMP_
;
12685 gimple_add_tmp_var (decl
);
12688 gcc_unreachable ();
12690 if (((flags
& GOVD_LASTPRIVATE
)
12691 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
12692 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
12693 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
12695 tree chain
= *list_p
;
12696 clause
= build_omp_clause (input_location
, code
);
12697 OMP_CLAUSE_DECL (clause
) = decl
;
12698 OMP_CLAUSE_CHAIN (clause
) = chain
;
12700 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
12701 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
12702 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
12703 else if (code
== OMP_CLAUSE_SHARED
12704 && (flags
& GOVD_WRITTEN
) == 0
12705 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
12706 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
12707 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
12708 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
12709 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
12711 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
12712 OMP_CLAUSE_DECL (nc
) = decl
;
12713 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
12714 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
12715 OMP_CLAUSE_DECL (clause
)
12716 = build_simple_mem_ref_loc (input_location
, decl
);
12717 OMP_CLAUSE_DECL (clause
)
12718 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
12719 build_int_cst (build_pointer_type (char_type_node
), 0));
12720 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
12721 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
12722 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
12723 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
12724 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
12725 OMP_CLAUSE_CHAIN (nc
) = chain
;
12726 OMP_CLAUSE_CHAIN (clause
) = nc
;
12727 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12728 gimplify_omp_ctxp
= ctx
->outer_context
;
12729 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
12730 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
12731 gimplify_omp_ctxp
= ctx
;
12733 else if (code
== OMP_CLAUSE_MAP
)
12736 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12737 switch (flags
& (GOVD_MAP_TO_ONLY
12739 | GOVD_MAP_FORCE_PRESENT
12740 | GOVD_MAP_ALLOC_ONLY
12741 | GOVD_MAP_FROM_ONLY
))
12744 kind
= GOMP_MAP_TOFROM
;
12746 case GOVD_MAP_FORCE
:
12747 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
12749 case GOVD_MAP_TO_ONLY
:
12750 kind
= GOMP_MAP_TO
;
12752 case GOVD_MAP_FROM_ONLY
:
12753 kind
= GOMP_MAP_FROM
;
12755 case GOVD_MAP_ALLOC_ONLY
:
12756 kind
= GOMP_MAP_ALLOC
;
12758 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
12759 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
12761 case GOVD_MAP_FORCE_PRESENT
:
12762 kind
= GOMP_MAP_FORCE_PRESENT
;
12764 case GOVD_MAP_FORCE_PRESENT
| GOVD_MAP_ALLOC_ONLY
:
12765 kind
= GOMP_MAP_FORCE_PRESENT
;
12768 gcc_unreachable ();
12770 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
12771 /* Setting of the implicit flag for the runtime is currently disabled for
12773 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0)
12774 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause
) = 1;
12775 if (DECL_SIZE (decl
)
12776 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
12778 tree decl2
= DECL_VALUE_EXPR (decl
);
12779 gcc_assert (INDIRECT_REF_P (decl2
));
12780 decl2
= TREE_OPERAND (decl2
, 0);
12781 gcc_assert (DECL_P (decl2
));
12782 tree mem
= build_simple_mem_ref (decl2
);
12783 OMP_CLAUSE_DECL (clause
) = mem
;
12784 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
12785 if (gimplify_omp_ctxp
->outer_context
)
12787 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
12788 omp_notice_variable (ctx
, decl2
, true);
12789 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
12791 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
12793 OMP_CLAUSE_DECL (nc
) = decl
;
12794 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
12795 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
12796 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
12798 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
12799 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
12800 OMP_CLAUSE_CHAIN (clause
) = nc
;
12802 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
12803 && omp_privatize_by_reference (decl
))
12805 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
12806 OMP_CLAUSE_SIZE (clause
)
12807 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
12808 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12809 gimplify_omp_ctxp
= ctx
->outer_context
;
12810 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
12811 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
12812 gimplify_omp_ctxp
= ctx
;
12813 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
12815 OMP_CLAUSE_DECL (nc
) = decl
;
12816 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
12817 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
12818 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
12819 OMP_CLAUSE_CHAIN (clause
) = nc
;
12822 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
12824 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
12826 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
12827 OMP_CLAUSE_DECL (nc
) = decl
;
12828 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
12829 OMP_CLAUSE_CHAIN (nc
) = chain
;
12830 OMP_CLAUSE_CHAIN (clause
) = nc
;
12831 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12832 gimplify_omp_ctxp
= ctx
->outer_context
;
12833 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
12834 (ctx
->region_type
& ORT_ACC
) != 0);
12835 gimplify_omp_ctxp
= ctx
;
12838 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12839 gimplify_omp_ctxp
= ctx
->outer_context
;
12840 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12841 in simd. Those are only added for the local vars inside of simd body
12842 and they don't need to be e.g. default constructible. */
12843 if (code
!= OMP_CLAUSE_PRIVATE
|| ctx
->region_type
!= ORT_SIMD
)
12844 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
,
12845 (ctx
->region_type
& ORT_ACC
) != 0);
12846 if (gimplify_omp_ctxp
)
12847 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
12848 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
12849 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
12850 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
12852 gimplify_omp_ctxp
= ctx
;
12857 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
12858 enum tree_code code
)
12860 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12861 tree
*orig_list_p
= list_p
;
12863 bool has_inscan_reductions
= false;
12867 struct gimplify_omp_ctx
*octx
;
12868 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
12869 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
12873 struct walk_stmt_info wi
;
12874 memset (&wi
, 0, sizeof (wi
));
12875 walk_gimple_seq (body
, omp_find_stores_stmt
,
12876 omp_find_stores_op
, &wi
);
12880 if (ctx
->add_safelen1
)
12882 /* If there are VLAs in the body of simd loop, prevent
12884 gcc_assert (ctx
->region_type
== ORT_SIMD
);
12885 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
12886 OMP_CLAUSE_SAFELEN_EXPR (c
) = integer_one_node
;
12887 OMP_CLAUSE_CHAIN (c
) = *list_p
;
12889 list_p
= &OMP_CLAUSE_CHAIN (c
);
12892 if (ctx
->region_type
== ORT_WORKSHARE
12893 && ctx
->outer_context
12894 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
)
12896 for (c
= ctx
->outer_context
->clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12897 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12898 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12900 decl
= OMP_CLAUSE_DECL (c
);
12902 = splay_tree_lookup (ctx
->outer_context
->variables
,
12903 (splay_tree_key
) decl
);
12904 gcc_checking_assert (!splay_tree_lookup (ctx
->variables
,
12905 (splay_tree_key
) decl
));
12906 omp_add_variable (ctx
, decl
, n
->value
);
12907 tree c2
= copy_node (c
);
12908 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
12910 if ((n
->value
& GOVD_FIRSTPRIVATE
) == 0)
12912 c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12913 OMP_CLAUSE_FIRSTPRIVATE
);
12914 OMP_CLAUSE_DECL (c2
) = decl
;
12915 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
12920 tree attach_list
= NULL_TREE
;
12921 tree
*attach_tail
= &attach_list
;
12923 while ((c
= *list_p
) != NULL
)
12926 bool remove
= false;
12927 bool move_attach
= false;
12929 switch (OMP_CLAUSE_CODE (c
))
12931 case OMP_CLAUSE_FIRSTPRIVATE
:
12932 if ((ctx
->region_type
& ORT_TARGET
)
12933 && (ctx
->region_type
& ORT_ACC
) == 0
12934 && TYPE_ATOMIC (strip_array_types
12935 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
12937 error_at (OMP_CLAUSE_LOCATION (c
),
12938 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12939 "%<target%> construct", OMP_CLAUSE_DECL (c
));
12943 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
12945 decl
= OMP_CLAUSE_DECL (c
);
12946 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12947 if ((n
->value
& GOVD_MAP
) != 0)
12952 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c
) = 0;
12953 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
) = 0;
12956 case OMP_CLAUSE_PRIVATE
:
12957 case OMP_CLAUSE_SHARED
:
12958 case OMP_CLAUSE_LINEAR
:
12959 decl
= OMP_CLAUSE_DECL (c
);
12960 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12961 remove
= !(n
->value
& GOVD_SEEN
);
12962 if ((n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0
12963 && code
== OMP_PARALLEL
12964 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12968 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
12969 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
12970 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
12972 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
12973 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
12975 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
12976 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
12978 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
12981 n
->value
|= GOVD_WRITTEN
;
12982 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
12983 && (n
->value
& GOVD_WRITTEN
) == 0
12985 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
12986 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
12987 else if (DECL_P (decl
)
12988 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
12989 && (n
->value
& GOVD_WRITTEN
) != 0)
12990 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
12991 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
12992 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
12993 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
12996 n
->value
&= ~GOVD_EXPLICIT
;
12999 case OMP_CLAUSE_LASTPRIVATE
:
13000 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
13001 accurately reflect the presence of a FIRSTPRIVATE clause. */
13002 decl
= OMP_CLAUSE_DECL (c
);
13003 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13004 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
13005 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
13006 if (code
== OMP_DISTRIBUTE
13007 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
13010 error_at (OMP_CLAUSE_LOCATION (c
),
13011 "same variable used in %<firstprivate%> and "
13012 "%<lastprivate%> clauses on %<distribute%> "
13016 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
13018 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
13019 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
13020 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) && code
== OMP_PARALLEL
)
13024 case OMP_CLAUSE_ALIGNED
:
13025 decl
= OMP_CLAUSE_DECL (c
);
13026 if (!is_global_var (decl
))
13028 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13029 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
13030 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
13032 struct gimplify_omp_ctx
*octx
;
13034 && (n
->value
& (GOVD_DATA_SHARE_CLASS
13035 & ~GOVD_FIRSTPRIVATE
)))
13038 for (octx
= ctx
->outer_context
; octx
;
13039 octx
= octx
->outer_context
)
13041 n
= splay_tree_lookup (octx
->variables
,
13042 (splay_tree_key
) decl
);
13045 if (n
->value
& GOVD_LOCAL
)
13047 /* We have to avoid assigning a shared variable
13048 to itself when trying to add
13049 __builtin_assume_aligned. */
13050 if (n
->value
& GOVD_SHARED
)
13058 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
13060 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13061 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
13066 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
13067 decl
= OMP_CLAUSE_DECL (c
);
13068 while (INDIRECT_REF_P (decl
)
13069 || TREE_CODE (decl
) == ARRAY_REF
)
13070 decl
= TREE_OPERAND (decl
, 0);
13071 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13072 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
13075 case OMP_CLAUSE_IS_DEVICE_PTR
:
13076 case OMP_CLAUSE_NONTEMPORAL
:
13077 decl
= OMP_CLAUSE_DECL (c
);
13078 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13079 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
13082 case OMP_CLAUSE_MAP
:
13083 switch (OMP_CLAUSE_MAP_KIND (c
))
13085 case GOMP_MAP_PRESENT_ALLOC
:
13086 case GOMP_MAP_PRESENT_TO
:
13087 case GOMP_MAP_PRESENT_FROM
:
13088 case GOMP_MAP_PRESENT_TOFROM
:
13089 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_PRESENT
);
13094 if (code
== OMP_TARGET_EXIT_DATA
13095 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
13100 /* If we have a target region, we can push all the attaches to the
13101 end of the list (we may have standalone "attach" operations
13102 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
13103 the attachment point AND the pointed-to block have been mapped).
13104 If we have something else, e.g. "enter data", we need to keep
13105 "attach" nodes together with the previous node they attach to so
13106 that separate "exit data" operations work properly (see
13107 libgomp/target.c). */
13108 if ((ctx
->region_type
& ORT_TARGET
) != 0
13109 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
13110 || (OMP_CLAUSE_MAP_KIND (c
)
13111 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
)))
13112 move_attach
= true;
13113 decl
= OMP_CLAUSE_DECL (c
);
13114 /* Data clauses associated with reductions must be
13115 compatible with present_or_copy. Warn and adjust the clause
13116 if that is not the case. */
13117 if (ctx
->region_type
== ORT_ACC_PARALLEL
13118 || ctx
->region_type
== ORT_ACC_SERIAL
)
13120 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
13124 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
13126 if (n
&& (n
->value
& GOVD_REDUCTION
))
13128 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
13130 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
13131 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
13132 && kind
!= GOMP_MAP_FORCE_PRESENT
13133 && kind
!= GOMP_MAP_POINTER
)
13135 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
13136 "incompatible data clause with reduction "
13137 "on %qE; promoting to %<present_or_copy%>",
13139 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
13143 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
13144 && (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
))
13149 if (!DECL_P (decl
))
13151 if ((ctx
->region_type
& ORT_TARGET
) != 0
13152 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
13154 if (INDIRECT_REF_P (decl
)
13155 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
13156 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
13157 == REFERENCE_TYPE
))
13158 decl
= TREE_OPERAND (decl
, 0);
13159 if (TREE_CODE (decl
) == COMPONENT_REF
)
13161 while (TREE_CODE (decl
) == COMPONENT_REF
)
13162 decl
= TREE_OPERAND (decl
, 0);
13165 n
= splay_tree_lookup (ctx
->variables
,
13166 (splay_tree_key
) decl
);
13167 if (!(n
->value
& GOVD_SEEN
))
13174 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13175 if ((ctx
->region_type
& ORT_TARGET
) != 0
13176 && !(n
->value
& GOVD_SEEN
)
13177 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
13178 && (!is_global_var (decl
)
13179 || !lookup_attribute ("omp declare target link",
13180 DECL_ATTRIBUTES (decl
))))
13183 /* For struct element mapping, if struct is never referenced
13184 in target block and none of the mapping has always modifier,
13185 remove all the struct element mappings, which immediately
13186 follow the GOMP_MAP_STRUCT map clause. */
13187 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
13189 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
13191 OMP_CLAUSE_CHAIN (c
)
13192 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
13195 else if (DECL_SIZE (decl
)
13196 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
13197 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
13198 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
13199 && (OMP_CLAUSE_MAP_KIND (c
)
13200 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
13202 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
13203 for these, TREE_CODE (DECL_SIZE (decl)) will always be
13205 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
13207 tree decl2
= DECL_VALUE_EXPR (decl
);
13208 gcc_assert (INDIRECT_REF_P (decl2
));
13209 decl2
= TREE_OPERAND (decl2
, 0);
13210 gcc_assert (DECL_P (decl2
));
13211 tree mem
= build_simple_mem_ref (decl2
);
13212 OMP_CLAUSE_DECL (c
) = mem
;
13213 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
13214 if (ctx
->outer_context
)
13216 omp_notice_variable (ctx
->outer_context
, decl2
, true);
13217 omp_notice_variable (ctx
->outer_context
,
13218 OMP_CLAUSE_SIZE (c
), true);
13220 if (((ctx
->region_type
& ORT_TARGET
) != 0
13221 || !ctx
->target_firstprivatize_array_bases
)
13222 && ((n
->value
& GOVD_SEEN
) == 0
13223 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
13225 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
13227 OMP_CLAUSE_DECL (nc
) = decl
;
13228 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
13229 if (ctx
->target_firstprivatize_array_bases
)
13230 OMP_CLAUSE_SET_MAP_KIND (nc
,
13231 GOMP_MAP_FIRSTPRIVATE_POINTER
);
13233 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
13234 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
13235 OMP_CLAUSE_CHAIN (c
) = nc
;
13241 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
13242 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
13243 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
13244 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
13249 case OMP_CLAUSE_TO
:
13250 case OMP_CLAUSE_FROM
:
13251 case OMP_CLAUSE__CACHE_
:
13252 decl
= OMP_CLAUSE_DECL (c
);
13253 if (!DECL_P (decl
))
13255 if (DECL_SIZE (decl
)
13256 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
13258 tree decl2
= DECL_VALUE_EXPR (decl
);
13259 gcc_assert (INDIRECT_REF_P (decl2
));
13260 decl2
= TREE_OPERAND (decl2
, 0);
13261 gcc_assert (DECL_P (decl2
));
13262 tree mem
= build_simple_mem_ref (decl2
);
13263 OMP_CLAUSE_DECL (c
) = mem
;
13264 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
13265 if (ctx
->outer_context
)
13267 omp_notice_variable (ctx
->outer_context
, decl2
, true);
13268 omp_notice_variable (ctx
->outer_context
,
13269 OMP_CLAUSE_SIZE (c
), true);
13272 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
13273 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
13276 case OMP_CLAUSE_REDUCTION
:
13277 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
13279 decl
= OMP_CLAUSE_DECL (c
);
13280 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13281 if ((n
->value
& GOVD_REDUCTION_INSCAN
) == 0)
13284 error_at (OMP_CLAUSE_LOCATION (c
),
13285 "%qD specified in %<inscan%> %<reduction%> clause "
13286 "but not in %<scan%> directive clause", decl
);
13289 has_inscan_reductions
= true;
13292 case OMP_CLAUSE_IN_REDUCTION
:
13293 case OMP_CLAUSE_TASK_REDUCTION
:
13294 decl
= OMP_CLAUSE_DECL (c
);
13295 /* OpenACC reductions need a present_or_copy data clause.
13296 Add one if necessary. Emit error when the reduction is private. */
13297 if (ctx
->region_type
== ORT_ACC_PARALLEL
13298 || ctx
->region_type
== ORT_ACC_SERIAL
)
13300 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13301 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
13304 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
13305 "reduction on %qE", DECL_NAME (decl
));
13307 else if ((n
->value
& GOVD_MAP
) == 0)
13309 tree next
= OMP_CLAUSE_CHAIN (c
);
13310 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
13311 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
13312 OMP_CLAUSE_DECL (nc
) = decl
;
13313 OMP_CLAUSE_CHAIN (c
) = nc
;
13314 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
13319 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
13320 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
13322 nc
= OMP_CLAUSE_CHAIN (nc
);
13324 OMP_CLAUSE_CHAIN (nc
) = next
;
13325 n
->value
|= GOVD_MAP
;
13329 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
13330 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
13333 case OMP_CLAUSE_ALLOCATE
:
13334 decl
= OMP_CLAUSE_DECL (c
);
13335 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
13336 if (n
!= NULL
&& !(n
->value
& GOVD_SEEN
))
13338 if ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
| GOVD_LINEAR
))
13340 && (n
->value
& (GOVD_REDUCTION
| GOVD_LASTPRIVATE
)) == 0)
13344 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
13345 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)) != INTEGER_CST
13346 && ((ctx
->region_type
& (ORT_PARALLEL
| ORT_TARGET
)) != 0
13347 || (ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASK
13348 || (ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
))
13350 tree allocator
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
13351 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) allocator
);
13354 enum omp_clause_default_kind default_kind
13355 = ctx
->default_kind
;
13356 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
13357 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
13359 ctx
->default_kind
= default_kind
;
13362 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
13367 case OMP_CLAUSE_COPYIN
:
13368 case OMP_CLAUSE_COPYPRIVATE
:
13369 case OMP_CLAUSE_IF
:
13370 case OMP_CLAUSE_SELF
:
13371 case OMP_CLAUSE_NUM_THREADS
:
13372 case OMP_CLAUSE_NUM_TEAMS
:
13373 case OMP_CLAUSE_THREAD_LIMIT
:
13374 case OMP_CLAUSE_DIST_SCHEDULE
:
13375 case OMP_CLAUSE_DEVICE
:
13376 case OMP_CLAUSE_SCHEDULE
:
13377 case OMP_CLAUSE_NOWAIT
:
13378 case OMP_CLAUSE_ORDERED
:
13379 case OMP_CLAUSE_DEFAULT
:
13380 case OMP_CLAUSE_UNTIED
:
13381 case OMP_CLAUSE_COLLAPSE
:
13382 case OMP_CLAUSE_FINAL
:
13383 case OMP_CLAUSE_MERGEABLE
:
13384 case OMP_CLAUSE_PROC_BIND
:
13385 case OMP_CLAUSE_SAFELEN
:
13386 case OMP_CLAUSE_SIMDLEN
:
13387 case OMP_CLAUSE_DEPEND
:
13388 case OMP_CLAUSE_DOACROSS
:
13389 case OMP_CLAUSE_PRIORITY
:
13390 case OMP_CLAUSE_GRAINSIZE
:
13391 case OMP_CLAUSE_NUM_TASKS
:
13392 case OMP_CLAUSE_NOGROUP
:
13393 case OMP_CLAUSE_THREADS
:
13394 case OMP_CLAUSE_SIMD
:
13395 case OMP_CLAUSE_FILTER
:
13396 case OMP_CLAUSE_HINT
:
13397 case OMP_CLAUSE_DEFAULTMAP
:
13398 case OMP_CLAUSE_ORDER
:
13399 case OMP_CLAUSE_BIND
:
13400 case OMP_CLAUSE_DETACH
:
13401 case OMP_CLAUSE_USE_DEVICE_PTR
:
13402 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13403 case OMP_CLAUSE_ASYNC
:
13404 case OMP_CLAUSE_WAIT
:
13405 case OMP_CLAUSE_INDEPENDENT
:
13406 case OMP_CLAUSE_NUM_GANGS
:
13407 case OMP_CLAUSE_NUM_WORKERS
:
13408 case OMP_CLAUSE_VECTOR_LENGTH
:
13409 case OMP_CLAUSE_GANG
:
13410 case OMP_CLAUSE_WORKER
:
13411 case OMP_CLAUSE_VECTOR
:
13412 case OMP_CLAUSE_AUTO
:
13413 case OMP_CLAUSE_SEQ
:
13414 case OMP_CLAUSE_TILE
:
13415 case OMP_CLAUSE_IF_PRESENT
:
13416 case OMP_CLAUSE_FINALIZE
:
13417 case OMP_CLAUSE_INCLUSIVE
:
13418 case OMP_CLAUSE_EXCLUSIVE
:
13421 case OMP_CLAUSE_NOHOST
:
13423 gcc_unreachable ();
13427 *list_p
= OMP_CLAUSE_CHAIN (c
);
13428 else if (move_attach
)
13430 /* Remove attach node from here, separate out into its own list. */
13432 *list_p
= OMP_CLAUSE_CHAIN (c
);
13433 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
13434 attach_tail
= &OMP_CLAUSE_CHAIN (c
);
13437 list_p
= &OMP_CLAUSE_CHAIN (c
);
13440 /* Splice attach nodes at the end of the list. */
13443 *list_p
= attach_list
;
13444 list_p
= attach_tail
;
13447 /* Add in any implicit data sharing. */
13448 struct gimplify_adjust_omp_clauses_data data
;
13449 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0)
13451 /* OpenMP. Implicit clauses are added at the start of the clause list,
13452 but after any non-map clauses. */
13453 tree
*implicit_add_list_p
= orig_list_p
;
13454 while (*implicit_add_list_p
13455 && OMP_CLAUSE_CODE (*implicit_add_list_p
) != OMP_CLAUSE_MAP
)
13456 implicit_add_list_p
= &OMP_CLAUSE_CHAIN (*implicit_add_list_p
);
13457 data
.list_p
= implicit_add_list_p
;
13461 data
.list_p
= list_p
;
13462 data
.pre_p
= pre_p
;
13463 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
13465 if (has_inscan_reductions
)
13466 for (c
= *orig_list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13467 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
13468 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
13470 error_at (OMP_CLAUSE_LOCATION (c
),
13471 "%<inscan%> %<reduction%> clause used together with "
13472 "%<linear%> clause for a variable other than loop "
13477 gimplify_omp_ctxp
= ctx
->outer_context
;
13478 delete_omp_context (ctx
);
13481 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
13482 -1 if unknown yet (simd is involved, won't be known until vectorization)
13483 and 1 if they do. If SCORES is non-NULL, it should point to an array
13484 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
13485 of the CONSTRUCTS (position -1 if it will never match) followed by
13486 number of constructs in the OpenMP context construct trait. If the
13487 score depends on whether it will be in a declare simd clone or not,
13488 the function returns 2 and there will be two sets of the scores, the first
13489 one for the case that it is not in a declare simd clone, the other
13490 that it is in a declare simd clone. */
13493 omp_construct_selector_matches (enum tree_code
*constructs
, int nconstructs
,
13496 int matched
= 0, cnt
= 0;
13497 bool simd_seen
= false;
13498 bool target_seen
= false;
13499 int declare_simd_cnt
= -1;
13500 auto_vec
<enum tree_code
, 16> codes
;
13501 for (struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
; ctx
;)
13503 if (((ctx
->region_type
& ORT_PARALLEL
) && ctx
->code
== OMP_PARALLEL
)
13504 || ((ctx
->region_type
& (ORT_TARGET
| ORT_IMPLICIT_TARGET
| ORT_ACC
))
13505 == ORT_TARGET
&& ctx
->code
== OMP_TARGET
)
13506 || ((ctx
->region_type
& ORT_TEAMS
) && ctx
->code
== OMP_TEAMS
)
13507 || (ctx
->region_type
== ORT_WORKSHARE
&& ctx
->code
== OMP_FOR
)
13508 || (ctx
->region_type
== ORT_SIMD
13509 && ctx
->code
== OMP_SIMD
13510 && !omp_find_clause (ctx
->clauses
, OMP_CLAUSE_BIND
)))
13514 codes
.safe_push (ctx
->code
);
13515 else if (matched
< nconstructs
&& ctx
->code
== constructs
[matched
])
13517 if (ctx
->code
== OMP_SIMD
)
13525 if (ctx
->code
== OMP_TARGET
)
13527 if (scores
== NULL
)
13528 return matched
< nconstructs
? 0 : simd_seen
? -1 : 1;
13529 target_seen
= true;
13533 else if (ctx
->region_type
== ORT_WORKSHARE
13534 && ctx
->code
== OMP_LOOP
13535 && ctx
->outer_context
13536 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
13537 && ctx
->outer_context
->outer_context
13538 && ctx
->outer_context
->outer_context
->code
== OMP_LOOP
13539 && ctx
->outer_context
->outer_context
->distribute
)
13540 ctx
= ctx
->outer_context
->outer_context
;
13541 ctx
= ctx
->outer_context
;
13544 && lookup_attribute ("omp declare simd",
13545 DECL_ATTRIBUTES (current_function_decl
)))
13547 /* Declare simd is a maybe case, it is supposed to be added only to the
13548 omp-simd-clone.cc added clones and not to the base function. */
13549 declare_simd_cnt
= cnt
++;
13551 codes
.safe_push (OMP_SIMD
);
13553 && constructs
[0] == OMP_SIMD
)
13555 gcc_assert (matched
== 0);
13557 if (++matched
== nconstructs
)
13561 if (tree attr
= lookup_attribute ("omp declare variant variant",
13562 DECL_ATTRIBUTES (current_function_decl
)))
13564 enum tree_code variant_constructs
[5];
13565 int variant_nconstructs
= 0;
13567 variant_nconstructs
13568 = omp_constructor_traits_to_codes (TREE_VALUE (attr
),
13569 variant_constructs
);
13570 for (int i
= 0; i
< variant_nconstructs
; i
++)
13574 codes
.safe_push (variant_constructs
[i
]);
13575 else if (matched
< nconstructs
13576 && variant_constructs
[i
] == constructs
[matched
])
13578 if (variant_constructs
[i
] == OMP_SIMD
)
13589 && lookup_attribute ("omp declare target block",
13590 DECL_ATTRIBUTES (current_function_decl
)))
13593 codes
.safe_push (OMP_TARGET
);
13594 else if (matched
< nconstructs
&& constructs
[matched
] == OMP_TARGET
)
13599 for (int pass
= 0; pass
< (declare_simd_cnt
== -1 ? 1 : 2); pass
++)
13601 int j
= codes
.length () - 1;
13602 for (int i
= nconstructs
- 1; i
>= 0; i
--)
13605 && (pass
!= 0 || declare_simd_cnt
!= j
)
13606 && constructs
[i
] != codes
[j
])
13608 if (pass
== 0 && declare_simd_cnt
!= -1 && j
> declare_simd_cnt
)
13613 *scores
++ = ((pass
== 0 && declare_simd_cnt
!= -1)
13614 ? codes
.length () - 1 : codes
.length ());
13616 return declare_simd_cnt
== -1 ? 1 : 2;
13618 if (matched
== nconstructs
)
13619 return simd_seen
? -1 : 1;
13623 /* Gimplify OACC_CACHE. */
13626 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
13628 tree expr
= *expr_p
;
13630 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
13632 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
13635 /* TODO: Do something sensible with this information. */
13637 *expr_p
= NULL_TREE
;
13640 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
13641 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
13642 kind. The entry kind will replace the one in CLAUSE, while the exit
13643 kind will be used in a new omp_clause and returned to the caller. */
13646 gimplify_oacc_declare_1 (tree clause
)
13648 HOST_WIDE_INT kind
, new_op
;
13652 kind
= OMP_CLAUSE_MAP_KIND (clause
);
13656 case GOMP_MAP_ALLOC
:
13657 new_op
= GOMP_MAP_RELEASE
;
13661 case GOMP_MAP_FROM
:
13662 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
13663 new_op
= GOMP_MAP_FROM
;
13667 case GOMP_MAP_TOFROM
:
13668 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
13669 new_op
= GOMP_MAP_FROM
;
13673 case GOMP_MAP_DEVICE_RESIDENT
:
13674 case GOMP_MAP_FORCE_DEVICEPTR
:
13675 case GOMP_MAP_FORCE_PRESENT
:
13676 case GOMP_MAP_LINK
:
13677 case GOMP_MAP_POINTER
:
13682 gcc_unreachable ();
13688 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
13689 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
13690 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
13696 /* Gimplify OACC_DECLARE. */
13699 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
13701 tree expr
= *expr_p
;
13703 tree clauses
, t
, decl
;
13705 clauses
= OACC_DECLARE_CLAUSES (expr
);
13707 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
13708 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
13710 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
13712 decl
= OMP_CLAUSE_DECL (t
);
13714 if (TREE_CODE (decl
) == MEM_REF
)
13715 decl
= TREE_OPERAND (decl
, 0);
13717 if (VAR_P (decl
) && !is_oacc_declared (decl
))
13719 tree attr
= get_identifier ("oacc declare target");
13720 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
13721 DECL_ATTRIBUTES (decl
));
13725 && !is_global_var (decl
)
13726 && DECL_CONTEXT (decl
) == current_function_decl
)
13728 tree c
= gimplify_oacc_declare_1 (t
);
13731 if (oacc_declare_returns
== NULL
)
13732 oacc_declare_returns
= new hash_map
<tree
, tree
>;
13734 oacc_declare_returns
->put (decl
, c
);
13738 if (gimplify_omp_ctxp
)
13739 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
13742 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
13745 gimplify_seq_add_stmt (pre_p
, stmt
);
13747 *expr_p
= NULL_TREE
;
13750 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13751 gimplification of the body, as well as scanning the body for used
13752 variables. We need to do this scan now, because variable-sized
13753 decls will be decomposed during gimplification. */
13756 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
13758 tree expr
= *expr_p
;
13760 gimple_seq body
= NULL
;
13762 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
13763 OMP_PARALLEL_COMBINED (expr
)
13764 ? ORT_COMBINED_PARALLEL
13765 : ORT_PARALLEL
, OMP_PARALLEL
);
13767 push_gimplify_context ();
13769 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
13770 if (gimple_code (g
) == GIMPLE_BIND
)
13771 pop_gimplify_context (g
);
13773 pop_gimplify_context (NULL
);
13775 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
13778 g
= gimple_build_omp_parallel (body
,
13779 OMP_PARALLEL_CLAUSES (expr
),
13780 NULL_TREE
, NULL_TREE
);
13781 if (OMP_PARALLEL_COMBINED (expr
))
13782 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
13783 gimplify_seq_add_stmt (pre_p
, g
);
13784 *expr_p
= NULL_TREE
;
13787 /* Gimplify the contents of an OMP_TASK statement. This involves
13788 gimplification of the body, as well as scanning the body for used
13789 variables. We need to do this scan now, because variable-sized
13790 decls will be decomposed during gimplification. */
13793 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
13795 tree expr
= *expr_p
;
13797 gimple_seq body
= NULL
;
13798 bool nowait
= false;
13799 bool has_depend
= false;
13801 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
13803 for (tree c
= OMP_TASK_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13804 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
13807 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET
)
13809 error_at (OMP_CLAUSE_LOCATION (c
),
13810 "%<mutexinoutset%> kind in %<depend%> clause on a "
13811 "%<taskwait%> construct");
13815 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NOWAIT
)
13817 if (nowait
&& !has_depend
)
13819 error_at (EXPR_LOCATION (expr
),
13820 "%<taskwait%> construct with %<nowait%> clause but no "
13821 "%<depend%> clauses");
13822 *expr_p
= NULL_TREE
;
13827 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
13828 omp_find_clause (OMP_TASK_CLAUSES (expr
),
13830 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
13832 if (OMP_TASK_BODY (expr
))
13834 push_gimplify_context ();
13836 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
13837 if (gimple_code (g
) == GIMPLE_BIND
)
13838 pop_gimplify_context (g
);
13840 pop_gimplify_context (NULL
);
13843 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
13846 g
= gimple_build_omp_task (body
,
13847 OMP_TASK_CLAUSES (expr
),
13848 NULL_TREE
, NULL_TREE
,
13849 NULL_TREE
, NULL_TREE
, NULL_TREE
);
13850 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
13851 gimple_omp_task_set_taskwait_p (g
, true);
13852 gimplify_seq_add_stmt (pre_p
, g
);
13853 *expr_p
= NULL_TREE
;
13856 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
13857 force it into a temporary initialized in PRE_P and add firstprivate clause
13858 to ORIG_FOR_STMT. */
13861 gimplify_omp_taskloop_expr (tree type
, tree
*tp
, gimple_seq
*pre_p
,
13862 tree orig_for_stmt
)
13864 if (*tp
== NULL
|| is_gimple_constant (*tp
))
13867 *tp
= get_initialized_tmp_var (*tp
, pre_p
, NULL
, false);
13868 /* Reference to pointer conversion is considered useless,
13869 but is significant for firstprivate clause. Force it
13872 && TREE_CODE (type
) == POINTER_TYPE
13873 && TREE_CODE (TREE_TYPE (*tp
)) == REFERENCE_TYPE
)
13875 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
13876 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
, *tp
);
13877 gimplify_and_add (m
, pre_p
);
13881 tree c
= build_omp_clause (input_location
, OMP_CLAUSE_FIRSTPRIVATE
);
13882 OMP_CLAUSE_DECL (c
) = *tp
;
13883 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
13884 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
13887 /* Helper function of gimplify_omp_for, find OMP_ORDERED with
13888 null OMP_ORDERED_BODY inside of OMP_FOR's body. */
13891 find_standalone_omp_ordered (tree
*tp
, int *walk_subtrees
, void *)
13893 switch (TREE_CODE (*tp
))
13896 if (OMP_ORDERED_BODY (*tp
) == NULL_TREE
)
13902 *walk_subtrees
= 0;
13910 /* Gimplify the gross structure of an OMP_FOR statement. */
13912 static enum gimplify_status
13913 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
13915 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
13916 enum gimplify_status ret
= GS_ALL_DONE
;
13917 enum gimplify_status tret
;
13919 gimple_seq for_body
, for_pre_body
;
13921 bitmap has_decl_expr
= NULL
;
13922 enum omp_region_type ort
= ORT_WORKSHARE
;
13923 bool openacc
= TREE_CODE (*expr_p
) == OACC_LOOP
;
13925 orig_for_stmt
= for_stmt
= *expr_p
;
13927 bool loop_p
= (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_BIND
)
13929 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
13931 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
13932 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
13933 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
13934 find_combined_omp_for
, data
, NULL
);
13935 if (inner_for_stmt
== NULL_TREE
)
13937 gcc_assert (seen_error ());
13938 *expr_p
= NULL_TREE
;
13941 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
13943 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
13944 &OMP_FOR_PRE_BODY (for_stmt
));
13945 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
13947 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
13949 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
13950 &OMP_FOR_PRE_BODY (for_stmt
));
13951 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
13956 /* We have some statements or variable declarations in between
13957 the composite construct directives. Move them around the
13960 for (i
= 0; i
< 3; i
++)
13964 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
13965 data
[i
+ 1] = data
[i
];
13966 *data
[i
] = OMP_BODY (t
);
13967 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
13968 NULL_TREE
, make_node (BLOCK
));
13969 OMP_BODY (t
) = body
;
13970 append_to_statement_list_force (inner_for_stmt
,
13971 &BIND_EXPR_BODY (body
));
13973 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
13974 gcc_assert (*data
[3] == inner_for_stmt
);
13979 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
13981 && OMP_FOR_ORIG_DECLS (inner_for_stmt
)
13982 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
13984 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
13987 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
13988 /* Class iterators aren't allowed on OMP_SIMD, so the only
13989 case we need to solve is distribute parallel for. They are
13990 allowed on the loop construct, but that is already handled
13991 in gimplify_omp_loop. */
13992 gcc_assert (TREE_CODE (inner_for_stmt
) == OMP_FOR
13993 && TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
13995 tree orig_decl
= TREE_PURPOSE (orig
);
13996 tree last
= TREE_VALUE (orig
);
13998 for (pc
= &OMP_FOR_CLAUSES (inner_for_stmt
);
13999 *pc
; pc
= &OMP_CLAUSE_CHAIN (*pc
))
14000 if ((OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
14001 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LASTPRIVATE
)
14002 && OMP_CLAUSE_DECL (*pc
) == orig_decl
)
14004 if (*pc
== NULL_TREE
)
14007 for (spc
= &OMP_PARALLEL_CLAUSES (*data
[1]);
14008 *spc
; spc
= &OMP_CLAUSE_CHAIN (*spc
))
14009 if (OMP_CLAUSE_CODE (*spc
) == OMP_CLAUSE_PRIVATE
14010 && OMP_CLAUSE_DECL (*spc
) == orig_decl
)
14015 *spc
= OMP_CLAUSE_CHAIN (c
);
14016 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
14020 if (*pc
== NULL_TREE
)
14022 else if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
)
14024 /* private clause will appear only on inner_for_stmt.
14025 Change it into firstprivate, and add private clause
14027 tree c
= copy_node (*pc
);
14028 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
14029 OMP_FOR_CLAUSES (for_stmt
) = c
;
14030 OMP_CLAUSE_CODE (*pc
) = OMP_CLAUSE_FIRSTPRIVATE
;
14031 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
14035 /* lastprivate clause will appear on both inner_for_stmt
14036 and for_stmt. Add firstprivate clause to
14038 tree c
= build_omp_clause (OMP_CLAUSE_LOCATION (*pc
),
14039 OMP_CLAUSE_FIRSTPRIVATE
);
14040 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (*pc
);
14041 OMP_CLAUSE_CHAIN (c
) = *pc
;
14043 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
14045 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
14046 OMP_CLAUSE_FIRSTPRIVATE
);
14047 OMP_CLAUSE_DECL (c
) = last
;
14048 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
14049 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
14050 c
= build_omp_clause (UNKNOWN_LOCATION
,
14051 *pc
? OMP_CLAUSE_SHARED
14052 : OMP_CLAUSE_FIRSTPRIVATE
);
14053 OMP_CLAUSE_DECL (c
) = orig_decl
;
14054 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
14055 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
14057 /* Similarly, take care of C++ range for temporaries, those should
14058 be firstprivate on OMP_PARALLEL if any. */
14060 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
14061 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
14062 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
14064 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
14068 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
14069 tree v
= TREE_CHAIN (orig
);
14070 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
14071 OMP_CLAUSE_FIRSTPRIVATE
);
14072 /* First add firstprivate clause for the __for_end artificial
14074 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 1);
14075 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
14077 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
14078 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
14079 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
14080 if (TREE_VEC_ELT (v
, 0))
14082 /* And now the same for __for_range artificial decl if it
14084 c
= build_omp_clause (UNKNOWN_LOCATION
,
14085 OMP_CLAUSE_FIRSTPRIVATE
);
14086 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 0);
14087 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
14089 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
14090 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
14091 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
14096 switch (TREE_CODE (for_stmt
))
14099 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
))
14101 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
14102 OMP_CLAUSE_SCHEDULE
))
14103 error_at (EXPR_LOCATION (for_stmt
),
14104 "%qs clause may not appear on non-rectangular %qs",
14105 "schedule", lang_GNU_Fortran () ? "do" : "for");
14106 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
))
14107 error_at (EXPR_LOCATION (for_stmt
),
14108 "%qs clause may not appear on non-rectangular %qs",
14109 "ordered", lang_GNU_Fortran () ? "do" : "for");
14112 case OMP_DISTRIBUTE
:
14113 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
)
14114 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
14115 OMP_CLAUSE_DIST_SCHEDULE
))
14116 error_at (EXPR_LOCATION (for_stmt
),
14117 "%qs clause may not appear on non-rectangular %qs",
14118 "dist_schedule", "distribute");
14124 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
))
14126 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
14127 OMP_CLAUSE_GRAINSIZE
))
14128 error_at (EXPR_LOCATION (for_stmt
),
14129 "%qs clause may not appear on non-rectangular %qs",
14130 "grainsize", "taskloop");
14131 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
14132 OMP_CLAUSE_NUM_TASKS
))
14133 error_at (EXPR_LOCATION (for_stmt
),
14134 "%qs clause may not appear on non-rectangular %qs",
14135 "num_tasks", "taskloop");
14137 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
14138 ort
= ORT_UNTIED_TASKLOOP
;
14140 ort
= ORT_TASKLOOP
;
14146 gcc_unreachable ();
14149 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
14150 clause for the IV. */
14151 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
14153 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
14154 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
14155 decl
= TREE_OPERAND (t
, 0);
14156 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
14157 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
14158 && OMP_CLAUSE_DECL (c
) == decl
)
14160 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
14165 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
14166 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
14167 loop_p
&& TREE_CODE (for_stmt
) != OMP_SIMD
14168 ? OMP_LOOP
: TREE_CODE (for_stmt
));
14170 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
14171 gimplify_omp_ctxp
->distribute
= true;
14173 /* Handle OMP_FOR_INIT. */
14174 for_pre_body
= NULL
;
14175 if ((ort
== ORT_SIMD
14176 || (inner_for_stmt
&& TREE_CODE (inner_for_stmt
) == OMP_SIMD
))
14177 && OMP_FOR_PRE_BODY (for_stmt
))
14179 has_decl_expr
= BITMAP_ALLOC (NULL
);
14180 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
14181 && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
))))
14183 t
= OMP_FOR_PRE_BODY (for_stmt
);
14184 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
14186 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
14188 tree_stmt_iterator si
;
14189 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
14193 if (TREE_CODE (t
) == DECL_EXPR
14194 && VAR_P (DECL_EXPR_DECL (t
)))
14195 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
14199 if (OMP_FOR_PRE_BODY (for_stmt
))
14201 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
14202 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
14205 struct gimplify_omp_ctx ctx
;
14206 memset (&ctx
, 0, sizeof (ctx
));
14207 ctx
.region_type
= ORT_NONE
;
14208 gimplify_omp_ctxp
= &ctx
;
14209 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
14210 gimplify_omp_ctxp
= NULL
;
14213 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
14215 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
14216 for_stmt
= inner_for_stmt
;
14218 /* For taskloop, need to gimplify the start, end and step before the
14219 taskloop, outside of the taskloop omp context. */
14220 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
14222 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
14224 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
14225 gimple_seq
*for_pre_p
= (gimple_seq_empty_p (for_pre_body
)
14226 ? pre_p
: &for_pre_body
);
14227 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
14228 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
14230 tree v
= TREE_OPERAND (t
, 1);
14231 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
14232 for_pre_p
, orig_for_stmt
);
14233 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
14234 for_pre_p
, orig_for_stmt
);
14237 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
14240 /* Handle OMP_FOR_COND. */
14241 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
14242 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
14244 tree v
= TREE_OPERAND (t
, 1);
14245 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
14246 for_pre_p
, orig_for_stmt
);
14247 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
14248 for_pre_p
, orig_for_stmt
);
14251 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
14254 /* Handle OMP_FOR_INCR. */
14255 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
14256 if (TREE_CODE (t
) == MODIFY_EXPR
)
14258 decl
= TREE_OPERAND (t
, 0);
14259 t
= TREE_OPERAND (t
, 1);
14260 tree
*tp
= &TREE_OPERAND (t
, 1);
14261 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
14262 tp
= &TREE_OPERAND (t
, 0);
14264 gimplify_omp_taskloop_expr (NULL_TREE
, tp
, for_pre_p
,
14269 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
14273 if (orig_for_stmt
!= for_stmt
)
14274 gimplify_omp_ctxp
->combined_loop
= true;
14277 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
14278 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
14279 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
14280 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
14282 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
14283 bool is_doacross
= false;
14284 if (c
&& walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt
),
14285 find_standalone_omp_ordered
, NULL
))
14287 OMP_CLAUSE_ORDERED_DOACROSS (c
) = 1;
14288 is_doacross
= true;
14289 int len
= TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
));
14290 gimplify_omp_ctxp
->loop_iter_var
.create (len
* 2);
14291 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
14292 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LINEAR
)
14294 error_at (OMP_CLAUSE_LOCATION (*pc
),
14295 "%<linear%> clause may not be specified together "
14296 "with %<ordered%> clause if stand-alone %<ordered%> "
14297 "construct is nested in it");
14298 *pc
= OMP_CLAUSE_CHAIN (*pc
);
14301 pc
= &OMP_CLAUSE_CHAIN (*pc
);
14303 int collapse
= 1, tile
= 0;
14304 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
14306 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
14307 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
14309 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
14310 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ALLOCATE
);
14311 hash_set
<tree
> *allocate_uids
= NULL
;
14314 allocate_uids
= new hash_set
<tree
>;
14315 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
14316 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
)
14317 allocate_uids
->add (OMP_CLAUSE_DECL (c
));
14319 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
14321 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
14322 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
14323 decl
= TREE_OPERAND (t
, 0);
14324 gcc_assert (DECL_P (decl
));
14325 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
14326 || POINTER_TYPE_P (TREE_TYPE (decl
)));
14329 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
14331 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
14332 if (TREE_CODE (orig_decl
) == TREE_LIST
)
14334 orig_decl
= TREE_PURPOSE (orig_decl
);
14338 gimplify_omp_ctxp
->loop_iter_var
.quick_push (orig_decl
);
14341 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
14342 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
14345 if (for_stmt
== orig_for_stmt
)
14347 tree orig_decl
= decl
;
14348 if (OMP_FOR_ORIG_DECLS (for_stmt
))
14350 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
14351 if (TREE_CODE (orig_decl
) == TREE_LIST
)
14353 orig_decl
= TREE_PURPOSE (orig_decl
);
14358 if (is_global_var (orig_decl
) && DECL_THREAD_LOCAL_P (orig_decl
))
14359 error_at (EXPR_LOCATION (for_stmt
),
14360 "threadprivate iteration variable %qD", orig_decl
);
14363 /* Make sure the iteration variable is private. */
14364 tree c
= NULL_TREE
;
14365 tree c2
= NULL_TREE
;
14366 if (orig_for_stmt
!= for_stmt
)
14368 /* Preserve this information until we gimplify the inner simd. */
14370 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
14371 TREE_PRIVATE (t
) = 1;
14373 else if (ort
== ORT_SIMD
)
14375 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
14376 (splay_tree_key
) decl
);
14377 omp_is_private (gimplify_omp_ctxp
, decl
,
14378 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
14380 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
14382 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
14383 if (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
)
14384 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
14385 OMP_CLAUSE_LASTPRIVATE
);
14386 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
14387 OMP_CLAUSE_LASTPRIVATE
))
14388 if (OMP_CLAUSE_DECL (c3
) == decl
)
14390 warning_at (OMP_CLAUSE_LOCATION (c3
), OPT_Wopenmp
,
14391 "conditional %<lastprivate%> on loop "
14392 "iterator %qD ignored", decl
);
14393 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
14394 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
14397 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1 && !loop_p
)
14399 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
14400 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
14401 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
14403 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
14404 || TREE_PRIVATE (t
))
14406 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
14407 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
14409 struct gimplify_omp_ctx
*outer
14410 = gimplify_omp_ctxp
->outer_context
;
14411 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
14413 if (outer
->region_type
== ORT_WORKSHARE
14414 && outer
->combined_loop
)
14416 n
= splay_tree_lookup (outer
->variables
,
14417 (splay_tree_key
)decl
);
14418 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
14420 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
14421 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
14425 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
14427 && octx
->region_type
== ORT_COMBINED_PARALLEL
14428 && octx
->outer_context
14429 && (octx
->outer_context
->region_type
14431 && octx
->outer_context
->combined_loop
)
14433 octx
= octx
->outer_context
;
14434 n
= splay_tree_lookup (octx
->variables
,
14435 (splay_tree_key
)decl
);
14436 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
14438 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
14439 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
14446 OMP_CLAUSE_DECL (c
) = decl
;
14447 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
14448 OMP_FOR_CLAUSES (for_stmt
) = c
;
14449 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
14450 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
14451 omp_lastprivate_for_combined_outer_constructs (outer
, decl
,
14458 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
14459 if (TREE_PRIVATE (t
))
14460 lastprivate
= false;
14461 if (loop_p
&& OMP_FOR_ORIG_DECLS (for_stmt
))
14463 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
14464 if (TREE_CODE (elt
) == TREE_LIST
&& TREE_PURPOSE (elt
))
14465 lastprivate
= false;
14468 struct gimplify_omp_ctx
*outer
14469 = gimplify_omp_ctxp
->outer_context
;
14470 if (outer
&& lastprivate
)
14471 omp_lastprivate_for_combined_outer_constructs (outer
, decl
,
14474 c
= build_omp_clause (input_location
,
14475 lastprivate
? OMP_CLAUSE_LASTPRIVATE
14476 : OMP_CLAUSE_PRIVATE
);
14477 OMP_CLAUSE_DECL (c
) = decl
;
14478 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
14479 OMP_FOR_CLAUSES (for_stmt
) = c
;
14480 omp_add_variable (gimplify_omp_ctxp
, decl
,
14481 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
14482 | GOVD_EXPLICIT
| GOVD_SEEN
);
14486 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
14488 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
14489 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
14490 (splay_tree_key
) decl
);
14491 if (n
&& (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
))
14492 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
14493 OMP_CLAUSE_LASTPRIVATE
);
14494 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
14495 OMP_CLAUSE_LASTPRIVATE
))
14496 if (OMP_CLAUSE_DECL (c3
) == decl
)
14498 warning_at (OMP_CLAUSE_LOCATION (c3
), OPT_Wopenmp
,
14499 "conditional %<lastprivate%> on loop "
14500 "iterator %qD ignored", decl
);
14501 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
14502 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
14506 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
14508 /* If DECL is not a gimple register, create a temporary variable to act
14509 as an iteration counter. This is valid, since DECL cannot be
14510 modified in the body of the loop. Similarly for any iteration vars
14511 in simd with collapse > 1 where the iterator vars must be
14512 lastprivate. And similarly for vars mentioned in allocate clauses. */
14513 if (orig_for_stmt
!= for_stmt
)
14515 else if (!is_gimple_reg (decl
)
14516 || (ort
== ORT_SIMD
14517 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1)
14518 || (allocate_uids
&& allocate_uids
->contains (decl
)))
14520 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
14521 /* Make sure omp_add_variable is not called on it prematurely.
14522 We call it ourselves a few lines later. */
14523 gimplify_omp_ctxp
= NULL
;
14524 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
14525 gimplify_omp_ctxp
= ctx
;
14526 TREE_OPERAND (t
, 0) = var
;
14528 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
14530 if (ort
== ORT_SIMD
14531 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
14533 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
14534 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
14535 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
14536 OMP_CLAUSE_DECL (c2
) = var
;
14537 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
14538 OMP_FOR_CLAUSES (for_stmt
) = c2
;
14539 omp_add_variable (gimplify_omp_ctxp
, var
,
14540 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
14541 if (c
== NULL_TREE
)
14548 omp_add_variable (gimplify_omp_ctxp
, var
,
14549 GOVD_PRIVATE
| GOVD_SEEN
);
14554 gimplify_omp_ctxp
->in_for_exprs
= true;
14555 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
14557 tree lb
= TREE_OPERAND (t
, 1);
14558 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 1), &for_pre_body
, NULL
,
14559 is_gimple_val
, fb_rvalue
, false);
14560 ret
= MIN (ret
, tret
);
14561 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 2), &for_pre_body
, NULL
,
14562 is_gimple_val
, fb_rvalue
, false);
14565 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
14566 is_gimple_val
, fb_rvalue
, false);
14567 gimplify_omp_ctxp
->in_for_exprs
= false;
14568 ret
= MIN (ret
, tret
);
14569 if (ret
== GS_ERROR
)
14572 /* Handle OMP_FOR_COND. */
14573 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
14574 gcc_assert (COMPARISON_CLASS_P (t
));
14575 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
14577 gimplify_omp_ctxp
->in_for_exprs
= true;
14578 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
14580 tree ub
= TREE_OPERAND (t
, 1);
14581 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 1), &for_pre_body
, NULL
,
14582 is_gimple_val
, fb_rvalue
, false);
14583 ret
= MIN (ret
, tret
);
14584 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 2), &for_pre_body
, NULL
,
14585 is_gimple_val
, fb_rvalue
, false);
14588 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
14589 is_gimple_val
, fb_rvalue
, false);
14590 gimplify_omp_ctxp
->in_for_exprs
= false;
14591 ret
= MIN (ret
, tret
);
14593 /* Handle OMP_FOR_INCR. */
14594 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
14595 switch (TREE_CODE (t
))
14597 case PREINCREMENT_EXPR
:
14598 case POSTINCREMENT_EXPR
:
14600 tree decl
= TREE_OPERAND (t
, 0);
14601 /* c_omp_for_incr_canonicalize_ptr() should have been
14602 called to massage things appropriately. */
14603 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
14605 if (orig_for_stmt
!= for_stmt
)
14607 t
= build_int_cst (TREE_TYPE (decl
), 1);
14609 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
14610 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
14611 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
14612 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
14616 case PREDECREMENT_EXPR
:
14617 case POSTDECREMENT_EXPR
:
14618 /* c_omp_for_incr_canonicalize_ptr() should have been
14619 called to massage things appropriately. */
14620 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
14621 if (orig_for_stmt
!= for_stmt
)
14623 t
= build_int_cst (TREE_TYPE (decl
), -1);
14625 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
14626 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
14627 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
14628 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
14632 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
14633 TREE_OPERAND (t
, 0) = var
;
14635 t
= TREE_OPERAND (t
, 1);
14636 switch (TREE_CODE (t
))
14639 if (TREE_OPERAND (t
, 1) == decl
)
14641 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
14642 TREE_OPERAND (t
, 0) = var
;
14648 case POINTER_PLUS_EXPR
:
14649 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
14650 TREE_OPERAND (t
, 0) = var
;
14653 gcc_unreachable ();
14656 gimplify_omp_ctxp
->in_for_exprs
= true;
14657 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
14658 is_gimple_val
, fb_rvalue
, false);
14659 ret
= MIN (ret
, tret
);
14662 tree step
= TREE_OPERAND (t
, 1);
14663 tree stept
= TREE_TYPE (decl
);
14664 if (POINTER_TYPE_P (stept
))
14666 step
= fold_convert (stept
, step
);
14667 if (TREE_CODE (t
) == MINUS_EXPR
)
14668 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
14669 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
14670 if (step
!= TREE_OPERAND (t
, 1))
14672 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
14673 &for_pre_body
, NULL
,
14674 is_gimple_val
, fb_rvalue
, false);
14675 ret
= MIN (ret
, tret
);
14678 gimplify_omp_ctxp
->in_for_exprs
= false;
14682 gcc_unreachable ();
14688 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
14691 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
14693 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
14694 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
14695 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
14696 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
14697 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
14698 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
14699 && OMP_CLAUSE_DECL (c
) == decl
)
14701 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
14705 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
14706 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
14707 gcc_assert (TREE_OPERAND (t
, 0) == var
);
14708 t
= TREE_OPERAND (t
, 1);
14709 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
14710 || TREE_CODE (t
) == MINUS_EXPR
14711 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
14712 gcc_assert (TREE_OPERAND (t
, 0) == var
);
14713 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
14714 is_doacross
? var
: decl
,
14715 TREE_OPERAND (t
, 1));
14718 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
14719 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
14721 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
14722 push_gimplify_context ();
14723 gimplify_assign (decl
, t
, seq
);
14724 gimple
*bind
= NULL
;
14725 if (gimplify_ctxp
->temps
)
14727 bind
= gimple_build_bind (NULL_TREE
, *seq
, NULL_TREE
);
14729 gimplify_seq_add_stmt (seq
, bind
);
14731 pop_gimplify_context (bind
);
14734 if (OMP_FOR_NON_RECTANGULAR (for_stmt
) && var
!= decl
)
14735 for (int j
= i
+ 1; j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
14737 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
14738 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
14739 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
14740 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
14741 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
14742 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
14743 gcc_assert (COMPARISON_CLASS_P (t
));
14744 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
14745 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
14746 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
14750 BITMAP_FREE (has_decl_expr
);
14751 delete allocate_uids
;
14753 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
14754 || (loop_p
&& orig_for_stmt
== for_stmt
))
14756 push_gimplify_context ();
14757 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
14759 OMP_FOR_BODY (orig_for_stmt
)
14760 = build3 (BIND_EXPR
, void_type_node
, NULL
,
14761 OMP_FOR_BODY (orig_for_stmt
), NULL
);
14762 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
14766 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
14769 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
14770 || (loop_p
&& orig_for_stmt
== for_stmt
))
14772 if (gimple_code (g
) == GIMPLE_BIND
)
14773 pop_gimplify_context (g
);
14775 pop_gimplify_context (NULL
);
14778 if (orig_for_stmt
!= for_stmt
)
14779 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
14781 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
14782 decl
= TREE_OPERAND (t
, 0);
14783 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
14784 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
14785 gimplify_omp_ctxp
= ctx
->outer_context
;
14786 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
14787 gimplify_omp_ctxp
= ctx
;
14788 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
14789 TREE_OPERAND (t
, 0) = var
;
14790 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
14791 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
14792 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
14793 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
14794 for (int j
= i
+ 1;
14795 j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
14797 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
14798 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
14799 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
14800 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
14802 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
14803 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
14805 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
14806 gcc_assert (COMPARISON_CLASS_P (t
));
14807 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
14808 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
14810 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
14811 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
14816 gimplify_adjust_omp_clauses (pre_p
, for_body
,
14817 &OMP_FOR_CLAUSES (orig_for_stmt
),
14818 TREE_CODE (orig_for_stmt
));
14821 switch (TREE_CODE (orig_for_stmt
))
14823 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
14824 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
14825 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
14826 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
14827 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
14829 gcc_unreachable ();
14831 if (loop_p
&& kind
== GF_OMP_FOR_KIND_SIMD
)
14833 gimplify_seq_add_seq (pre_p
, for_pre_body
);
14834 for_pre_body
= NULL
;
14836 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
14837 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
14839 if (orig_for_stmt
!= for_stmt
)
14840 gimple_omp_for_set_combined_p (gfor
, true);
14841 if (gimplify_omp_ctxp
14842 && (gimplify_omp_ctxp
->combined_loop
14843 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
14844 && gimplify_omp_ctxp
->outer_context
14845 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
14847 gimple_omp_for_set_combined_into_p (gfor
, true);
14848 if (gimplify_omp_ctxp
->combined_loop
)
14849 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
14851 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
14854 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
14856 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
14857 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
14858 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
14859 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
14860 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
14861 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
14862 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
14863 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
14866 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
14867 constructs with GIMPLE_OMP_TASK sandwiched in between them.
14868 The outer taskloop stands for computing the number of iterations,
14869 counts for collapsed loops and holding taskloop specific clauses.
14870 The task construct stands for the effect of data sharing on the
14871 explicit task it creates and the inner taskloop stands for expansion
14872 of the static loop inside of the explicit task construct. */
14873 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
14875 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
14876 tree task_clauses
= NULL_TREE
;
14877 tree c
= *gfor_clauses_ptr
;
14878 tree
*gtask_clauses_ptr
= &task_clauses
;
14879 tree outer_for_clauses
= NULL_TREE
;
14880 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
14881 bitmap lastprivate_uids
= NULL
;
14882 if (omp_find_clause (c
, OMP_CLAUSE_ALLOCATE
))
14884 c
= omp_find_clause (c
, OMP_CLAUSE_LASTPRIVATE
);
14887 lastprivate_uids
= BITMAP_ALLOC (NULL
);
14888 for (; c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
14889 OMP_CLAUSE_LASTPRIVATE
))
14890 bitmap_set_bit (lastprivate_uids
,
14891 DECL_UID (OMP_CLAUSE_DECL (c
)));
14893 c
= *gfor_clauses_ptr
;
14895 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
14896 switch (OMP_CLAUSE_CODE (c
))
14898 /* These clauses are allowed on task, move them there. */
14899 case OMP_CLAUSE_SHARED
:
14900 case OMP_CLAUSE_FIRSTPRIVATE
:
14901 case OMP_CLAUSE_DEFAULT
:
14902 case OMP_CLAUSE_IF
:
14903 case OMP_CLAUSE_UNTIED
:
14904 case OMP_CLAUSE_FINAL
:
14905 case OMP_CLAUSE_MERGEABLE
:
14906 case OMP_CLAUSE_PRIORITY
:
14907 case OMP_CLAUSE_REDUCTION
:
14908 case OMP_CLAUSE_IN_REDUCTION
:
14909 *gtask_clauses_ptr
= c
;
14910 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14912 case OMP_CLAUSE_PRIVATE
:
14913 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
14915 /* We want private on outer for and firstprivate
14918 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
14919 OMP_CLAUSE_FIRSTPRIVATE
);
14920 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
14921 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
14923 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
14924 *gforo_clauses_ptr
= c
;
14925 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14929 *gtask_clauses_ptr
= c
;
14930 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14933 /* These clauses go into outer taskloop clauses. */
14934 case OMP_CLAUSE_GRAINSIZE
:
14935 case OMP_CLAUSE_NUM_TASKS
:
14936 case OMP_CLAUSE_NOGROUP
:
14937 *gforo_clauses_ptr
= c
;
14938 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14940 /* Collapse clause we duplicate on both taskloops. */
14941 case OMP_CLAUSE_COLLAPSE
:
14942 *gfor_clauses_ptr
= c
;
14943 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14944 *gforo_clauses_ptr
= copy_node (c
);
14945 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
14947 /* For lastprivate, keep the clause on inner taskloop, and add
14948 a shared clause on task. If the same decl is also firstprivate,
14949 add also firstprivate clause on the inner taskloop. */
14950 case OMP_CLAUSE_LASTPRIVATE
:
14951 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
14953 /* For taskloop C++ lastprivate IVs, we want:
14954 1) private on outer taskloop
14955 2) firstprivate and shared on task
14956 3) lastprivate on inner taskloop */
14958 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
14959 OMP_CLAUSE_FIRSTPRIVATE
);
14960 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
14961 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
14963 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
14964 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
14965 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
14966 OMP_CLAUSE_PRIVATE
);
14967 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
14968 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
14969 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
14970 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
14972 *gfor_clauses_ptr
= c
;
14973 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14975 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
14976 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
14977 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
14978 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
14980 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
14982 /* Allocate clause we duplicate on task and inner taskloop
14983 if the decl is lastprivate, otherwise just put on task. */
14984 case OMP_CLAUSE_ALLOCATE
:
14985 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
14986 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
14988 /* Additionally, put firstprivate clause on task
14989 for the allocator if it is not constant. */
14991 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
14992 OMP_CLAUSE_FIRSTPRIVATE
);
14993 OMP_CLAUSE_DECL (*gtask_clauses_ptr
)
14994 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
14995 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
14997 if (lastprivate_uids
14998 && bitmap_bit_p (lastprivate_uids
,
14999 DECL_UID (OMP_CLAUSE_DECL (c
))))
15001 *gfor_clauses_ptr
= c
;
15002 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
15003 *gtask_clauses_ptr
= copy_node (c
);
15004 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
15008 *gtask_clauses_ptr
= c
;
15009 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
15013 gcc_unreachable ();
15015 *gfor_clauses_ptr
= NULL_TREE
;
15016 *gtask_clauses_ptr
= NULL_TREE
;
15017 *gforo_clauses_ptr
= NULL_TREE
;
15018 BITMAP_FREE (lastprivate_uids
);
15019 gimple_set_location (gfor
, input_location
);
15020 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
15021 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
15022 NULL_TREE
, NULL_TREE
, NULL_TREE
);
15023 gimple_set_location (g
, input_location
);
15024 gimple_omp_task_set_taskloop_p (g
, true);
15025 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
15027 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
15028 gimple_omp_for_collapse (gfor
),
15029 gimple_omp_for_pre_body (gfor
));
15030 gimple_omp_for_set_pre_body (gfor
, NULL
);
15031 gimple_omp_for_set_combined_p (gforo
, true);
15032 gimple_omp_for_set_combined_into_p (gfor
, true);
15033 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
15035 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
15036 tree v
= create_tmp_var (type
);
15037 gimple_omp_for_set_index (gforo
, i
, v
);
15038 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
15039 gimple_omp_for_set_initial (gforo
, i
, t
);
15040 gimple_omp_for_set_cond (gforo
, i
,
15041 gimple_omp_for_cond (gfor
, i
));
15042 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
15043 gimple_omp_for_set_final (gforo
, i
, t
);
15044 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
15045 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
15046 TREE_OPERAND (t
, 0) = v
;
15047 gimple_omp_for_set_incr (gforo
, i
, t
);
15048 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
15049 OMP_CLAUSE_DECL (t
) = v
;
15050 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
15051 gimple_omp_for_set_clauses (gforo
, t
);
15052 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
15054 tree
*p1
= NULL
, *p2
= NULL
;
15055 t
= gimple_omp_for_initial (gforo
, i
);
15056 if (TREE_CODE (t
) == TREE_VEC
)
15057 p1
= &TREE_VEC_ELT (t
, 0);
15058 t
= gimple_omp_for_final (gforo
, i
);
15059 if (TREE_CODE (t
) == TREE_VEC
)
15062 p2
= &TREE_VEC_ELT (t
, 0);
15064 p1
= &TREE_VEC_ELT (t
, 0);
15069 for (j
= 0; j
< i
; j
++)
15070 if (*p1
== gimple_omp_for_index (gfor
, j
))
15072 *p1
= gimple_omp_for_index (gforo
, j
);
15077 gcc_assert (j
< i
);
15081 gimplify_seq_add_stmt (pre_p
, gforo
);
15084 gimplify_seq_add_stmt (pre_p
, gfor
);
15086 if (TREE_CODE (orig_for_stmt
) == OMP_FOR
)
15088 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
15089 unsigned lastprivate_conditional
= 0;
15091 && (ctx
->region_type
== ORT_TARGET_DATA
15092 || ctx
->region_type
== ORT_TASKGROUP
))
15093 ctx
= ctx
->outer_context
;
15094 if (ctx
&& (ctx
->region_type
& ORT_PARALLEL
) != 0)
15095 for (tree c
= gimple_omp_for_clauses (gfor
);
15096 c
; c
= OMP_CLAUSE_CHAIN (c
))
15097 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
15098 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
15099 ++lastprivate_conditional
;
15100 if (lastprivate_conditional
)
15102 struct omp_for_data fd
;
15103 omp_extract_for_data (gfor
, &fd
, NULL
);
15104 tree type
= build_array_type_nelts (unsigned_type_for (fd
.iter_type
),
15105 lastprivate_conditional
);
15106 tree var
= create_tmp_var_raw (type
);
15107 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
15108 OMP_CLAUSE_DECL (c
) = var
;
15109 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
15110 gimple_omp_for_set_clauses (gfor
, c
);
15111 omp_add_variable (ctx
, var
, GOVD_CONDTEMP
| GOVD_SEEN
);
15114 else if (TREE_CODE (orig_for_stmt
) == OMP_SIMD
)
15116 unsigned lastprivate_conditional
= 0;
15117 for (tree c
= gimple_omp_for_clauses (gfor
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15118 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
15119 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
15120 ++lastprivate_conditional
;
15121 if (lastprivate_conditional
)
15123 struct omp_for_data fd
;
15124 omp_extract_for_data (gfor
, &fd
, NULL
);
15125 tree type
= unsigned_type_for (fd
.iter_type
);
15126 while (lastprivate_conditional
--)
15128 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
15129 OMP_CLAUSE__CONDTEMP_
);
15130 OMP_CLAUSE_DECL (c
) = create_tmp_var (type
);
15131 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
15132 gimple_omp_for_set_clauses (gfor
, c
);
15137 if (ret
!= GS_ALL_DONE
)
15139 *expr_p
= NULL_TREE
;
15140 return GS_ALL_DONE
;
15143 /* Helper for gimplify_omp_loop, called through walk_tree. */
15146 note_no_context_vars (tree
*tp
, int *, void *data
)
15149 && DECL_CONTEXT (*tp
) == NULL_TREE
15150 && !is_global_var (*tp
))
15152 vec
<tree
> *d
= (vec
<tree
> *) data
;
15153 d
->safe_push (*tp
);
15154 DECL_CONTEXT (*tp
) = current_function_decl
;
15159 /* Gimplify the gross structure of an OMP_LOOP statement. */
15161 static enum gimplify_status
15162 gimplify_omp_loop (tree
*expr_p
, gimple_seq
*pre_p
)
15164 tree for_stmt
= *expr_p
;
15165 tree clauses
= OMP_FOR_CLAUSES (for_stmt
);
15166 struct gimplify_omp_ctx
*octx
= gimplify_omp_ctxp
;
15167 enum omp_clause_bind_kind kind
= OMP_CLAUSE_BIND_THREAD
;
15170 /* If order is not present, the behavior is as if order(concurrent)
15172 tree order
= omp_find_clause (clauses
, OMP_CLAUSE_ORDER
);
15173 if (order
== NULL_TREE
)
15175 order
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_ORDER
);
15176 OMP_CLAUSE_CHAIN (order
) = clauses
;
15177 OMP_FOR_CLAUSES (for_stmt
) = clauses
= order
;
15180 tree bind
= omp_find_clause (clauses
, OMP_CLAUSE_BIND
);
15181 if (bind
== NULL_TREE
)
15183 if (!flag_openmp
) /* flag_openmp_simd */
15185 else if (octx
&& (octx
->region_type
& ORT_TEAMS
) != 0)
15186 kind
= OMP_CLAUSE_BIND_TEAMS
;
15187 else if (octx
&& (octx
->region_type
& ORT_PARALLEL
) != 0)
15188 kind
= OMP_CLAUSE_BIND_PARALLEL
;
15191 for (; octx
; octx
= octx
->outer_context
)
15193 if ((octx
->region_type
& ORT_ACC
) != 0
15194 || octx
->region_type
== ORT_NONE
15195 || octx
->region_type
== ORT_IMPLICIT_TARGET
)
15199 if (octx
== NULL
&& !in_omp_construct
)
15200 error_at (EXPR_LOCATION (for_stmt
),
15201 "%<bind%> clause not specified on a %<loop%> "
15202 "construct not nested inside another OpenMP construct");
15204 bind
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_BIND
);
15205 OMP_CLAUSE_CHAIN (bind
) = clauses
;
15206 OMP_CLAUSE_BIND_KIND (bind
) = kind
;
15207 OMP_FOR_CLAUSES (for_stmt
) = bind
;
15210 switch (OMP_CLAUSE_BIND_KIND (bind
))
15212 case OMP_CLAUSE_BIND_THREAD
:
15214 case OMP_CLAUSE_BIND_PARALLEL
:
15215 if (!flag_openmp
) /* flag_openmp_simd */
15217 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
15220 for (; octx
; octx
= octx
->outer_context
)
15221 if (octx
->region_type
== ORT_SIMD
15222 && omp_find_clause (octx
->clauses
, OMP_CLAUSE_BIND
) == NULL_TREE
)
15224 error_at (EXPR_LOCATION (for_stmt
),
15225 "%<bind(parallel)%> on a %<loop%> construct nested "
15226 "inside %<simd%> construct");
15227 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
15230 kind
= OMP_CLAUSE_BIND_PARALLEL
;
15232 case OMP_CLAUSE_BIND_TEAMS
:
15233 if (!flag_openmp
) /* flag_openmp_simd */
15235 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
15239 && octx
->region_type
!= ORT_IMPLICIT_TARGET
15240 && octx
->region_type
!= ORT_NONE
15241 && (octx
->region_type
& ORT_TEAMS
) == 0)
15242 || in_omp_construct
)
15244 error_at (EXPR_LOCATION (for_stmt
),
15245 "%<bind(teams)%> on a %<loop%> region not strictly "
15246 "nested inside of a %<teams%> region");
15247 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
15250 kind
= OMP_CLAUSE_BIND_TEAMS
;
15253 gcc_unreachable ();
15256 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
15257 switch (OMP_CLAUSE_CODE (*pc
))
15259 case OMP_CLAUSE_REDUCTION
:
15260 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc
))
15262 error_at (OMP_CLAUSE_LOCATION (*pc
),
15263 "%<inscan%> %<reduction%> clause on "
15264 "%qs construct", "loop");
15265 OMP_CLAUSE_REDUCTION_INSCAN (*pc
) = 0;
15267 if (OMP_CLAUSE_REDUCTION_TASK (*pc
))
15269 error_at (OMP_CLAUSE_LOCATION (*pc
),
15270 "invalid %<task%> reduction modifier on construct "
15271 "other than %<parallel%>, %qs or %<sections%>",
15272 lang_GNU_Fortran () ? "do" : "for");
15273 OMP_CLAUSE_REDUCTION_TASK (*pc
) = 0;
15275 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15277 case OMP_CLAUSE_LASTPRIVATE
:
15278 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
15280 tree t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
15281 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
15282 if (OMP_CLAUSE_DECL (*pc
) == TREE_OPERAND (t
, 0))
15284 if (OMP_FOR_ORIG_DECLS (for_stmt
)
15285 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
15287 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
15290 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
15291 if (OMP_CLAUSE_DECL (*pc
) == TREE_PURPOSE (orig
))
15295 if (i
== TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)))
15297 error_at (OMP_CLAUSE_LOCATION (*pc
),
15298 "%<lastprivate%> clause on a %<loop%> construct refers "
15299 "to a variable %qD which is not the loop iterator",
15300 OMP_CLAUSE_DECL (*pc
));
15301 *pc
= OMP_CLAUSE_CHAIN (*pc
);
15304 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15307 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15311 TREE_SET_CODE (for_stmt
, OMP_SIMD
);
15316 case OMP_CLAUSE_BIND_THREAD
: last
= 0; break;
15317 case OMP_CLAUSE_BIND_PARALLEL
: last
= 1; break;
15318 case OMP_CLAUSE_BIND_TEAMS
: last
= 2; break;
15320 for (int pass
= 1; pass
<= last
; pass
++)
15324 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
,
15325 make_node (BLOCK
));
15326 append_to_statement_list (*expr_p
, &BIND_EXPR_BODY (bind
));
15327 *expr_p
= make_node (OMP_PARALLEL
);
15328 TREE_TYPE (*expr_p
) = void_type_node
;
15329 OMP_PARALLEL_BODY (*expr_p
) = bind
;
15330 OMP_PARALLEL_COMBINED (*expr_p
) = 1;
15331 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (for_stmt
));
15332 tree
*pc
= &OMP_PARALLEL_CLAUSES (*expr_p
);
15333 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
15334 if (OMP_FOR_ORIG_DECLS (for_stmt
)
15335 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
))
15338 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
15339 if (TREE_PURPOSE (elt
) && TREE_VALUE (elt
))
15341 *pc
= build_omp_clause (UNKNOWN_LOCATION
,
15342 OMP_CLAUSE_FIRSTPRIVATE
);
15343 OMP_CLAUSE_DECL (*pc
) = TREE_VALUE (elt
);
15344 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15348 tree t
= make_node (pass
== 2 ? OMP_DISTRIBUTE
: OMP_FOR
);
15349 tree
*pc
= &OMP_FOR_CLAUSES (t
);
15350 TREE_TYPE (t
) = void_type_node
;
15351 OMP_FOR_BODY (t
) = *expr_p
;
15352 SET_EXPR_LOCATION (t
, EXPR_LOCATION (for_stmt
));
15353 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15354 switch (OMP_CLAUSE_CODE (c
))
15356 case OMP_CLAUSE_BIND
:
15357 case OMP_CLAUSE_ORDER
:
15358 case OMP_CLAUSE_COLLAPSE
:
15359 *pc
= copy_node (c
);
15360 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15362 case OMP_CLAUSE_PRIVATE
:
15363 case OMP_CLAUSE_FIRSTPRIVATE
:
15364 /* Only needed on innermost. */
15366 case OMP_CLAUSE_LASTPRIVATE
:
15367 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
) && pass
!= last
)
15369 *pc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
15370 OMP_CLAUSE_FIRSTPRIVATE
);
15371 OMP_CLAUSE_DECL (*pc
) = OMP_CLAUSE_DECL (c
);
15372 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
15373 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15375 *pc
= copy_node (c
);
15376 OMP_CLAUSE_LASTPRIVATE_STMT (*pc
) = NULL_TREE
;
15377 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
15378 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
15381 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc
) = 1;
15383 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
15384 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc
) = 0;
15386 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15388 case OMP_CLAUSE_REDUCTION
:
15389 *pc
= copy_node (c
);
15390 OMP_CLAUSE_DECL (*pc
) = unshare_expr (OMP_CLAUSE_DECL (c
));
15391 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
15392 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
))
15394 auto_vec
<tree
> no_context_vars
;
15395 int walk_subtrees
= 0;
15396 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
15397 &walk_subtrees
, &no_context_vars
);
15398 if (tree p
= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
))
15399 note_no_context_vars (&p
, &walk_subtrees
, &no_context_vars
);
15400 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c
),
15401 note_no_context_vars
,
15403 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c
),
15404 note_no_context_vars
,
15407 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
)
15408 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
));
15409 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
15410 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
)
15411 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
));
15413 hash_map
<tree
, tree
> decl_map
;
15414 decl_map
.put (OMP_CLAUSE_DECL (c
), OMP_CLAUSE_DECL (c
));
15415 decl_map
.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
15416 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
));
15417 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
15418 decl_map
.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
15419 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
));
15422 memset (&id
, 0, sizeof (id
));
15423 id
.src_fn
= current_function_decl
;
15424 id
.dst_fn
= current_function_decl
;
15425 id
.src_cfun
= cfun
;
15426 id
.decl_map
= &decl_map
;
15427 id
.copy_decl
= copy_decl_no_change
;
15428 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
15429 id
.transform_new_cfg
= true;
15430 id
.transform_return_to_modify
= false;
15432 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc
), copy_tree_body_r
,
15434 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc
), copy_tree_body_r
,
15437 for (tree d
: no_context_vars
)
15439 DECL_CONTEXT (d
) = NULL_TREE
;
15440 DECL_CONTEXT (*decl_map
.get (d
)) = NULL_TREE
;
15445 OMP_CLAUSE_REDUCTION_INIT (*pc
)
15446 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c
));
15447 OMP_CLAUSE_REDUCTION_MERGE (*pc
)
15448 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c
));
15450 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15453 gcc_unreachable ();
15458 return gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_stmt
, fb_none
);
15462 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
15463 of OMP_TARGET's body. */
15466 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
15468 *walk_subtrees
= 0;
15469 switch (TREE_CODE (*tp
))
15474 case STATEMENT_LIST
:
15475 *walk_subtrees
= 1;
15483 /* Helper function of optimize_target_teams, determine if the expression
15484 can be computed safely before the target construct on the host. */
15487 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
15493 *walk_subtrees
= 0;
15496 switch (TREE_CODE (*tp
))
15501 *walk_subtrees
= 0;
15502 if (error_operand_p (*tp
)
15503 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
15504 || DECL_HAS_VALUE_EXPR_P (*tp
)
15505 || DECL_THREAD_LOCAL_P (*tp
)
15506 || TREE_SIDE_EFFECTS (*tp
)
15507 || TREE_THIS_VOLATILE (*tp
))
15509 if (is_global_var (*tp
)
15510 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
15511 || lookup_attribute ("omp declare target link",
15512 DECL_ATTRIBUTES (*tp
))))
15515 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
15516 && !is_global_var (*tp
)
15517 && decl_function_context (*tp
) == current_function_decl
)
15519 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
15520 (splay_tree_key
) *tp
);
15523 if (gimplify_omp_ctxp
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
15527 else if (n
->value
& GOVD_LOCAL
)
15529 else if (n
->value
& GOVD_FIRSTPRIVATE
)
15531 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
15532 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
15536 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
15540 if (TARGET_EXPR_INITIAL (*tp
)
15541 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
15543 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
15544 walk_subtrees
, NULL
);
15545 /* Allow some reasonable subset of integral arithmetics. */
15549 case TRUNC_DIV_EXPR
:
15550 case CEIL_DIV_EXPR
:
15551 case FLOOR_DIV_EXPR
:
15552 case ROUND_DIV_EXPR
:
15553 case TRUNC_MOD_EXPR
:
15554 case CEIL_MOD_EXPR
:
15555 case FLOOR_MOD_EXPR
:
15556 case ROUND_MOD_EXPR
:
15558 case EXACT_DIV_EXPR
:
15569 case NON_LVALUE_EXPR
:
15571 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
15574 /* And disallow anything else, except for comparisons. */
15576 if (COMPARISON_CLASS_P (*tp
))
15582 /* Try to determine if the num_teams and/or thread_limit expressions
15583 can have their values determined already before entering the
15585 INTEGER_CSTs trivially are,
15586 integral decls that are firstprivate (explicitly or implicitly)
15587 or explicitly map(always, to:) or map(always, tofrom:) on the target
15588 region too, and expressions involving simple arithmetics on those
15589 too, function calls are not ok, dereferencing something neither etc.
15590 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
15591 EXPR based on what we find:
15592 0 stands for clause not specified at all, use implementation default
15593 -1 stands for value that can't be determined easily before entering
15594 the target construct.
15595 -2 means that no explicit teams construct was specified
15596 If teams construct is not present at all, use 1 for num_teams
15597 and 0 for thread_limit (only one team is involved, and the thread
15598 limit is implementation defined. */
15601 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
15603 tree body
= OMP_BODY (target
);
15604 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
15605 tree num_teams_lower
= NULL_TREE
;
15606 tree num_teams_upper
= integer_zero_node
;
15607 tree thread_limit
= integer_zero_node
;
15608 location_t num_teams_loc
= EXPR_LOCATION (target
);
15609 location_t thread_limit_loc
= EXPR_LOCATION (target
);
15611 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
15613 if (teams
== NULL_TREE
)
15614 num_teams_upper
= build_int_cst (integer_type_node
, -2);
15616 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15618 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
15620 p
= &num_teams_upper
;
15621 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
15622 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
))
15624 expr
= OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
);
15625 if (TREE_CODE (expr
) == INTEGER_CST
)
15626 num_teams_lower
= expr
;
15627 else if (walk_tree (&expr
, computable_teams_clause
,
15629 num_teams_lower
= integer_minus_one_node
;
15632 num_teams_lower
= expr
;
15633 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
15634 if (gimplify_expr (&num_teams_lower
, pre_p
, NULL
,
15635 is_gimple_val
, fb_rvalue
, false)
15638 gimplify_omp_ctxp
= target_ctx
;
15639 num_teams_lower
= integer_minus_one_node
;
15643 gimplify_omp_ctxp
= target_ctx
;
15644 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
15645 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
15651 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
15654 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
15658 expr
= OMP_CLAUSE_OPERAND (c
, 0);
15659 if (TREE_CODE (expr
) == INTEGER_CST
)
15664 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
15666 *p
= integer_minus_one_node
;
15670 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
15671 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
15674 gimplify_omp_ctxp
= target_ctx
;
15675 *p
= integer_minus_one_node
;
15678 gimplify_omp_ctxp
= target_ctx
;
15679 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
15680 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
15682 if (!omp_find_clause (OMP_TARGET_CLAUSES (target
), OMP_CLAUSE_THREAD_LIMIT
))
15684 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
15685 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
15686 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
15687 OMP_TARGET_CLAUSES (target
) = c
;
15689 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
15690 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c
) = num_teams_upper
;
15691 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
) = num_teams_lower
;
15692 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
15693 OMP_TARGET_CLAUSES (target
) = c
;
15696 /* Gimplify the gross structure of several OMP constructs. */
15699 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
15701 tree expr
= *expr_p
;
15703 gimple_seq body
= NULL
;
15704 enum omp_region_type ort
;
15706 switch (TREE_CODE (expr
))
15710 ort
= ORT_WORKSHARE
;
15713 ort
= ORT_TASKGROUP
;
15716 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
15719 ort
= ORT_ACC_KERNELS
;
15721 case OACC_PARALLEL
:
15722 ort
= ORT_ACC_PARALLEL
;
15725 ort
= ORT_ACC_SERIAL
;
15728 ort
= ORT_ACC_DATA
;
15730 case OMP_TARGET_DATA
:
15731 ort
= ORT_TARGET_DATA
;
15734 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
15735 if (gimplify_omp_ctxp
== NULL
15736 || gimplify_omp_ctxp
->region_type
== ORT_IMPLICIT_TARGET
)
15737 ort
= (enum omp_region_type
) (ort
| ORT_HOST_TEAMS
);
15739 case OACC_HOST_DATA
:
15740 ort
= ORT_ACC_HOST_DATA
;
15743 gcc_unreachable ();
15746 bool save_in_omp_construct
= in_omp_construct
;
15747 if ((ort
& ORT_ACC
) == 0)
15748 in_omp_construct
= false;
15749 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
15751 if (TREE_CODE (expr
) == OMP_TARGET
)
15752 optimize_target_teams (expr
, pre_p
);
15753 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
15754 || (ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
15756 push_gimplify_context ();
15757 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
15758 if (gimple_code (g
) == GIMPLE_BIND
)
15759 pop_gimplify_context (g
);
15761 pop_gimplify_context (NULL
);
15762 if ((ort
& ORT_TARGET_DATA
) != 0)
15764 enum built_in_function end_ix
;
15765 switch (TREE_CODE (expr
))
15768 case OACC_HOST_DATA
:
15769 end_ix
= BUILT_IN_GOACC_DATA_END
;
15771 case OMP_TARGET_DATA
:
15772 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
15775 gcc_unreachable ();
15777 tree fn
= builtin_decl_explicit (end_ix
);
15778 g
= gimple_build_call (fn
, 0);
15779 gimple_seq cleanup
= NULL
;
15780 gimple_seq_add_stmt (&cleanup
, g
);
15781 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
15783 gimple_seq_add_stmt (&body
, g
);
15787 gimplify_and_add (OMP_BODY (expr
), &body
);
15788 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
15790 in_omp_construct
= save_in_omp_construct
;
15792 switch (TREE_CODE (expr
))
15795 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
15796 OMP_CLAUSES (expr
));
15798 case OACC_HOST_DATA
:
15799 if (omp_find_clause (OMP_CLAUSES (expr
), OMP_CLAUSE_IF_PRESENT
))
15801 for (tree c
= OMP_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15802 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
15803 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
) = 1;
15806 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
15807 OMP_CLAUSES (expr
));
15810 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
15811 OMP_CLAUSES (expr
));
15813 case OACC_PARALLEL
:
15814 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
15815 OMP_CLAUSES (expr
));
15818 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_SERIAL
,
15819 OMP_CLAUSES (expr
));
15822 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
15825 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
15828 stmt
= gimple_build_omp_scope (body
, OMP_CLAUSES (expr
));
15831 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
15832 OMP_CLAUSES (expr
));
15834 case OMP_TARGET_DATA
:
15835 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15836 to be evaluated before the use_device_{ptr,addr} clauses if they
15837 refer to the same variables. */
15839 tree use_device_clauses
;
15840 tree
*pc
, *uc
= &use_device_clauses
;
15841 for (pc
= &OMP_CLAUSES (expr
); *pc
; )
15842 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_PTR
15843 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
15846 *pc
= OMP_CLAUSE_CHAIN (*pc
);
15847 uc
= &OMP_CLAUSE_CHAIN (*uc
);
15850 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15852 *pc
= use_device_clauses
;
15853 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
15854 OMP_CLAUSES (expr
));
15858 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
15859 if ((ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
15860 gimple_omp_teams_set_host (as_a
<gomp_teams
*> (stmt
), true);
15863 gcc_unreachable ();
15866 gimplify_seq_add_stmt (pre_p
, stmt
);
15867 *expr_p
= NULL_TREE
;
15870 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
15871 target update constructs. */
15874 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
15876 tree expr
= *expr_p
;
15879 enum omp_region_type ort
= ORT_WORKSHARE
;
15881 switch (TREE_CODE (expr
))
15883 case OACC_ENTER_DATA
:
15884 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_DATA
;
15887 case OACC_EXIT_DATA
:
15888 kind
= GF_OMP_TARGET_KIND_OACC_EXIT_DATA
;
15892 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
15895 case OMP_TARGET_UPDATE
:
15896 kind
= GF_OMP_TARGET_KIND_UPDATE
;
15898 case OMP_TARGET_ENTER_DATA
:
15899 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
15901 case OMP_TARGET_EXIT_DATA
:
15902 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
15905 gcc_unreachable ();
15907 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
15908 ort
, TREE_CODE (expr
));
15909 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
15911 if (TREE_CODE (expr
) == OACC_UPDATE
15912 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
15913 OMP_CLAUSE_IF_PRESENT
))
15915 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15917 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15918 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
15919 switch (OMP_CLAUSE_MAP_KIND (c
))
15921 case GOMP_MAP_FORCE_TO
:
15922 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
15924 case GOMP_MAP_FORCE_FROM
:
15925 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FROM
);
15931 else if (TREE_CODE (expr
) == OACC_EXIT_DATA
15932 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
15933 OMP_CLAUSE_FINALIZE
))
15935 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15937 bool have_clause
= false;
15938 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15939 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
15940 switch (OMP_CLAUSE_MAP_KIND (c
))
15942 case GOMP_MAP_FROM
:
15943 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_FROM
);
15944 have_clause
= true;
15946 case GOMP_MAP_RELEASE
:
15947 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_DELETE
);
15948 have_clause
= true;
15950 case GOMP_MAP_TO_PSET
:
15951 /* Fortran arrays with descriptors must map that descriptor when
15952 doing standalone "attach" operations (in OpenACC). In that
15953 case GOMP_MAP_TO_PSET appears by itself with no preceding
15954 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15956 case GOMP_MAP_POINTER
:
15957 /* TODO PR92929: we may see these here, but they'll always follow
15958 one of the clauses above, and will be handled by libgomp as
15959 one group, so no handling required here. */
15960 gcc_assert (have_clause
);
15962 case GOMP_MAP_DETACH
:
15963 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_DETACH
);
15964 have_clause
= false;
15966 case GOMP_MAP_STRUCT
:
15967 have_clause
= false;
15970 gcc_unreachable ();
15973 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
15975 gimplify_seq_add_stmt (pre_p
, stmt
);
15976 *expr_p
= NULL_TREE
;
15979 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15980 stabilized the lhs of the atomic operation as *ADDR. Return true if
15981 EXPR is this stabilized form. */
15984 goa_lhs_expr_p (tree expr
, tree addr
)
15986 /* Also include casts to other type variants. The C front end is fond
15987 of adding these for e.g. volatile variables. This is like
15988 STRIP_TYPE_NOPS but includes the main variant lookup. */
15989 STRIP_USELESS_TYPE_CONVERSION (expr
);
15991 if (INDIRECT_REF_P (expr
))
15993 expr
= TREE_OPERAND (expr
, 0);
15994 while (expr
!= addr
15995 && (CONVERT_EXPR_P (expr
)
15996 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
15997 && TREE_CODE (expr
) == TREE_CODE (addr
)
15998 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
16000 expr
= TREE_OPERAND (expr
, 0);
16001 addr
= TREE_OPERAND (addr
, 0);
16005 return (TREE_CODE (addr
) == ADDR_EXPR
16006 && TREE_CODE (expr
) == ADDR_EXPR
16007 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
16009 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
16014 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
16015 expression does not involve the lhs, evaluate it into a temporary.
16016 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
16017 or -1 if an error was encountered. */
16020 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
16021 tree lhs_var
, tree
&target_expr
, bool rhs
, int depth
)
16023 tree expr
= *expr_p
;
16026 if (goa_lhs_expr_p (expr
, lhs_addr
))
16032 if (is_gimple_val (expr
))
16035 /* Maximum depth of lhs in expression is for the
16036 __builtin_clear_padding (...), __builtin_clear_padding (...),
16037 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
16041 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
16044 case tcc_comparison
:
16045 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
16046 lhs_var
, target_expr
, true, depth
);
16049 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
16050 lhs_var
, target_expr
, true, depth
);
16052 case tcc_expression
:
16053 switch (TREE_CODE (expr
))
16055 case TRUTH_ANDIF_EXPR
:
16056 case TRUTH_ORIF_EXPR
:
16057 case TRUTH_AND_EXPR
:
16058 case TRUTH_OR_EXPR
:
16059 case TRUTH_XOR_EXPR
:
16060 case BIT_INSERT_EXPR
:
16061 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
16062 lhs_addr
, lhs_var
, target_expr
, true,
16065 case TRUTH_NOT_EXPR
:
16066 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
16067 lhs_addr
, lhs_var
, target_expr
, true,
16071 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
, lhs_var
,
16072 target_expr
, true, depth
))
16074 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
16075 lhs_addr
, lhs_var
, target_expr
, true,
16077 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
16078 lhs_addr
, lhs_var
, target_expr
, false,
16083 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
, lhs_var
,
16084 target_expr
, true, depth
))
16086 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
16087 lhs_addr
, lhs_var
, target_expr
, false,
16090 case COMPOUND_EXPR
:
16091 /* Break out any preevaluations from cp_build_modify_expr. */
16092 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
16093 expr
= TREE_OPERAND (expr
, 1))
16095 /* Special-case __builtin_clear_padding call before
16096 __builtin_memcmp. */
16097 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
)
16099 tree fndecl
= get_callee_fndecl (TREE_OPERAND (expr
, 0));
16101 && fndecl_built_in_p (fndecl
, BUILT_IN_CLEAR_PADDING
)
16102 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
16104 || goa_stabilize_expr (&TREE_OPERAND (expr
, 0), NULL
,
16106 target_expr
, true, depth
)))
16110 saw_lhs
= goa_stabilize_expr (&TREE_OPERAND (expr
, 0),
16111 pre_p
, lhs_addr
, lhs_var
,
16112 target_expr
, true, depth
);
16113 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1),
16114 pre_p
, lhs_addr
, lhs_var
,
16115 target_expr
, rhs
, depth
);
16121 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
16124 return goa_stabilize_expr (&expr
, pre_p
, lhs_addr
, lhs_var
,
16125 target_expr
, rhs
, depth
);
16127 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
,
16128 target_expr
, rhs
, depth
);
16130 if (!goa_stabilize_expr (&TREE_OPERAND (expr
, 0), NULL
, lhs_addr
,
16131 lhs_var
, target_expr
, true, depth
))
16133 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
16134 lhs_addr
, lhs_var
, target_expr
, true,
16136 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
16137 lhs_addr
, lhs_var
, target_expr
, true,
16139 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 2), pre_p
,
16140 lhs_addr
, lhs_var
, target_expr
, true,
16144 if (TARGET_EXPR_INITIAL (expr
))
16146 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
,
16147 lhs_var
, target_expr
, true,
16150 if (expr
== target_expr
)
16154 saw_lhs
= goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr
),
16155 pre_p
, lhs_addr
, lhs_var
,
16156 target_expr
, true, depth
);
16157 if (saw_lhs
&& target_expr
== NULL_TREE
&& pre_p
)
16158 target_expr
= expr
;
16166 case tcc_reference
:
16167 if (TREE_CODE (expr
) == BIT_FIELD_REF
16168 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
16169 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
16170 lhs_addr
, lhs_var
, target_expr
, true,
16174 if (TREE_CODE (expr
) == CALL_EXPR
)
16176 if (tree fndecl
= get_callee_fndecl (expr
))
16177 if (fndecl_built_in_p (fndecl
, BUILT_IN_CLEAR_PADDING
,
16180 int nargs
= call_expr_nargs (expr
);
16181 for (int i
= 0; i
< nargs
; i
++)
16182 saw_lhs
|= goa_stabilize_expr (&CALL_EXPR_ARG (expr
, i
),
16183 pre_p
, lhs_addr
, lhs_var
,
16184 target_expr
, true, depth
);
16193 if (saw_lhs
== 0 && pre_p
)
16195 enum gimplify_status gs
;
16196 if (TREE_CODE (expr
) == CALL_EXPR
&& VOID_TYPE_P (TREE_TYPE (expr
)))
16198 gimplify_stmt (&expr
, pre_p
);
16202 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
16204 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
);
16205 if (gs
!= GS_ALL_DONE
)
16212 /* Gimplify an OMP_ATOMIC statement. */
16214 static enum gimplify_status
16215 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
16217 tree addr
= TREE_OPERAND (*expr_p
, 0);
16218 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
16219 ? NULL
: TREE_OPERAND (*expr_p
, 1);
16220 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
16222 gomp_atomic_load
*loadstmt
;
16223 gomp_atomic_store
*storestmt
;
16224 tree target_expr
= NULL_TREE
;
16226 tmp_load
= create_tmp_reg (type
);
16228 && goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
, target_expr
,
16232 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
16236 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
,
16237 OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
16238 gimplify_seq_add_stmt (pre_p
, loadstmt
);
16241 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
16242 representatives. Use BIT_FIELD_REF on the lhs instead. */
16244 if (TREE_CODE (rhs
) == COND_EXPR
)
16245 rhsarg
= TREE_OPERAND (rhs
, 1);
16246 if (TREE_CODE (rhsarg
) == BIT_INSERT_EXPR
16247 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load
)))
16249 tree bitpos
= TREE_OPERAND (rhsarg
, 2);
16250 tree op1
= TREE_OPERAND (rhsarg
, 1);
16252 tree tmp_store
= tmp_load
;
16253 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_CAPTURE_OLD
)
16254 tmp_store
= get_initialized_tmp_var (tmp_load
, pre_p
);
16255 if (INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
16256 bitsize
= bitsize_int (TYPE_PRECISION (TREE_TYPE (op1
)));
16258 bitsize
= TYPE_SIZE (TREE_TYPE (op1
));
16259 gcc_assert (TREE_OPERAND (rhsarg
, 0) == tmp_load
);
16260 tree t
= build2_loc (EXPR_LOCATION (rhsarg
),
16261 MODIFY_EXPR
, void_type_node
,
16262 build3_loc (EXPR_LOCATION (rhsarg
),
16263 BIT_FIELD_REF
, TREE_TYPE (op1
),
16264 tmp_store
, bitsize
, bitpos
), op1
);
16265 if (TREE_CODE (rhs
) == COND_EXPR
)
16266 t
= build3_loc (EXPR_LOCATION (rhs
), COND_EXPR
, void_type_node
,
16267 TREE_OPERAND (rhs
, 0), t
, void_node
);
16268 gimplify_and_add (t
, pre_p
);
16271 bool save_allow_rhs_cond_expr
= gimplify_ctxp
->allow_rhs_cond_expr
;
16272 if (TREE_CODE (rhs
) == COND_EXPR
)
16273 gimplify_ctxp
->allow_rhs_cond_expr
= true;
16274 enum gimplify_status gs
= gimplify_expr (&rhs
, pre_p
, NULL
,
16275 is_gimple_val
, fb_rvalue
);
16276 gimplify_ctxp
->allow_rhs_cond_expr
= save_allow_rhs_cond_expr
;
16277 if (gs
!= GS_ALL_DONE
)
16281 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
16284 = gimple_build_omp_atomic_store (rhs
, OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
16285 if (TREE_CODE (*expr_p
) != OMP_ATOMIC_READ
&& OMP_ATOMIC_WEAK (*expr_p
))
16287 gimple_omp_atomic_set_weak (loadstmt
);
16288 gimple_omp_atomic_set_weak (storestmt
);
16290 gimplify_seq_add_stmt (pre_p
, storestmt
);
16291 switch (TREE_CODE (*expr_p
))
16293 case OMP_ATOMIC_READ
:
16294 case OMP_ATOMIC_CAPTURE_OLD
:
16295 *expr_p
= tmp_load
;
16296 gimple_omp_atomic_set_need_value (loadstmt
);
16298 case OMP_ATOMIC_CAPTURE_NEW
:
16300 gimple_omp_atomic_set_need_value (storestmt
);
16307 return GS_ALL_DONE
;
16310 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
16311 body, and adding some EH bits. */
16313 static enum gimplify_status
16314 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
16316 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
16318 gtransaction
*trans_stmt
;
16319 gimple_seq body
= NULL
;
16322 /* Wrap the transaction body in a BIND_EXPR so we have a context
16323 where to put decls for OMP. */
16324 if (TREE_CODE (tbody
) != BIND_EXPR
)
16326 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
16327 TREE_SIDE_EFFECTS (bind
) = 1;
16328 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
16329 TRANSACTION_EXPR_BODY (expr
) = bind
;
16332 push_gimplify_context ();
16333 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
16335 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
16336 pop_gimplify_context (body_stmt
);
16338 trans_stmt
= gimple_build_transaction (body
);
16339 if (TRANSACTION_EXPR_OUTER (expr
))
16340 subcode
= GTMA_IS_OUTER
;
16341 else if (TRANSACTION_EXPR_RELAXED (expr
))
16342 subcode
= GTMA_IS_RELAXED
;
16343 gimple_transaction_set_subcode (trans_stmt
, subcode
);
16345 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
16353 *expr_p
= NULL_TREE
;
16354 return GS_ALL_DONE
;
16357 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
16358 is the OMP_BODY of the original EXPR (which has already been
16359 gimplified so it's not present in the EXPR).
16361 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
16364 gimplify_omp_ordered (tree expr
, gimple_seq body
)
16369 tree source_c
= NULL_TREE
;
16370 tree sink_c
= NULL_TREE
;
16372 if (gimplify_omp_ctxp
)
16374 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
16375 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
16376 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ())
16378 error_at (OMP_CLAUSE_LOCATION (c
),
16379 "%<ordered%> construct with %qs clause must be "
16380 "closely nested inside a loop with %<ordered%> clause",
16381 OMP_CLAUSE_DOACROSS_DEPEND (c
) ? "depend" : "doacross");
16384 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
16385 && OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SINK
)
16389 if (OMP_CLAUSE_DECL (c
) == NULL_TREE
)
16390 continue; /* omp_cur_iteration - 1 */
16391 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
16392 decls
&& TREE_CODE (decls
) == TREE_LIST
;
16393 decls
= TREE_CHAIN (decls
), ++i
)
16394 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
16396 else if (TREE_VALUE (decls
)
16397 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
16399 error_at (OMP_CLAUSE_LOCATION (c
),
16400 "variable %qE is not an iteration "
16401 "of outermost loop %d, expected %qE",
16402 TREE_VALUE (decls
), i
+ 1,
16403 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
16409 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
16410 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
16412 error_at (OMP_CLAUSE_LOCATION (c
),
16413 "number of variables in %qs clause with "
16414 "%<sink%> modifier does not match number of "
16415 "iteration variables",
16416 OMP_CLAUSE_DOACROSS_DEPEND (c
)
16417 ? "depend" : "doacross");
16421 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
16422 && OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SOURCE
)
16426 error_at (OMP_CLAUSE_LOCATION (c
),
16427 "more than one %qs clause with %<source%> "
16428 "modifier on an %<ordered%> construct",
16429 OMP_CLAUSE_DOACROSS_DEPEND (source_c
)
16430 ? "depend" : "doacross");
16437 if (source_c
&& sink_c
)
16439 error_at (OMP_CLAUSE_LOCATION (source_c
),
16440 "%qs clause with %<source%> modifier specified "
16441 "together with %qs clauses with %<sink%> modifier "
16442 "on the same construct",
16443 OMP_CLAUSE_DOACROSS_DEPEND (source_c
) ? "depend" : "doacross",
16444 OMP_CLAUSE_DOACROSS_DEPEND (sink_c
) ? "depend" : "doacross");
16449 return gimple_build_nop ();
16450 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
16453 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16454 expression produces a value to be used as an operand inside a GIMPLE
16455 statement, the value will be stored back in *EXPR_P. This value will
16456 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16457 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16458 emitted in PRE_P and POST_P.
16460 Additionally, this process may overwrite parts of the input
16461 expression during gimplification. Ideally, it should be
16462 possible to do non-destructive gimplification.
16464 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16465 the expression needs to evaluate to a value to be used as
16466 an operand in a GIMPLE statement, this value will be stored in
16467 *EXPR_P on exit. This happens when the caller specifies one
16468 of fb_lvalue or fb_rvalue fallback flags.
16470 PRE_P will contain the sequence of GIMPLE statements corresponding
16471 to the evaluation of EXPR and all the side-effects that must
16472 be executed before the main expression. On exit, the last
16473 statement of PRE_P is the core statement being gimplified. For
16474 instance, when gimplifying 'if (++a)' the last statement in
16475 PRE_P will be 'if (t.1)' where t.1 is the result of
16476 pre-incrementing 'a'.
16478 POST_P will contain the sequence of GIMPLE statements corresponding
16479 to the evaluation of all the side-effects that must be executed
16480 after the main expression. If this is NULL, the post
16481 side-effects are stored at the end of PRE_P.
16483 The reason why the output is split in two is to handle post
16484 side-effects explicitly. In some cases, an expression may have
16485 inner and outer post side-effects which need to be emitted in
16486 an order different from the one given by the recursive
16487 traversal. For instance, for the expression (*p--)++ the post
16488 side-effects of '--' must actually occur *after* the post
16489 side-effects of '++'. However, gimplification will first visit
16490 the inner expression, so if a separate POST sequence was not
16491 used, the resulting sequence would be:
16498 However, the post-decrement operation in line #2 must not be
16499 evaluated until after the store to *p at line #4, so the
16500 correct sequence should be:
16507 So, by specifying a separate post queue, it is possible
16508 to emit the post side-effects in the correct order.
16509 If POST_P is NULL, an internal queue will be used. Before
16510 returning to the caller, the sequence POST_P is appended to
16511 the main output sequence PRE_P.
16513 GIMPLE_TEST_F points to a function that takes a tree T and
16514 returns nonzero if T is in the GIMPLE form requested by the
16515 caller. The GIMPLE predicates are in gimple.cc.
16517 FALLBACK tells the function what sort of a temporary we want if
16518 gimplification cannot produce an expression that complies with
16521 fb_none means that no temporary should be generated
16522 fb_rvalue means that an rvalue is OK to generate
16523 fb_lvalue means that an lvalue is OK to generate
16524 fb_either means that either is OK, but an lvalue is preferable.
16525 fb_mayfail means that gimplification may fail (in which case
16526 GS_ERROR will be returned)
16528 The return value is either GS_ERROR or GS_ALL_DONE, since this
16529 function iterates until EXPR is completely gimplified or an error
16532 enum gimplify_status
16533 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
16534 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
16537 gimple_seq internal_pre
= NULL
;
16538 gimple_seq internal_post
= NULL
;
16541 location_t saved_location
;
16542 enum gimplify_status ret
;
16543 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
16546 save_expr
= *expr_p
;
16547 if (save_expr
== NULL_TREE
)
16548 return GS_ALL_DONE
;
16550 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16551 is_statement
= gimple_test_f
== is_gimple_stmt
;
16553 gcc_assert (pre_p
);
16555 /* Consistency checks. */
16556 if (gimple_test_f
== is_gimple_reg
)
16557 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
16558 else if (gimple_test_f
== is_gimple_val
16559 || gimple_test_f
== is_gimple_call_addr
16560 || gimple_test_f
== is_gimple_condexpr_for_cond
16561 || gimple_test_f
== is_gimple_mem_rhs
16562 || gimple_test_f
== is_gimple_mem_rhs_or_call
16563 || gimple_test_f
== is_gimple_reg_rhs
16564 || gimple_test_f
== is_gimple_reg_rhs_or_call
16565 || gimple_test_f
== is_gimple_asm_val
16566 || gimple_test_f
== is_gimple_mem_ref_addr
)
16567 gcc_assert (fallback
& fb_rvalue
);
16568 else if (gimple_test_f
== is_gimple_min_lval
16569 || gimple_test_f
== is_gimple_lvalue
)
16570 gcc_assert (fallback
& fb_lvalue
);
16571 else if (gimple_test_f
== is_gimple_addressable
)
16572 gcc_assert (fallback
& fb_either
);
16573 else if (gimple_test_f
== is_gimple_stmt
)
16574 gcc_assert (fallback
== fb_none
);
16577 /* We should have recognized the GIMPLE_TEST_F predicate to
16578 know what kind of fallback to use in case a temporary is
16579 needed to hold the value or address of *EXPR_P. */
16580 gcc_unreachable ();
16583 /* We used to check the predicate here and return immediately if it
16584 succeeds. This is wrong; the design is for gimplification to be
16585 idempotent, and for the predicates to only test for valid forms, not
16586 whether they are fully simplified. */
16588 pre_p
= &internal_pre
;
16590 if (post_p
== NULL
)
16591 post_p
= &internal_post
;
16593 /* Remember the last statements added to PRE_P and POST_P. Every
16594 new statement added by the gimplification helpers needs to be
16595 annotated with location information. To centralize the
16596 responsibility, we remember the last statement that had been
16597 added to both queues before gimplifying *EXPR_P. If
16598 gimplification produces new statements in PRE_P and POST_P, those
16599 statements will be annotated with the same location information
16601 pre_last_gsi
= gsi_last (*pre_p
);
16602 post_last_gsi
= gsi_last (*post_p
);
16604 saved_location
= input_location
;
16605 if (save_expr
!= error_mark_node
16606 && EXPR_HAS_LOCATION (*expr_p
))
16607 input_location
= EXPR_LOCATION (*expr_p
);
16609 /* Loop over the specific gimplifiers until the toplevel node
16610 remains the same. */
16613 /* Strip away as many useless type conversions as possible
16614 at the toplevel. */
16615 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
16617 /* Remember the expr. */
16618 save_expr
= *expr_p
;
16620 /* Die, die, die, my darling. */
16621 if (error_operand_p (save_expr
))
16627 /* Do any language-specific gimplification. */
16628 ret
= ((enum gimplify_status
)
16629 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
16632 if (*expr_p
== NULL_TREE
)
16634 if (*expr_p
!= save_expr
)
16637 else if (ret
!= GS_UNHANDLED
)
16640 /* Make sure that all the cases set 'ret' appropriately. */
16641 ret
= GS_UNHANDLED
;
16642 switch (TREE_CODE (*expr_p
))
16644 /* First deal with the special cases. */
16646 case POSTINCREMENT_EXPR
:
16647 case POSTDECREMENT_EXPR
:
16648 case PREINCREMENT_EXPR
:
16649 case PREDECREMENT_EXPR
:
16650 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
16651 fallback
!= fb_none
,
16652 TREE_TYPE (*expr_p
));
16655 case VIEW_CONVERT_EXPR
:
16656 if ((fallback
& fb_rvalue
)
16657 && is_gimple_reg_type (TREE_TYPE (*expr_p
))
16658 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
16660 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
16661 post_p
, is_gimple_val
, fb_rvalue
);
16662 recalculate_side_effects (*expr_p
);
16668 case ARRAY_RANGE_REF
:
16669 case REALPART_EXPR
:
16670 case IMAGPART_EXPR
:
16671 case COMPONENT_REF
:
16672 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
16673 fallback
? fallback
: fb_rvalue
);
16677 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
16679 /* C99 code may assign to an array in a structure value of a
16680 conditional expression, and this has undefined behavior
16681 only on execution, so create a temporary if an lvalue is
16683 if (fallback
== fb_lvalue
)
16685 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
16686 mark_addressable (*expr_p
);
16692 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
16694 /* C99 code may assign to an array in a structure returned
16695 from a function, and this has undefined behavior only on
16696 execution, so create a temporary if an lvalue is
16698 if (fallback
== fb_lvalue
)
16700 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
16701 mark_addressable (*expr_p
);
16707 gcc_unreachable ();
16709 case COMPOUND_EXPR
:
16710 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
16713 case COMPOUND_LITERAL_EXPR
:
16714 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
16715 gimple_test_f
, fallback
);
16720 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
16721 fallback
!= fb_none
);
16724 case TRUTH_ANDIF_EXPR
:
16725 case TRUTH_ORIF_EXPR
:
16727 /* Preserve the original type of the expression and the
16728 source location of the outer expression. */
16729 tree org_type
= TREE_TYPE (*expr_p
);
16730 *expr_p
= gimple_boolify (*expr_p
);
16731 *expr_p
= build3_loc (input_location
, COND_EXPR
,
16735 org_type
, boolean_true_node
),
16738 org_type
, boolean_false_node
));
16743 case TRUTH_NOT_EXPR
:
16745 tree type
= TREE_TYPE (*expr_p
);
16746 /* The parsers are careful to generate TRUTH_NOT_EXPR
16747 only with operands that are always zero or one.
16748 We do not fold here but handle the only interesting case
16749 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
16750 *expr_p
= gimple_boolify (*expr_p
);
16751 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
16752 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
16753 TREE_TYPE (*expr_p
),
16754 TREE_OPERAND (*expr_p
, 0));
16756 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
16757 TREE_TYPE (*expr_p
),
16758 TREE_OPERAND (*expr_p
, 0),
16759 build_int_cst (TREE_TYPE (*expr_p
), 1));
16760 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
16761 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
16767 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
16770 case ANNOTATE_EXPR
:
16772 tree cond
= TREE_OPERAND (*expr_p
, 0);
16773 tree kind
= TREE_OPERAND (*expr_p
, 1);
16774 tree data
= TREE_OPERAND (*expr_p
, 2);
16775 tree type
= TREE_TYPE (cond
);
16776 if (!INTEGRAL_TYPE_P (type
))
16782 tree tmp
= create_tmp_var (type
);
16783 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
16785 = gimple_build_call_internal (IFN_ANNOTATE
, 3, cond
, kind
, data
);
16786 gimple_call_set_lhs (call
, tmp
);
16787 gimplify_seq_add_stmt (pre_p
, call
);
16794 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
16798 if (IS_EMPTY_STMT (*expr_p
))
16804 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
16805 || fallback
== fb_none
)
16807 /* Just strip a conversion to void (or in void context) and
16809 *expr_p
= TREE_OPERAND (*expr_p
, 0);
16814 ret
= gimplify_conversion (expr_p
);
16815 if (ret
== GS_ERROR
)
16817 if (*expr_p
!= save_expr
)
16821 case FIX_TRUNC_EXPR
:
16822 /* unary_expr: ... | '(' cast ')' val | ... */
16823 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
16824 is_gimple_val
, fb_rvalue
);
16825 recalculate_side_effects (*expr_p
);
16830 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
16831 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
16832 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
16834 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
16835 if (*expr_p
!= save_expr
)
16841 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
16842 is_gimple_reg
, fb_rvalue
);
16843 if (ret
== GS_ERROR
)
16846 recalculate_side_effects (*expr_p
);
16847 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
16848 TREE_TYPE (*expr_p
),
16849 TREE_OPERAND (*expr_p
, 0),
16850 build_int_cst (saved_ptr_type
, 0));
16851 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
16852 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
16857 /* We arrive here through the various re-gimplification paths. */
16859 /* First try re-folding the whole thing. */
16860 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
16861 TREE_OPERAND (*expr_p
, 0),
16862 TREE_OPERAND (*expr_p
, 1));
16865 REF_REVERSE_STORAGE_ORDER (tmp
)
16866 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
16868 recalculate_side_effects (*expr_p
);
16872 /* Avoid re-gimplifying the address operand if it is already
16873 in suitable form. Re-gimplifying would mark the address
16874 operand addressable. Always gimplify when not in SSA form
16875 as we still may have to gimplify decls with value-exprs. */
16876 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
16877 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
16879 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
16880 is_gimple_mem_ref_addr
, fb_rvalue
);
16881 if (ret
== GS_ERROR
)
16884 recalculate_side_effects (*expr_p
);
16888 /* Constants need not be gimplified. */
16895 /* Drop the overflow flag on constants, we do not want
16896 that in the GIMPLE IL. */
16897 if (TREE_OVERFLOW_P (*expr_p
))
16898 *expr_p
= drop_tree_overflow (*expr_p
);
16903 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16904 CONST_DECL node. Otherwise the decl is replaceable by its
16906 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16907 if (fallback
& fb_lvalue
)
16911 *expr_p
= DECL_INITIAL (*expr_p
);
16917 ret
= gimplify_decl_expr (expr_p
, pre_p
);
16921 ret
= gimplify_bind_expr (expr_p
, pre_p
);
16925 ret
= gimplify_loop_expr (expr_p
, pre_p
);
16929 ret
= gimplify_switch_expr (expr_p
, pre_p
);
16933 ret
= gimplify_exit_expr (expr_p
);
16937 /* If the target is not LABEL, then it is a computed jump
16938 and the target needs to be gimplified. */
16939 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
16941 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
16942 NULL
, is_gimple_val
, fb_rvalue
);
16943 if (ret
== GS_ERROR
)
16946 gimplify_seq_add_stmt (pre_p
,
16947 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
16952 gimplify_seq_add_stmt (pre_p
,
16953 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
16954 PREDICT_EXPR_OUTCOME (*expr_p
)));
16959 ret
= gimplify_label_expr (expr_p
, pre_p
);
16960 label
= LABEL_EXPR_LABEL (*expr_p
);
16961 gcc_assert (decl_function_context (label
) == current_function_decl
);
16963 /* If the label is used in a goto statement, or address of the label
16964 is taken, we need to unpoison all variables that were seen so far.
16965 Doing so would prevent us from reporting false positives. */
16966 if (asan_poisoned_variables
16967 && asan_used_labels
!= NULL
16968 && asan_used_labels
->contains (label
)
16969 && !gimplify_omp_ctxp
)
16970 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
16973 case CASE_LABEL_EXPR
:
16974 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
16976 if (gimplify_ctxp
->live_switch_vars
)
16977 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
16982 ret
= gimplify_return_expr (*expr_p
, pre_p
);
16986 /* Don't reduce this in place; let gimplify_init_constructor work its
16987 magic. But if we're just elaborating this for side effects, just
16988 gimplify any element that has side-effects. */
16989 if (fallback
== fb_none
)
16991 unsigned HOST_WIDE_INT ix
;
16993 tree temp
= NULL_TREE
;
16994 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
16995 if (TREE_SIDE_EFFECTS (val
))
16996 append_to_statement_list (val
, &temp
);
16999 ret
= temp
? GS_OK
: GS_ALL_DONE
;
17001 /* C99 code may assign to an array in a constructed
17002 structure or union, and this has undefined behavior only
17003 on execution, so create a temporary if an lvalue is
17005 else if (fallback
== fb_lvalue
)
17007 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
17008 mark_addressable (*expr_p
);
17015 /* The following are special cases that are not handled by the
17016 original GIMPLE grammar. */
17018 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
17021 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
17024 case BIT_FIELD_REF
:
17025 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
17026 post_p
, is_gimple_lvalue
, fb_either
);
17027 recalculate_side_effects (*expr_p
);
17030 case TARGET_MEM_REF
:
17032 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
17034 if (TMR_BASE (*expr_p
))
17035 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
17036 post_p
, is_gimple_mem_ref_addr
, fb_either
);
17037 if (TMR_INDEX (*expr_p
))
17038 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
17039 post_p
, is_gimple_val
, fb_rvalue
);
17040 if (TMR_INDEX2 (*expr_p
))
17041 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
17042 post_p
, is_gimple_val
, fb_rvalue
);
17043 /* TMR_STEP and TMR_OFFSET are always integer constants. */
17044 ret
= MIN (r0
, r1
);
17048 case NON_LVALUE_EXPR
:
17049 /* This should have been stripped above. */
17050 gcc_unreachable ();
17053 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
17056 case TRY_FINALLY_EXPR
:
17057 case TRY_CATCH_EXPR
:
17059 gimple_seq eval
, cleanup
;
17062 /* Calls to destructors are generated automatically in FINALLY/CATCH
17063 block. They should have location as UNKNOWN_LOCATION. However,
17064 gimplify_call_expr will reset these call stmts to input_location
17065 if it finds stmt's location is unknown. To prevent resetting for
17066 destructors, we set the input_location to unknown.
17067 Note that this only affects the destructor calls in FINALLY/CATCH
17068 block, and will automatically reset to its original value by the
17069 end of gimplify_expr. */
17070 input_location
= UNKNOWN_LOCATION
;
17071 eval
= cleanup
= NULL
;
17072 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
17073 if (TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
17074 && TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == EH_ELSE_EXPR
)
17076 gimple_seq n
= NULL
, e
= NULL
;
17077 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
17079 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
17081 if (!gimple_seq_empty_p (n
) && !gimple_seq_empty_p (e
))
17083 geh_else
*stmt
= gimple_build_eh_else (n
, e
);
17084 gimple_seq_add_stmt (&cleanup
, stmt
);
17088 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
17089 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
17090 if (gimple_seq_empty_p (cleanup
))
17092 gimple_seq_add_seq (pre_p
, eval
);
17096 try_
= gimple_build_try (eval
, cleanup
,
17097 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
17098 ? GIMPLE_TRY_FINALLY
17099 : GIMPLE_TRY_CATCH
);
17100 if (EXPR_HAS_LOCATION (save_expr
))
17101 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
17102 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
17103 gimple_set_location (try_
, saved_location
);
17104 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
17105 gimple_try_set_catch_is_cleanup (try_
,
17106 TRY_CATCH_IS_CLEANUP (*expr_p
));
17107 gimplify_seq_add_stmt (pre_p
, try_
);
17112 case CLEANUP_POINT_EXPR
:
17113 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
17117 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
17123 gimple_seq handler
= NULL
;
17124 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
17125 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
17126 gimplify_seq_add_stmt (pre_p
, c
);
17131 case EH_FILTER_EXPR
:
17134 gimple_seq failure
= NULL
;
17136 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
17137 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
17138 copy_warning (ehf
, *expr_p
);
17139 gimplify_seq_add_stmt (pre_p
, ehf
);
17146 enum gimplify_status r0
, r1
;
17147 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
17148 post_p
, is_gimple_val
, fb_rvalue
);
17149 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
17150 post_p
, is_gimple_val
, fb_rvalue
);
17151 TREE_SIDE_EFFECTS (*expr_p
) = 0;
17152 ret
= MIN (r0
, r1
);
17157 /* We get here when taking the address of a label. We mark
17158 the label as "forced"; meaning it can never be removed and
17159 it is a potential target for any computed goto. */
17160 FORCED_LABEL (*expr_p
) = 1;
17164 case STATEMENT_LIST
:
17165 ret
= gimplify_statement_list (expr_p
, pre_p
);
17168 case WITH_SIZE_EXPR
:
17170 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
17171 post_p
== &internal_post
? NULL
: post_p
,
17172 gimple_test_f
, fallback
);
17173 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
17174 is_gimple_val
, fb_rvalue
);
17181 ret
= gimplify_var_or_parm_decl (expr_p
);
17185 /* When within an OMP context, notice uses of variables. */
17186 if (gimplify_omp_ctxp
)
17187 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
17191 case DEBUG_EXPR_DECL
:
17192 gcc_unreachable ();
17194 case DEBUG_BEGIN_STMT
:
17195 gimplify_seq_add_stmt (pre_p
,
17196 gimple_build_debug_begin_stmt
17197 (TREE_BLOCK (*expr_p
),
17198 EXPR_LOCATION (*expr_p
)));
17204 /* Allow callbacks into the gimplifier during optimization. */
17209 gimplify_omp_parallel (expr_p
, pre_p
);
17214 gimplify_omp_task (expr_p
, pre_p
);
17220 /* Temporarily disable into_ssa, as scan_omp_simd
17221 which calls copy_gimple_seq_and_replace_locals can't deal
17222 with SSA_NAMEs defined outside of the body properly. */
17223 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
17224 gimplify_ctxp
->into_ssa
= false;
17225 ret
= gimplify_omp_for (expr_p
, pre_p
);
17226 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
17231 case OMP_DISTRIBUTE
:
17234 ret
= gimplify_omp_for (expr_p
, pre_p
);
17238 ret
= gimplify_omp_loop (expr_p
, pre_p
);
17242 gimplify_oacc_cache (expr_p
, pre_p
);
17247 gimplify_oacc_declare (expr_p
, pre_p
);
17251 case OACC_HOST_DATA
:
17254 case OACC_PARALLEL
:
17260 case OMP_TARGET_DATA
:
17262 gimplify_omp_workshare (expr_p
, pre_p
);
17266 case OACC_ENTER_DATA
:
17267 case OACC_EXIT_DATA
:
17269 case OMP_TARGET_UPDATE
:
17270 case OMP_TARGET_ENTER_DATA
:
17271 case OMP_TARGET_EXIT_DATA
:
17272 gimplify_omp_target_update (expr_p
, pre_p
);
17277 case OMP_STRUCTURED_BLOCK
:
17284 gimple_seq body
= NULL
;
17286 bool saved_in_omp_construct
= in_omp_construct
;
17288 in_omp_construct
= true;
17289 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
17290 in_omp_construct
= saved_in_omp_construct
;
17291 switch (TREE_CODE (*expr_p
))
17294 g
= gimple_build_omp_section (body
);
17296 case OMP_STRUCTURED_BLOCK
:
17297 g
= gimple_build_omp_structured_block (body
);
17300 g
= gimple_build_omp_master (body
);
17303 g
= gimplify_omp_ordered (*expr_p
, body
);
17304 if (OMP_BODY (*expr_p
) == NULL_TREE
17305 && gimple_code (g
) == GIMPLE_OMP_ORDERED
)
17306 gimple_omp_ordered_standalone (g
);
17309 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p
),
17310 pre_p
, ORT_WORKSHARE
, OMP_MASKED
);
17311 gimplify_adjust_omp_clauses (pre_p
, body
,
17312 &OMP_MASKED_CLAUSES (*expr_p
),
17314 g
= gimple_build_omp_masked (body
,
17315 OMP_MASKED_CLAUSES (*expr_p
));
17318 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
17319 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
17320 gimplify_adjust_omp_clauses (pre_p
, body
,
17321 &OMP_CRITICAL_CLAUSES (*expr_p
),
17323 g
= gimple_build_omp_critical (body
,
17324 OMP_CRITICAL_NAME (*expr_p
),
17325 OMP_CRITICAL_CLAUSES (*expr_p
));
17328 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p
),
17329 pre_p
, ORT_WORKSHARE
, OMP_SCAN
);
17330 gimplify_adjust_omp_clauses (pre_p
, body
,
17331 &OMP_SCAN_CLAUSES (*expr_p
),
17333 g
= gimple_build_omp_scan (body
, OMP_SCAN_CLAUSES (*expr_p
));
17336 gcc_unreachable ();
17338 gimplify_seq_add_stmt (pre_p
, g
);
17343 case OMP_TASKGROUP
:
17345 gimple_seq body
= NULL
;
17347 tree
*pclauses
= &OMP_TASKGROUP_CLAUSES (*expr_p
);
17348 bool saved_in_omp_construct
= in_omp_construct
;
17349 gimplify_scan_omp_clauses (pclauses
, pre_p
, ORT_TASKGROUP
,
17351 gimplify_adjust_omp_clauses (pre_p
, NULL
, pclauses
, OMP_TASKGROUP
);
17353 in_omp_construct
= true;
17354 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
17355 in_omp_construct
= saved_in_omp_construct
;
17356 gimple_seq cleanup
= NULL
;
17357 tree fn
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
17358 gimple
*g
= gimple_build_call (fn
, 0);
17359 gimple_seq_add_stmt (&cleanup
, g
);
17360 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
17362 gimple_seq_add_stmt (&body
, g
);
17363 g
= gimple_build_omp_taskgroup (body
, *pclauses
);
17364 gimplify_seq_add_stmt (pre_p
, g
);
17370 case OMP_ATOMIC_READ
:
17371 case OMP_ATOMIC_CAPTURE_OLD
:
17372 case OMP_ATOMIC_CAPTURE_NEW
:
17373 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
17376 case TRANSACTION_EXPR
:
17377 ret
= gimplify_transaction (expr_p
, pre_p
);
17380 case TRUTH_AND_EXPR
:
17381 case TRUTH_OR_EXPR
:
17382 case TRUTH_XOR_EXPR
:
17384 tree orig_type
= TREE_TYPE (*expr_p
);
17385 tree new_type
, xop0
, xop1
;
17386 *expr_p
= gimple_boolify (*expr_p
);
17387 new_type
= TREE_TYPE (*expr_p
);
17388 if (!useless_type_conversion_p (orig_type
, new_type
))
17390 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
17395 /* Boolified binary truth expressions are semantically equivalent
17396 to bitwise binary expressions. Canonicalize them to the
17397 bitwise variant. */
17398 switch (TREE_CODE (*expr_p
))
17400 case TRUTH_AND_EXPR
:
17401 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
17403 case TRUTH_OR_EXPR
:
17404 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
17406 case TRUTH_XOR_EXPR
:
17407 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
17412 /* Now make sure that operands have compatible type to
17413 expression's new_type. */
17414 xop0
= TREE_OPERAND (*expr_p
, 0);
17415 xop1
= TREE_OPERAND (*expr_p
, 1);
17416 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
17417 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
17420 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
17421 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
17424 /* Continue classified as tcc_binary. */
17428 case VEC_COND_EXPR
:
17431 case VEC_PERM_EXPR
:
17432 /* Classified as tcc_expression. */
17435 case BIT_INSERT_EXPR
:
17436 /* Argument 3 is a constant. */
17439 case POINTER_PLUS_EXPR
:
17441 enum gimplify_status r0
, r1
;
17442 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
17443 post_p
, is_gimple_val
, fb_rvalue
);
17444 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
17445 post_p
, is_gimple_val
, fb_rvalue
);
17446 recalculate_side_effects (*expr_p
);
17447 ret
= MIN (r0
, r1
);
17452 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
17454 case tcc_comparison
:
17455 /* Handle comparison of objects of non scalar mode aggregates
17456 with a call to memcmp. It would be nice to only have to do
17457 this for variable-sized objects, but then we'd have to allow
17458 the same nest of reference nodes we allow for MODIFY_EXPR and
17459 that's too complex.
17461 Compare scalar mode aggregates as scalar mode values. Using
17462 memcmp for them would be very inefficient at best, and is
17463 plain wrong if bitfields are involved. */
17464 if (error_operand_p (TREE_OPERAND (*expr_p
, 1)))
17468 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
17470 /* Vector comparisons need no boolification. */
17471 if (TREE_CODE (type
) == VECTOR_TYPE
)
17473 else if (!AGGREGATE_TYPE_P (type
))
17475 tree org_type
= TREE_TYPE (*expr_p
);
17476 *expr_p
= gimple_boolify (*expr_p
);
17477 if (!useless_type_conversion_p (org_type
,
17478 TREE_TYPE (*expr_p
)))
17480 *expr_p
= fold_convert_loc (input_location
,
17481 org_type
, *expr_p
);
17487 else if (TYPE_MODE (type
) != BLKmode
)
17488 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
17490 ret
= gimplify_variable_sized_compare (expr_p
);
17494 /* If *EXPR_P does not need to be special-cased, handle it
17495 according to its class. */
17497 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
17498 post_p
, is_gimple_val
, fb_rvalue
);
17504 enum gimplify_status r0
, r1
;
17506 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
17507 post_p
, is_gimple_val
, fb_rvalue
);
17508 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
17509 post_p
, is_gimple_val
, fb_rvalue
);
17511 ret
= MIN (r0
, r1
);
17517 enum gimplify_status r0
, r1
, r2
;
17519 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
17520 post_p
, is_gimple_val
, fb_rvalue
);
17521 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
17522 post_p
, is_gimple_val
, fb_rvalue
);
17523 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
17524 post_p
, is_gimple_val
, fb_rvalue
);
17526 ret
= MIN (MIN (r0
, r1
), r2
);
17530 case tcc_declaration
:
17533 goto dont_recalculate
;
17536 gcc_unreachable ();
17539 recalculate_side_effects (*expr_p
);
17545 gcc_assert (*expr_p
|| ret
!= GS_OK
);
17547 while (ret
== GS_OK
);
17549 /* If we encountered an error_mark somewhere nested inside, either
17550 stub out the statement or propagate the error back out. */
17551 if (ret
== GS_ERROR
)
17558 /* This was only valid as a return value from the langhook, which
17559 we handled. Make sure it doesn't escape from any other context. */
17560 gcc_assert (ret
!= GS_UNHANDLED
);
17562 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
17564 /* We aren't looking for a value, and we don't have a valid
17565 statement. If it doesn't have side-effects, throw it away.
17566 We can also get here with code such as "*&&L;", where L is
17567 a LABEL_DECL that is marked as FORCED_LABEL. */
17568 if (TREE_CODE (*expr_p
) == LABEL_DECL
17569 || !TREE_SIDE_EFFECTS (*expr_p
))
17571 else if (!TREE_THIS_VOLATILE (*expr_p
))
17573 /* This is probably a _REF that contains something nested that
17574 has side effects. Recurse through the operands to find it. */
17575 enum tree_code code
= TREE_CODE (*expr_p
);
17579 case COMPONENT_REF
:
17580 case REALPART_EXPR
:
17581 case IMAGPART_EXPR
:
17582 case VIEW_CONVERT_EXPR
:
17583 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
17584 gimple_test_f
, fallback
);
17588 case ARRAY_RANGE_REF
:
17589 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
17590 gimple_test_f
, fallback
);
17591 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
17592 gimple_test_f
, fallback
);
17596 /* Anything else with side-effects must be converted to
17597 a valid statement before we get here. */
17598 gcc_unreachable ();
17603 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
17604 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
17605 && !is_empty_type (TREE_TYPE (*expr_p
)))
17607 /* Historically, the compiler has treated a bare reference
17608 to a non-BLKmode volatile lvalue as forcing a load. */
17609 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
17611 /* Normally, we do not want to create a temporary for a
17612 TREE_ADDRESSABLE type because such a type should not be
17613 copied by bitwise-assignment. However, we make an
17614 exception here, as all we are doing here is ensuring that
17615 we read the bytes that make up the type. We use
17616 create_tmp_var_raw because create_tmp_var will abort when
17617 given a TREE_ADDRESSABLE type. */
17618 tree tmp
= create_tmp_var_raw (type
, "vol");
17619 gimple_add_tmp_var (tmp
);
17620 gimplify_assign (tmp
, *expr_p
, pre_p
);
17624 /* We can't do anything useful with a volatile reference to
17625 an incomplete type, so just throw it away. Likewise for
17626 a BLKmode type, since any implicit inner load should
17627 already have been turned into an explicit one by the
17628 gimplification process. */
17632 /* If we are gimplifying at the statement level, we're done. Tack
17633 everything together and return. */
17634 if (fallback
== fb_none
|| is_statement
)
17636 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17637 it out for GC to reclaim it. */
17638 *expr_p
= NULL_TREE
;
17640 if (!gimple_seq_empty_p (internal_pre
)
17641 || !gimple_seq_empty_p (internal_post
))
17643 gimplify_seq_add_seq (&internal_pre
, internal_post
);
17644 gimplify_seq_add_seq (pre_p
, internal_pre
);
17647 /* The result of gimplifying *EXPR_P is going to be the last few
17648 statements in *PRE_P and *POST_P. Add location information
17649 to all the statements that were added by the gimplification
17651 if (!gimple_seq_empty_p (*pre_p
))
17652 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
17654 if (!gimple_seq_empty_p (*post_p
))
17655 annotate_all_with_location_after (*post_p
, post_last_gsi
,
17661 #ifdef ENABLE_GIMPLE_CHECKING
17664 enum tree_code code
= TREE_CODE (*expr_p
);
17665 /* These expressions should already be in gimple IR form. */
17666 gcc_assert (code
!= MODIFY_EXPR
17667 && code
!= ASM_EXPR
17668 && code
!= BIND_EXPR
17669 && code
!= CATCH_EXPR
17670 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
17671 && code
!= EH_FILTER_EXPR
17672 && code
!= GOTO_EXPR
17673 && code
!= LABEL_EXPR
17674 && code
!= LOOP_EXPR
17675 && code
!= SWITCH_EXPR
17676 && code
!= TRY_FINALLY_EXPR
17677 && code
!= EH_ELSE_EXPR
17678 && code
!= OACC_PARALLEL
17679 && code
!= OACC_KERNELS
17680 && code
!= OACC_SERIAL
17681 && code
!= OACC_DATA
17682 && code
!= OACC_HOST_DATA
17683 && code
!= OACC_DECLARE
17684 && code
!= OACC_UPDATE
17685 && code
!= OACC_ENTER_DATA
17686 && code
!= OACC_EXIT_DATA
17687 && code
!= OACC_CACHE
17688 && code
!= OMP_CRITICAL
17690 && code
!= OACC_LOOP
17691 && code
!= OMP_MASTER
17692 && code
!= OMP_MASKED
17693 && code
!= OMP_TASKGROUP
17694 && code
!= OMP_ORDERED
17695 && code
!= OMP_PARALLEL
17696 && code
!= OMP_SCAN
17697 && code
!= OMP_SECTIONS
17698 && code
!= OMP_SECTION
17699 && code
!= OMP_STRUCTURED_BLOCK
17700 && code
!= OMP_SINGLE
17701 && code
!= OMP_SCOPE
);
17705 /* Otherwise we're gimplifying a subexpression, so the resulting
17706 value is interesting. If it's a valid operand that matches
17707 GIMPLE_TEST_F, we're done. Unless we are handling some
17708 post-effects internally; if that's the case, we need to copy into
17709 a temporary before adding the post-effects to POST_P. */
17710 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
17713 /* Otherwise, we need to create a new temporary for the gimplified
17716 /* We can't return an lvalue if we have an internal postqueue. The
17717 object the lvalue refers to would (probably) be modified by the
17718 postqueue; we need to copy the value out first, which means an
17720 if ((fallback
& fb_lvalue
)
17721 && gimple_seq_empty_p (internal_post
)
17722 && is_gimple_addressable (*expr_p
))
17724 /* An lvalue will do. Take the address of the expression, store it
17725 in a temporary, and replace the expression with an INDIRECT_REF of
17727 tree ref_alias_type
= reference_alias_ptr_type (*expr_p
);
17728 unsigned int ref_align
= get_object_alignment (*expr_p
);
17729 tree ref_type
= TREE_TYPE (*expr_p
);
17730 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
17731 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
17732 if (TYPE_ALIGN (ref_type
) != ref_align
)
17733 ref_type
= build_aligned_type (ref_type
, ref_align
);
17734 *expr_p
= build2 (MEM_REF
, ref_type
,
17735 tmp
, build_zero_cst (ref_alias_type
));
17737 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
17739 /* An rvalue will do. Assign the gimplified expression into a
17740 new temporary TMP and replace the original expression with
17741 TMP. First, make sure that the expression has a type so that
17742 it can be assigned into a temporary. */
17743 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
17744 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
17748 #ifdef ENABLE_GIMPLE_CHECKING
17749 if (!(fallback
& fb_mayfail
))
17751 fprintf (stderr
, "gimplification failed:\n");
17752 print_generic_expr (stderr
, *expr_p
);
17753 debug_tree (*expr_p
);
17754 internal_error ("gimplification failed");
17757 gcc_assert (fallback
& fb_mayfail
);
17759 /* If this is an asm statement, and the user asked for the
17760 impossible, don't die. Fail and let gimplify_asm_expr
17766 /* Make sure the temporary matches our predicate. */
17767 gcc_assert ((*gimple_test_f
) (*expr_p
));
17769 if (!gimple_seq_empty_p (internal_post
))
17771 annotate_all_with_location (internal_post
, input_location
);
17772 gimplify_seq_add_seq (pre_p
, internal_post
);
17776 input_location
= saved_location
;
17780 /* Like gimplify_expr but make sure the gimplified result is not itself
17781 a SSA name (but a decl if it were). Temporaries required by
17782 evaluating *EXPR_P may be still SSA names. */
17784 static enum gimplify_status
17785 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
17786 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
17789 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
17790 gimple_test_f
, fallback
);
17792 && TREE_CODE (*expr_p
) == SSA_NAME
)
17793 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
17797 /* Look through TYPE for variable-sized objects and gimplify each such
17798 size that we find. Add to LIST_P any statements generated. */
17801 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
17803 if (type
== NULL
|| type
== error_mark_node
)
17806 const bool ignored_p
17808 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
17809 && DECL_IGNORED_P (TYPE_NAME (type
));
17812 /* We first do the main variant, then copy into any other variants. */
17813 type
= TYPE_MAIN_VARIANT (type
);
17815 /* Avoid infinite recursion. */
17816 if (TYPE_SIZES_GIMPLIFIED (type
))
17819 TYPE_SIZES_GIMPLIFIED (type
) = 1;
17821 switch (TREE_CODE (type
))
17824 case ENUMERAL_TYPE
:
17827 case FIXED_POINT_TYPE
:
17828 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
17829 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
17831 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
17833 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
17834 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
17839 /* These types may not have declarations, so handle them here. */
17840 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
17841 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
17842 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
17843 with assigned stack slots, for -O1+ -g they should be tracked
17846 && TYPE_DOMAIN (type
)
17847 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
17849 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
17850 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
17851 DECL_IGNORED_P (t
) = 0;
17852 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
17853 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
17854 DECL_IGNORED_P (t
) = 0;
17860 case QUAL_UNION_TYPE
:
17861 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
17862 if (TREE_CODE (field
) == FIELD_DECL
)
17864 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
17865 /* Likewise, ensure variable offsets aren't removed. */
17867 && (t
= DECL_FIELD_OFFSET (field
))
17869 && DECL_ARTIFICIAL (t
))
17870 DECL_IGNORED_P (t
) = 0;
17871 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
17872 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
17873 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
17878 case REFERENCE_TYPE
:
17879 /* We used to recurse on the pointed-to type here, which turned out to
17880 be incorrect because its definition might refer to variables not
17881 yet initialized at this point if a forward declaration is involved.
17883 It was actually useful for anonymous pointed-to types to ensure
17884 that the sizes evaluation dominates every possible later use of the
17885 values. Restricting to such types here would be safe since there
17886 is no possible forward declaration around, but would introduce an
17887 undesirable middle-end semantic to anonymity. We then defer to
17888 front-ends the responsibility of ensuring that the sizes are
17889 evaluated both early and late enough, e.g. by attaching artificial
17890 type declarations to the tree. */
17897 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
17898 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
17900 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
17902 TYPE_SIZE (t
) = TYPE_SIZE (type
);
17903 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
17904 TYPE_SIZES_GIMPLIFIED (t
) = 1;
17908 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17909 a size or position, has had all of its SAVE_EXPRs evaluated.
17910 We add any required statements to *STMT_P. */
17913 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
17915 tree expr
= *expr_p
;
17917 /* We don't do anything if the value isn't there, is constant, or contains
17918 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
17919 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17920 will want to replace it with a new variable, but that will cause problems
17921 if this type is from outside the function. It's OK to have that here. */
17922 if (expr
== NULL_TREE
17923 || is_gimple_constant (expr
)
17925 || CONTAINS_PLACEHOLDER_P (expr
))
17928 *expr_p
= unshare_expr (expr
);
17930 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17931 if the def vanishes. */
17932 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
17934 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
17935 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
17936 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
17937 if (is_gimple_constant (*expr_p
))
17938 *expr_p
= get_initialized_tmp_var (*expr_p
, stmt_p
, NULL
, false);
17941 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
17942 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
17943 is true, also gimplify the parameters. */
17946 gimplify_body (tree fndecl
, bool do_parms
)
17948 location_t saved_location
= input_location
;
17949 gimple_seq parm_stmts
, parm_cleanup
= NULL
, seq
;
17950 gimple
*outer_stmt
;
17953 timevar_push (TV_TREE_GIMPLIFY
);
17955 init_tree_ssa (cfun
);
17957 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17959 default_rtl_profile ();
17961 gcc_assert (gimplify_ctxp
== NULL
);
17962 push_gimplify_context (true);
17964 if (flag_openacc
|| flag_openmp
)
17966 gcc_assert (gimplify_omp_ctxp
== NULL
);
17967 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
17968 gimplify_omp_ctxp
= new_omp_context (ORT_IMPLICIT_TARGET
);
17971 /* Unshare most shared trees in the body and in that of any nested functions.
17972 It would seem we don't have to do this for nested functions because
17973 they are supposed to be output and then the outer function gimplified
17974 first, but the g++ front end doesn't always do it that way. */
17975 unshare_body (fndecl
);
17976 unvisit_body (fndecl
);
17978 /* Make sure input_location isn't set to something weird. */
17979 input_location
= DECL_SOURCE_LOCATION (fndecl
);
17981 /* Resolve callee-copies. This has to be done before processing
17982 the body so that DECL_VALUE_EXPR gets processed correctly. */
17983 parm_stmts
= do_parms
? gimplify_parameters (&parm_cleanup
) : NULL
;
17985 /* Gimplify the function's body. */
17987 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
17988 outer_stmt
= gimple_seq_first_nondebug_stmt (seq
);
17991 outer_stmt
= gimple_build_nop ();
17992 gimplify_seq_add_stmt (&seq
, outer_stmt
);
17995 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17996 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17997 if (gimple_code (outer_stmt
) == GIMPLE_BIND
17998 && (gimple_seq_first_nondebug_stmt (seq
)
17999 == gimple_seq_last_nondebug_stmt (seq
)))
18001 outer_bind
= as_a
<gbind
*> (outer_stmt
);
18002 if (gimple_seq_first_stmt (seq
) != outer_stmt
18003 || gimple_seq_last_stmt (seq
) != outer_stmt
)
18005 /* If there are debug stmts before or after outer_stmt, move them
18006 inside of outer_bind body. */
18007 gimple_stmt_iterator gsi
= gsi_for_stmt (outer_stmt
, &seq
);
18008 gimple_seq second_seq
= NULL
;
18009 if (gimple_seq_first_stmt (seq
) != outer_stmt
18010 && gimple_seq_last_stmt (seq
) != outer_stmt
)
18012 second_seq
= gsi_split_seq_after (gsi
);
18013 gsi_remove (&gsi
, false);
18015 else if (gimple_seq_first_stmt (seq
) != outer_stmt
)
18016 gsi_remove (&gsi
, false);
18019 gsi_remove (&gsi
, false);
18023 gimple_seq_add_seq_without_update (&seq
,
18024 gimple_bind_body (outer_bind
));
18025 gimple_seq_add_seq_without_update (&seq
, second_seq
);
18026 gimple_bind_set_body (outer_bind
, seq
);
18030 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
18032 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
18034 /* If we had callee-copies statements, insert them at the beginning
18035 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
18036 if (!gimple_seq_empty_p (parm_stmts
))
18040 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
18043 gtry
*g
= gimple_build_try (parm_stmts
, parm_cleanup
,
18044 GIMPLE_TRY_FINALLY
);
18046 gimple_seq_add_stmt (&parm_stmts
, g
);
18048 gimple_bind_set_body (outer_bind
, parm_stmts
);
18050 for (parm
= DECL_ARGUMENTS (current_function_decl
);
18051 parm
; parm
= DECL_CHAIN (parm
))
18052 if (DECL_HAS_VALUE_EXPR_P (parm
))
18054 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
18055 DECL_IGNORED_P (parm
) = 0;
18059 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
18060 && gimplify_omp_ctxp
)
18062 delete_omp_context (gimplify_omp_ctxp
);
18063 gimplify_omp_ctxp
= NULL
;
18066 pop_gimplify_context (outer_bind
);
18067 gcc_assert (gimplify_ctxp
== NULL
);
18069 if (flag_checking
&& !seen_error ())
18070 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
18072 timevar_pop (TV_TREE_GIMPLIFY
);
18073 input_location
= saved_location
;
18078 typedef char *char_p
; /* For DEF_VEC_P. */
18080 /* Return whether we should exclude FNDECL from instrumentation. */
18083 flag_instrument_functions_exclude_p (tree fndecl
)
18087 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
18088 if (v
&& v
->length () > 0)
18094 name
= lang_hooks
.decl_printable_name (fndecl
, 1);
18095 FOR_EACH_VEC_ELT (*v
, i
, s
)
18096 if (strstr (name
, s
) != NULL
)
18100 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
18101 if (v
&& v
->length () > 0)
18107 name
= DECL_SOURCE_FILE (fndecl
);
18108 FOR_EACH_VEC_ELT (*v
, i
, s
)
18109 if (strstr (name
, s
) != NULL
)
18116 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
18117 If COND_VAR is not NULL, it is a boolean variable guarding the call to
18118 the instrumentation function. IF STMT is not NULL, it is a statement
18119 to be executed just before the call to the instrumentation function. */
18122 build_instrumentation_call (gimple_seq
*seq
, enum built_in_function fncode
,
18123 tree cond_var
, gimple
*stmt
)
18125 /* The instrumentation hooks aren't going to call the instrumented
18126 function and the address they receive is expected to be matchable
18127 against symbol addresses. Make sure we don't create a trampoline,
18128 in case the current function is nested. */
18129 tree this_fn_addr
= build_fold_addr_expr (current_function_decl
);
18130 TREE_NO_TRAMPOLINE (this_fn_addr
) = 1;
18132 tree label_true
, label_false
;
18135 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
18136 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
18137 gcond
*cond
= gimple_build_cond (EQ_EXPR
, cond_var
, boolean_false_node
,
18138 label_true
, label_false
);
18139 gimplify_seq_add_stmt (seq
, cond
);
18140 gimplify_seq_add_stmt (seq
, gimple_build_label (label_true
));
18141 gimplify_seq_add_stmt (seq
, gimple_build_predict (PRED_COLD_LABEL
,
18146 gimplify_seq_add_stmt (seq
, stmt
);
18148 tree x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
18149 gcall
*call
= gimple_build_call (x
, 1, integer_zero_node
);
18150 tree tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
18151 gimple_call_set_lhs (call
, tmp_var
);
18152 gimplify_seq_add_stmt (seq
, call
);
18153 x
= builtin_decl_implicit (fncode
);
18154 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
18155 gimplify_seq_add_stmt (seq
, call
);
18158 gimplify_seq_add_stmt (seq
, gimple_build_label (label_false
));
18161 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
18162 node for the function we want to gimplify.
18164 Return the sequence of GIMPLE statements corresponding to the body
18168 gimplify_function_tree (tree fndecl
)
18173 gcc_assert (!gimple_body (fndecl
));
18175 if (DECL_STRUCT_FUNCTION (fndecl
))
18176 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
18178 push_struct_function (fndecl
);
18180 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
18182 cfun
->curr_properties
|= PROP_gimple_lva
;
18184 if (asan_sanitize_use_after_scope ())
18185 asan_poisoned_variables
= new hash_set
<tree
> ();
18186 bind
= gimplify_body (fndecl
, true);
18187 if (asan_poisoned_variables
)
18189 delete asan_poisoned_variables
;
18190 asan_poisoned_variables
= NULL
;
18193 /* The tree body of the function is no longer needed, replace it
18194 with the new GIMPLE body. */
18196 gimple_seq_add_stmt (&seq
, bind
);
18197 gimple_set_body (fndecl
, seq
);
18199 /* If we're instrumenting function entry/exit, then prepend the call to
18200 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
18201 catch the exit hook. */
18202 /* ??? Add some way to ignore exceptions for this TFE. */
18203 if (flag_instrument_function_entry_exit
18204 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
18205 /* Do not instrument extern inline functions. */
18206 && !(DECL_DECLARED_INLINE_P (fndecl
)
18207 && DECL_EXTERNAL (fndecl
)
18208 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
18209 && !flag_instrument_functions_exclude_p (fndecl
))
18211 gimple_seq body
= NULL
, cleanup
= NULL
;
18215 /* If -finstrument-functions-once is specified, generate:
18217 static volatile bool C.0 = false;
18224 [call profiling enter function]
18227 without specific protection for data races. */
18228 if (flag_instrument_function_entry_exit
> 1)
18231 = build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
18233 create_tmp_var_name ("C"),
18234 boolean_type_node
);
18235 DECL_ARTIFICIAL (first_var
) = 1;
18236 DECL_IGNORED_P (first_var
) = 1;
18237 TREE_STATIC (first_var
) = 1;
18238 TREE_THIS_VOLATILE (first_var
) = 1;
18239 TREE_USED (first_var
) = 1;
18240 DECL_INITIAL (first_var
) = boolean_false_node
;
18241 varpool_node::add (first_var
);
18243 cond_var
= create_tmp_var (boolean_type_node
, "tmp_called");
18244 assign
= gimple_build_assign (cond_var
, first_var
);
18245 gimplify_seq_add_stmt (&body
, assign
);
18247 assign
= gimple_build_assign (first_var
, boolean_true_node
);
18252 cond_var
= NULL_TREE
;
18256 build_instrumentation_call (&body
, BUILT_IN_PROFILE_FUNC_ENTER
,
18259 /* If -finstrument-functions-once is specified, generate:
18262 [call profiling exit function]
18264 without specific protection for data races. */
18265 build_instrumentation_call (&cleanup
, BUILT_IN_PROFILE_FUNC_EXIT
,
18268 gimple
*tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
18269 gimplify_seq_add_stmt (&body
, tf
);
18270 gbind
*new_bind
= gimple_build_bind (NULL
, body
, NULL
);
18272 /* Replace the current function body with the body
18273 wrapped in the try/finally TF. */
18275 gimple_seq_add_stmt (&seq
, new_bind
);
18276 gimple_set_body (fndecl
, seq
);
18280 if (sanitize_flags_p (SANITIZE_THREAD
)
18281 && param_tsan_instrument_func_entry_exit
)
18283 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
18284 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
18285 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
18286 /* Replace the current function body with the body
18287 wrapped in the try/finally TF. */
18289 gimple_seq_add_stmt (&seq
, new_bind
);
18290 gimple_set_body (fndecl
, seq
);
18293 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
18294 cfun
->curr_properties
|= PROP_gimple_any
;
18298 dump_function (TDI_gimple
, fndecl
);
18301 /* Return a dummy expression of type TYPE in order to keep going after an
18305 dummy_object (tree type
)
18307 tree t
= build_int_cst (build_pointer_type (type
), 0);
18308 return build2 (MEM_REF
, type
, t
, t
);
18311 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
18312 builtin function, but a very special sort of operator. */
18314 enum gimplify_status
18315 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
,
18316 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
18318 tree promoted_type
, have_va_type
;
18319 tree valist
= TREE_OPERAND (*expr_p
, 0);
18320 tree type
= TREE_TYPE (*expr_p
);
18321 tree t
, tag
, aptag
;
18322 location_t loc
= EXPR_LOCATION (*expr_p
);
18324 /* Verify that valist is of the proper type. */
18325 have_va_type
= TREE_TYPE (valist
);
18326 if (have_va_type
== error_mark_node
)
18328 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
18329 if (have_va_type
== NULL_TREE
18330 && POINTER_TYPE_P (TREE_TYPE (valist
)))
18331 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
18333 = targetm
.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist
)));
18334 gcc_assert (have_va_type
!= NULL_TREE
);
18336 /* Generate a diagnostic for requesting data of a type that cannot
18337 be passed through `...' due to type promotion at the call site. */
18338 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
18341 static bool gave_help
;
18343 /* Use the expansion point to handle cases such as passing bool (defined
18344 in a system header) through `...'. */
18346 = expansion_point_location_if_in_system_header (loc
);
18348 /* Unfortunately, this is merely undefined, rather than a constraint
18349 violation, so we cannot make this an error. If this call is never
18350 executed, the program is still strictly conforming. */
18351 auto_diagnostic_group d
;
18352 warned
= warning_at (xloc
, 0,
18353 "%qT is promoted to %qT when passed through %<...%>",
18354 type
, promoted_type
);
18355 if (!gave_help
&& warned
)
18358 inform (xloc
, "(so you should pass %qT not %qT to %<va_arg%>)",
18359 promoted_type
, type
);
18362 /* We can, however, treat "undefined" any way we please.
18363 Call abort to encourage the user to fix the program. */
18365 inform (xloc
, "if this code is reached, the program will abort");
18366 /* Before the abort, allow the evaluation of the va_list
18367 expression to exit or longjmp. */
18368 gimplify_and_add (valist
, pre_p
);
18369 t
= build_call_expr_loc (loc
,
18370 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
18371 gimplify_and_add (t
, pre_p
);
18373 /* This is dead code, but go ahead and finish so that the
18374 mode of the result comes out right. */
18375 *expr_p
= dummy_object (type
);
18376 return GS_ALL_DONE
;
18379 tag
= build_int_cst (build_pointer_type (type
), 0);
18380 aptag
= build_int_cst (TREE_TYPE (valist
), 0);
18382 *expr_p
= build_call_expr_internal_loc (loc
, IFN_VA_ARG
, type
, 3,
18383 valist
, tag
, aptag
);
18385 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
18386 needs to be expanded. */
18387 cfun
->curr_properties
&= ~PROP_gimple_lva
;
18392 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
18394 DST/SRC are the destination and source respectively. You can pass
18395 ungimplified trees in DST or SRC, in which case they will be
18396 converted to a gimple operand if necessary.
18398 This function returns the newly created GIMPLE_ASSIGN tuple. */
18401 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
18403 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
18404 gimplify_and_add (t
, seq_p
);
18406 return gimple_seq_last_stmt (*seq_p
);
18410 gimplify_hasher::hash (const elt_t
*p
)
18413 return iterative_hash_expr (t
, 0);
18417 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
18421 enum tree_code code
= TREE_CODE (t1
);
18423 if (TREE_CODE (t2
) != code
18424 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
18427 if (!operand_equal_p (t1
, t2
, 0))
18430 /* Only allow them to compare equal if they also hash equal; otherwise
18431 results are nondeterminate, and we fail bootstrap comparison. */
18432 gcc_checking_assert (hash (p1
) == hash (p2
));