1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2022 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
40 #include "fold-const.h"
45 #include "gimple-iterator.h"
46 #include "gimple-fold.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
56 #include "tree-hash-traits.h"
57 #include "omp-general.h"
59 #include "gimple-low.h"
60 #include "gomp-constants.h"
61 #include "splay-tree.h"
62 #include "gimple-walk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
65 #include "stringpool.h"
69 #include "omp-offload.h"
71 #include "tree-nested.h"
73 /* Hash set of poisoned variables in a bind expr. */
74 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
76 enum gimplify_omp_var_data
79 GOVD_EXPLICIT
= 0x000002,
80 GOVD_SHARED
= 0x000004,
81 GOVD_PRIVATE
= 0x000008,
82 GOVD_FIRSTPRIVATE
= 0x000010,
83 GOVD_LASTPRIVATE
= 0x000020,
84 GOVD_REDUCTION
= 0x000040,
87 GOVD_DEBUG_PRIVATE
= 0x000200,
88 GOVD_PRIVATE_OUTER_REF
= 0x000400,
89 GOVD_LINEAR
= 0x000800,
90 GOVD_ALIGNED
= 0x001000,
92 /* Flag for GOVD_MAP: don't copy back. */
93 GOVD_MAP_TO_ONLY
= 0x002000,
95 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
96 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 0x004000,
98 GOVD_MAP_0LEN_ARRAY
= 0x008000,
100 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
101 GOVD_MAP_ALWAYS_TO
= 0x010000,
103 /* Flag for shared vars that are or might be stored to in the region. */
104 GOVD_WRITTEN
= 0x020000,
106 /* Flag for GOVD_MAP, if it is a forced mapping. */
107 GOVD_MAP_FORCE
= 0x040000,
109 /* Flag for GOVD_MAP: must be present already. */
110 GOVD_MAP_FORCE_PRESENT
= 0x080000,
112 /* Flag for GOVD_MAP: only allocate. */
113 GOVD_MAP_ALLOC_ONLY
= 0x100000,
115 /* Flag for GOVD_MAP: only copy back. */
116 GOVD_MAP_FROM_ONLY
= 0x200000,
118 GOVD_NONTEMPORAL
= 0x400000,
120 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
121 GOVD_LASTPRIVATE_CONDITIONAL
= 0x800000,
123 GOVD_CONDTEMP
= 0x1000000,
125 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
126 GOVD_REDUCTION_INSCAN
= 0x2000000,
128 /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
130 GOVD_MAP_HAS_ATTACHMENTS
= 0x4000000,
132 /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT. */
133 GOVD_FIRSTPRIVATE_IMPLICIT
= 0x8000000,
135 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
136 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
143 ORT_WORKSHARE
= 0x00,
144 ORT_TASKGROUP
= 0x01,
148 ORT_COMBINED_PARALLEL
= ORT_PARALLEL
| 1,
151 ORT_UNTIED_TASK
= ORT_TASK
| 1,
152 ORT_TASKLOOP
= ORT_TASK
| 2,
153 ORT_UNTIED_TASKLOOP
= ORT_UNTIED_TASK
| 2,
156 ORT_COMBINED_TEAMS
= ORT_TEAMS
| 1,
157 ORT_HOST_TEAMS
= ORT_TEAMS
| 2,
158 ORT_COMBINED_HOST_TEAMS
= ORT_COMBINED_TEAMS
| 2,
161 ORT_TARGET_DATA
= 0x40,
163 /* Data region with offloading. */
165 ORT_COMBINED_TARGET
= ORT_TARGET
| 1,
166 ORT_IMPLICIT_TARGET
= ORT_TARGET
| 2,
168 /* OpenACC variants. */
169 ORT_ACC
= 0x100, /* A generic OpenACC region. */
170 ORT_ACC_DATA
= ORT_ACC
| ORT_TARGET_DATA
, /* Data construct. */
171 ORT_ACC_PARALLEL
= ORT_ACC
| ORT_TARGET
, /* Parallel construct */
172 ORT_ACC_KERNELS
= ORT_ACC
| ORT_TARGET
| 2, /* Kernels construct. */
173 ORT_ACC_SERIAL
= ORT_ACC
| ORT_TARGET
| 4, /* Serial construct. */
174 ORT_ACC_HOST_DATA
= ORT_ACC
| ORT_TARGET_DATA
| 2, /* Host data. */
176 /* Dummy OpenMP region, used to disable expansion of
177 DECL_VALUE_EXPRs in taskloop pre body. */
181 /* Gimplify hashtable helper. */
183 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
185 static inline hashval_t
hash (const elt_t
*);
186 static inline bool equal (const elt_t
*, const elt_t
*);
191 struct gimplify_ctx
*prev_context
;
193 vec
<gbind
*> bind_expr_stack
;
195 gimple_seq conditional_cleanups
;
199 vec
<tree
> case_labels
;
200 hash_set
<tree
> *live_switch_vars
;
201 /* The formal temporary table. Should this be persistent? */
202 hash_table
<gimplify_hasher
> *temp_htab
;
205 unsigned into_ssa
: 1;
206 unsigned allow_rhs_cond_expr
: 1;
207 unsigned in_cleanup_point_expr
: 1;
208 unsigned keep_stack
: 1;
209 unsigned save_stack
: 1;
210 unsigned in_switch_expr
: 1;
213 enum gimplify_defaultmap_kind
216 GDMK_SCALAR_TARGET
, /* w/ Fortran's target attr, implicit mapping, only. */
222 struct gimplify_omp_ctx
224 struct gimplify_omp_ctx
*outer_context
;
225 splay_tree variables
;
226 hash_set
<tree
> *privatized_types
;
228 /* Iteration variables in an OMP_FOR. */
229 vec
<tree
> loop_iter_var
;
231 enum omp_clause_default_kind default_kind
;
232 enum omp_region_type region_type
;
236 bool target_firstprivatize_array_bases
;
238 bool order_concurrent
;
244 static struct gimplify_ctx
*gimplify_ctxp
;
245 static struct gimplify_omp_ctx
*gimplify_omp_ctxp
;
246 static bool in_omp_construct
;
248 /* Forward declaration. */
249 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
250 static hash_map
<tree
, tree
> *oacc_declare_returns
;
251 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
252 bool (*) (tree
), fallback_t
, bool);
253 static void prepare_gimple_addressable (tree
*, gimple_seq
*);
255 /* Shorter alias name for the above function for use in gimplify.cc
259 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
261 gimple_seq_add_stmt_without_update (seq_p
, gs
);
264 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
265 NULL, a new sequence is allocated. This function is
266 similar to gimple_seq_add_seq, but does not scan the operands.
267 During gimplification, we need to manipulate statement sequences
268 before the def/use vectors have been constructed. */
271 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
273 gimple_stmt_iterator si
;
278 si
= gsi_last (*dst_p
);
279 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
283 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
284 and popping gimplify contexts. */
286 static struct gimplify_ctx
*ctx_pool
= NULL
;
288 /* Return a gimplify context struct from the pool. */
290 static inline struct gimplify_ctx
*
293 struct gimplify_ctx
* c
= ctx_pool
;
296 ctx_pool
= c
->prev_context
;
298 c
= XNEW (struct gimplify_ctx
);
300 memset (c
, '\0', sizeof (*c
));
304 /* Put gimplify context C back into the pool. */
307 ctx_free (struct gimplify_ctx
*c
)
309 c
->prev_context
= ctx_pool
;
313 /* Free allocated ctx stack memory. */
316 free_gimplify_stack (void)
318 struct gimplify_ctx
*c
;
320 while ((c
= ctx_pool
))
322 ctx_pool
= c
->prev_context
;
328 /* Set up a context for the gimplifier. */
331 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
333 struct gimplify_ctx
*c
= ctx_alloc ();
335 c
->prev_context
= gimplify_ctxp
;
337 gimplify_ctxp
->into_ssa
= in_ssa
;
338 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
341 /* Tear down a context for the gimplifier. If BODY is non-null, then
342 put the temporaries into the outer BIND_EXPR. Otherwise, put them
345 BODY is not a sequence, but the first tuple in a sequence. */
348 pop_gimplify_context (gimple
*body
)
350 struct gimplify_ctx
*c
= gimplify_ctxp
;
353 && (!c
->bind_expr_stack
.exists ()
354 || c
->bind_expr_stack
.is_empty ()));
355 c
->bind_expr_stack
.release ();
356 gimplify_ctxp
= c
->prev_context
;
359 declare_vars (c
->temps
, body
, false);
361 record_vars (c
->temps
);
368 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
371 gimple_push_bind_expr (gbind
*bind_stmt
)
373 gimplify_ctxp
->bind_expr_stack
.reserve (8);
374 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
377 /* Pop the first element off the stack of bindings. */
380 gimple_pop_bind_expr (void)
382 gimplify_ctxp
->bind_expr_stack
.pop ();
385 /* Return the first element of the stack of bindings. */
388 gimple_current_bind_expr (void)
390 return gimplify_ctxp
->bind_expr_stack
.last ();
393 /* Return the stack of bindings created during gimplification. */
396 gimple_bind_expr_stack (void)
398 return gimplify_ctxp
->bind_expr_stack
;
401 /* Return true iff there is a COND_EXPR between us and the innermost
402 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
405 gimple_conditional_context (void)
407 return gimplify_ctxp
->conditions
> 0;
410 /* Note that we've entered a COND_EXPR. */
413 gimple_push_condition (void)
415 #ifdef ENABLE_GIMPLE_CHECKING
416 if (gimplify_ctxp
->conditions
== 0)
417 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
419 ++(gimplify_ctxp
->conditions
);
422 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
423 now, add any conditional cleanups we've seen to the prequeue. */
426 gimple_pop_condition (gimple_seq
*pre_p
)
428 int conds
= --(gimplify_ctxp
->conditions
);
430 gcc_assert (conds
>= 0);
433 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
434 gimplify_ctxp
->conditional_cleanups
= NULL
;
438 /* A stable comparison routine for use with splay trees and DECLs. */
441 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
446 return DECL_UID (a
) - DECL_UID (b
);
449 /* Create a new omp construct that deals with variable remapping. */
451 static struct gimplify_omp_ctx
*
452 new_omp_context (enum omp_region_type region_type
)
454 struct gimplify_omp_ctx
*c
;
456 c
= XCNEW (struct gimplify_omp_ctx
);
457 c
->outer_context
= gimplify_omp_ctxp
;
458 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
459 c
->privatized_types
= new hash_set
<tree
>;
460 c
->location
= input_location
;
461 c
->region_type
= region_type
;
462 if ((region_type
& ORT_TASK
) == 0)
463 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
465 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
466 c
->defaultmap
[GDMK_SCALAR
] = GOVD_MAP
;
467 c
->defaultmap
[GDMK_SCALAR_TARGET
] = GOVD_MAP
;
468 c
->defaultmap
[GDMK_AGGREGATE
] = GOVD_MAP
;
469 c
->defaultmap
[GDMK_ALLOCATABLE
] = GOVD_MAP
;
470 c
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
;
475 /* Destroy an omp construct that deals with variable remapping. */
478 delete_omp_context (struct gimplify_omp_ctx
*c
)
480 splay_tree_delete (c
->variables
);
481 delete c
->privatized_types
;
482 c
->loop_iter_var
.release ();
486 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
487 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
489 /* Both gimplify the statement T and append it to *SEQ_P. This function
490 behaves exactly as gimplify_stmt, but you don't have to pass T as a
494 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
496 gimplify_stmt (&t
, seq_p
);
499 /* Gimplify statement T into sequence *SEQ_P, and return the first
500 tuple in the sequence of generated tuples for this statement.
501 Return NULL if gimplifying T produced no tuples. */
504 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
506 gimple_stmt_iterator last
= gsi_last (*seq_p
);
508 gimplify_and_add (t
, seq_p
);
510 if (!gsi_end_p (last
))
513 return gsi_stmt (last
);
516 return gimple_seq_first_stmt (*seq_p
);
519 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
520 LHS, or for a call argument. */
523 is_gimple_mem_rhs (tree t
)
525 /* If we're dealing with a renamable type, either source or dest must be
526 a renamed variable. */
527 if (is_gimple_reg_type (TREE_TYPE (t
)))
528 return is_gimple_val (t
);
530 return is_gimple_val (t
) || is_gimple_lvalue (t
);
533 /* Return true if T is a CALL_EXPR or an expression that can be
534 assigned to a temporary. Note that this predicate should only be
535 used during gimplification. See the rationale for this in
536 gimplify_modify_expr. */
539 is_gimple_reg_rhs_or_call (tree t
)
541 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
542 || TREE_CODE (t
) == CALL_EXPR
);
545 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
546 this predicate should only be used during gimplification. See the
547 rationale for this in gimplify_modify_expr. */
550 is_gimple_mem_rhs_or_call (tree t
)
552 /* If we're dealing with a renamable type, either source or dest must be
553 a renamed variable. */
554 if (is_gimple_reg_type (TREE_TYPE (t
)))
555 return is_gimple_val (t
);
557 return (is_gimple_val (t
)
558 || is_gimple_lvalue (t
)
559 || TREE_CLOBBER_P (t
)
560 || TREE_CODE (t
) == CALL_EXPR
);
563 /* Create a temporary with a name derived from VAL. Subroutine of
564 lookup_tmp_var; nobody else should call this function. */
567 create_tmp_from_val (tree val
)
569 /* Drop all qualifiers and address-space information from the value type. */
570 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
571 tree var
= create_tmp_var (type
, get_name (val
));
575 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
576 an existing expression temporary. */
579 lookup_tmp_var (tree val
, bool is_formal
)
583 /* If not optimizing, never really reuse a temporary. local-alloc
584 won't allocate any variable that is used in more than one basic
585 block, which means it will go into memory, causing much extra
586 work in reload and final and poorer code generation, outweighing
587 the extra memory allocation here. */
588 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
589 ret
= create_tmp_from_val (val
);
596 if (!gimplify_ctxp
->temp_htab
)
597 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
598 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
601 elt_p
= XNEW (elt_t
);
603 elt_p
->temp
= ret
= create_tmp_from_val (val
);
616 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
619 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
620 bool is_formal
, bool allow_ssa
)
624 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
625 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
626 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
630 && gimplify_ctxp
->into_ssa
631 && is_gimple_reg_type (TREE_TYPE (val
)))
633 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
634 if (! gimple_in_ssa_p (cfun
))
636 const char *name
= get_name (val
);
638 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
642 t
= lookup_tmp_var (val
, is_formal
);
644 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
646 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
648 /* gimplify_modify_expr might want to reduce this further. */
649 gimplify_and_add (mod
, pre_p
);
655 /* Return a formal temporary variable initialized with VAL. PRE_P is as
656 in gimplify_expr. Only use this function if:
658 1) The value of the unfactored expression represented by VAL will not
659 change between the initialization and use of the temporary, and
660 2) The temporary will not be otherwise modified.
662 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
663 and #2 means it is inappropriate for && temps.
665 For other cases, use get_initialized_tmp_var instead. */
668 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
670 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true);
673 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
674 are as in gimplify_expr. */
677 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
,
678 gimple_seq
*post_p
/* = NULL */,
679 bool allow_ssa
/* = true */)
681 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
);
684 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
685 generate debug info for them; otherwise don't. */
688 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
695 gbind
*scope
= as_a
<gbind
*> (gs
);
697 temps
= nreverse (last
);
699 block
= gimple_bind_block (scope
);
700 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
701 if (!block
|| !debug_info
)
703 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
704 gimple_bind_set_vars (scope
, temps
);
708 /* We need to attach the nodes both to the BIND_EXPR and to its
709 associated BLOCK for debugging purposes. The key point here
710 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
711 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
712 if (BLOCK_VARS (block
))
713 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
716 gimple_bind_set_vars (scope
,
717 chainon (gimple_bind_vars (scope
), temps
));
718 BLOCK_VARS (block
) = temps
;
724 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
725 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
726 no such upper bound can be obtained. */
729 force_constant_size (tree var
)
731 /* The only attempt we make is by querying the maximum size of objects
732 of the variable's type. */
734 HOST_WIDE_INT max_size
;
736 gcc_assert (VAR_P (var
));
738 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
740 gcc_assert (max_size
>= 0);
743 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
745 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
748 /* Push the temporary variable TMP into the current binding. */
751 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
753 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
755 /* Later processing assumes that the object size is constant, which might
756 not be true at this point. Force the use of a constant upper bound in
758 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
759 force_constant_size (tmp
);
761 DECL_CONTEXT (tmp
) = fn
->decl
;
762 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
764 record_vars_into (tmp
, fn
->decl
);
767 /* Push the temporary variable TMP into the current binding. */
770 gimple_add_tmp_var (tree tmp
)
772 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
774 /* Later processing assumes that the object size is constant, which might
775 not be true at this point. Force the use of a constant upper bound in
777 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
778 force_constant_size (tmp
);
780 DECL_CONTEXT (tmp
) = current_function_decl
;
781 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
785 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
786 gimplify_ctxp
->temps
= tmp
;
788 /* Mark temporaries local within the nearest enclosing parallel. */
789 if (gimplify_omp_ctxp
)
791 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
792 int flag
= GOVD_LOCAL
| GOVD_SEEN
;
794 && (ctx
->region_type
== ORT_WORKSHARE
795 || ctx
->region_type
== ORT_TASKGROUP
796 || ctx
->region_type
== ORT_SIMD
797 || ctx
->region_type
== ORT_ACC
))
799 if (ctx
->region_type
== ORT_SIMD
800 && TREE_ADDRESSABLE (tmp
)
801 && !TREE_STATIC (tmp
))
803 if (TREE_CODE (DECL_SIZE_UNIT (tmp
)) != INTEGER_CST
)
804 ctx
->add_safelen1
= true;
805 else if (ctx
->in_for_exprs
)
808 flag
= GOVD_PRIVATE
| GOVD_SEEN
;
811 ctx
= ctx
->outer_context
;
814 omp_add_variable (ctx
, tmp
, flag
);
823 /* This case is for nested functions. We need to expose the locals
825 body_seq
= gimple_body (current_function_decl
);
826 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
832 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
833 nodes that are referenced more than once in GENERIC functions. This is
834 necessary because gimplification (translation into GIMPLE) is performed
835 by modifying tree nodes in-place, so gimplication of a shared node in a
836 first context could generate an invalid GIMPLE form in a second context.
838 This is achieved with a simple mark/copy/unmark algorithm that walks the
839 GENERIC representation top-down, marks nodes with TREE_VISITED the first
840 time it encounters them, duplicates them if they already have TREE_VISITED
841 set, and finally removes the TREE_VISITED marks it has set.
843 The algorithm works only at the function level, i.e. it generates a GENERIC
844 representation of a function with no nodes shared within the function when
845 passed a GENERIC function (except for nodes that are allowed to be shared).
847 At the global level, it is also necessary to unshare tree nodes that are
848 referenced in more than one function, for the same aforementioned reason.
849 This requires some cooperation from the front-end. There are 2 strategies:
851 1. Manual unsharing. The front-end needs to call unshare_expr on every
852 expression that might end up being shared across functions.
854 2. Deep unsharing. This is an extension of regular unsharing. Instead
855 of calling unshare_expr on expressions that might be shared across
856 functions, the front-end pre-marks them with TREE_VISITED. This will
857 ensure that they are unshared on the first reference within functions
858 when the regular unsharing algorithm runs. The counterpart is that
859 this algorithm must look deeper than for manual unsharing, which is
860 specified by LANG_HOOKS_DEEP_UNSHARING.
862 If there are only few specific cases of node sharing across functions, it is
863 probably easier for a front-end to unshare the expressions manually. On the
864 contrary, if the expressions generated at the global level are as widespread
865 as expressions generated within functions, deep unsharing is very likely the
868 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
869 These nodes model computations that must be done once. If we were to
870 unshare something like SAVE_EXPR(i++), the gimplification process would
871 create wrong code. However, if DATA is non-null, it must hold a pointer
872 set that is used to unshare the subtrees of these nodes. */
875 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
878 enum tree_code code
= TREE_CODE (t
);
880 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
881 copy their subtrees if we can make sure to do it only once. */
882 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
884 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
890 /* Stop at types, decls, constants like copy_tree_r. */
891 else if (TREE_CODE_CLASS (code
) == tcc_type
892 || TREE_CODE_CLASS (code
) == tcc_declaration
893 || TREE_CODE_CLASS (code
) == tcc_constant
)
896 /* Cope with the statement expression extension. */
897 else if (code
== STATEMENT_LIST
)
900 /* Leave the bulk of the work to copy_tree_r itself. */
902 copy_tree_r (tp
, walk_subtrees
, NULL
);
907 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
908 If *TP has been visited already, then *TP is deeply copied by calling
909 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
912 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
915 enum tree_code code
= TREE_CODE (t
);
917 /* Skip types, decls, and constants. But we do want to look at their
918 types and the bounds of types. Mark them as visited so we properly
919 unmark their subtrees on the unmark pass. If we've already seen them,
920 don't look down further. */
921 if (TREE_CODE_CLASS (code
) == tcc_type
922 || TREE_CODE_CLASS (code
) == tcc_declaration
923 || TREE_CODE_CLASS (code
) == tcc_constant
)
925 if (TREE_VISITED (t
))
928 TREE_VISITED (t
) = 1;
931 /* If this node has been visited already, unshare it and don't look
933 else if (TREE_VISITED (t
))
935 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
939 /* Otherwise, mark the node as visited and keep looking. */
941 TREE_VISITED (t
) = 1;
946 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
947 copy_if_shared_r callback unmodified. */
950 copy_if_shared (tree
*tp
, void *data
)
952 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
955 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
956 any nested functions. */
959 unshare_body (tree fndecl
)
961 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
962 /* If the language requires deep unsharing, we need a pointer set to make
963 sure we don't repeatedly unshare subtrees of unshareable nodes. */
964 hash_set
<tree
> *visited
965 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
967 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
968 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
969 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
974 for (cgn
= first_nested_function (cgn
); cgn
;
975 cgn
= next_nested_function (cgn
))
976 unshare_body (cgn
->decl
);
979 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
980 Subtrees are walked until the first unvisited node is encountered. */
983 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
987 /* If this node has been visited, unmark it and keep looking. */
988 if (TREE_VISITED (t
))
989 TREE_VISITED (t
) = 0;
991 /* Otherwise, don't look any deeper. */
998 /* Unmark the visited trees rooted at *TP. */
1001 unmark_visited (tree
*tp
)
1003 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
1006 /* Likewise, but mark all trees as not visited. */
1009 unvisit_body (tree fndecl
)
1011 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
1013 unmark_visited (&DECL_SAVED_TREE (fndecl
));
1014 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
1015 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
1018 for (cgn
= first_nested_function (cgn
);
1019 cgn
; cgn
= next_nested_function (cgn
))
1020 unvisit_body (cgn
->decl
);
1023 /* Unconditionally make an unshared copy of EXPR. This is used when using
1024 stored expressions which span multiple functions, such as BINFO_VTABLE,
1025 as the normal unsharing process can't tell that they're shared. */
1028 unshare_expr (tree expr
)
1030 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1034 /* Worker for unshare_expr_without_location. */
1037 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
1040 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
1046 /* Similar to unshare_expr but also prune all expression locations
1050 unshare_expr_without_location (tree expr
)
1052 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1054 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
1058 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1059 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1060 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1061 EXPR is the location of the EXPR. */
1064 rexpr_location (tree expr
, location_t or_else
= UNKNOWN_LOCATION
)
1069 if (EXPR_HAS_LOCATION (expr
))
1070 return EXPR_LOCATION (expr
);
1072 if (TREE_CODE (expr
) != STATEMENT_LIST
)
1075 tree_stmt_iterator i
= tsi_start (expr
);
1078 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
1084 if (!found
|| !tsi_one_before_end_p (i
))
1087 return rexpr_location (tsi_stmt (i
), or_else
);
1090 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1091 rexpr_location for the potential recursion. */
1094 rexpr_has_location (tree expr
)
1096 return rexpr_location (expr
) != UNKNOWN_LOCATION
;
1100 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1101 contain statements and have a value. Assign its value to a temporary
1102 and give it void_type_node. Return the temporary, or NULL_TREE if
1103 WRAPPER was already void. */
1106 voidify_wrapper_expr (tree wrapper
, tree temp
)
1108 tree type
= TREE_TYPE (wrapper
);
1109 if (type
&& !VOID_TYPE_P (type
))
1113 /* Set p to point to the body of the wrapper. Loop until we find
1114 something that isn't a wrapper. */
1115 for (p
= &wrapper
; p
&& *p
; )
1117 switch (TREE_CODE (*p
))
1120 TREE_SIDE_EFFECTS (*p
) = 1;
1121 TREE_TYPE (*p
) = void_type_node
;
1122 /* For a BIND_EXPR, the body is operand 1. */
1123 p
= &BIND_EXPR_BODY (*p
);
1126 case CLEANUP_POINT_EXPR
:
1127 case TRY_FINALLY_EXPR
:
1128 case TRY_CATCH_EXPR
:
1129 TREE_SIDE_EFFECTS (*p
) = 1;
1130 TREE_TYPE (*p
) = void_type_node
;
1131 p
= &TREE_OPERAND (*p
, 0);
1134 case STATEMENT_LIST
:
1136 tree_stmt_iterator i
= tsi_last (*p
);
1137 TREE_SIDE_EFFECTS (*p
) = 1;
1138 TREE_TYPE (*p
) = void_type_node
;
1139 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1144 /* Advance to the last statement. Set all container types to
1146 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1148 TREE_SIDE_EFFECTS (*p
) = 1;
1149 TREE_TYPE (*p
) = void_type_node
;
1153 case TRANSACTION_EXPR
:
1154 TREE_SIDE_EFFECTS (*p
) = 1;
1155 TREE_TYPE (*p
) = void_type_node
;
1156 p
= &TRANSACTION_EXPR_BODY (*p
);
1160 /* Assume that any tree upon which voidify_wrapper_expr is
1161 directly called is a wrapper, and that its body is op0. */
1164 TREE_SIDE_EFFECTS (*p
) = 1;
1165 TREE_TYPE (*p
) = void_type_node
;
1166 p
= &TREE_OPERAND (*p
, 0);
1174 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1178 /* The wrapper is on the RHS of an assignment that we're pushing
1180 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1181 || TREE_CODE (temp
) == MODIFY_EXPR
);
1182 TREE_OPERAND (temp
, 1) = *p
;
1187 temp
= create_tmp_var (type
, "retval");
1188 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1197 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1198 a temporary through which they communicate. */
1201 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1205 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1206 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1207 gimple_call_set_lhs (*save
, tmp_var
);
1210 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1214 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1217 build_asan_poison_call_expr (tree decl
)
1219 /* Do not poison variables that have size equal to zero. */
1220 tree unit_size
= DECL_SIZE_UNIT (decl
);
1221 if (zerop (unit_size
))
1224 tree base
= build_fold_addr_expr (decl
);
1226 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1228 build_int_cst (integer_type_node
,
1233 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1234 on POISON flag, shadow memory of a DECL variable. The call will be
1235 put on location identified by IT iterator, where BEFORE flag drives
1236 position where the stmt will be put. */
1239 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1242 tree unit_size
= DECL_SIZE_UNIT (decl
);
1243 tree base
= build_fold_addr_expr (decl
);
1245 /* Do not poison variables that have size equal to zero. */
1246 if (zerop (unit_size
))
1249 /* It's necessary to have all stack variables aligned to ASAN granularity
1251 gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
1252 unsigned shadow_granularity
1253 = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE
: ASAN_SHADOW_GRANULARITY
;
1254 if (DECL_ALIGN_UNIT (decl
) <= shadow_granularity
)
1255 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* shadow_granularity
);
1257 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1260 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1261 build_int_cst (integer_type_node
, flags
),
1265 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1267 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1270 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1271 either poisons or unpoisons a DECL. Created statement is appended
1272 to SEQ_P gimple sequence. */
1275 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1277 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1278 bool before
= false;
1283 asan_poison_variable (decl
, poison
, &it
, before
);
1286 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1289 sort_by_decl_uid (const void *a
, const void *b
)
1291 const tree
*t1
= (const tree
*)a
;
1292 const tree
*t2
= (const tree
*)b
;
1294 int uid1
= DECL_UID (*t1
);
1295 int uid2
= DECL_UID (*t2
);
1299 else if (uid1
> uid2
)
1305 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1306 depending on POISON flag. Created statement is appended
1307 to SEQ_P gimple sequence. */
1310 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1312 unsigned c
= variables
->elements ();
1316 auto_vec
<tree
> sorted_variables (c
);
1318 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1319 it
!= variables
->end (); ++it
)
1320 sorted_variables
.safe_push (*it
);
1322 sorted_variables
.qsort (sort_by_decl_uid
);
1326 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1328 asan_poison_variable (var
, poison
, seq_p
);
1330 /* Add use_after_scope_memory attribute for the variable in order
1331 to prevent re-written into SSA. */
1332 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1333 DECL_ATTRIBUTES (var
)))
1334 DECL_ATTRIBUTES (var
)
1335 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1337 DECL_ATTRIBUTES (var
));
1341 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1343 static enum gimplify_status
1344 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1346 tree bind_expr
= *expr_p
;
1347 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1348 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1351 gimple_seq body
, cleanup
;
1353 location_t start_locus
= 0, end_locus
= 0;
1354 tree ret_clauses
= NULL
;
1356 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1358 /* Mark variables seen in this bind expr. */
1359 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1363 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1365 /* Mark variable as local. */
1366 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
))
1368 if (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1369 || splay_tree_lookup (ctx
->variables
,
1370 (splay_tree_key
) t
) == NULL
)
1372 int flag
= GOVD_LOCAL
;
1373 if (ctx
->region_type
== ORT_SIMD
1374 && TREE_ADDRESSABLE (t
)
1375 && !TREE_STATIC (t
))
1377 if (TREE_CODE (DECL_SIZE_UNIT (t
)) != INTEGER_CST
)
1378 ctx
->add_safelen1
= true;
1380 flag
= GOVD_PRIVATE
;
1382 omp_add_variable (ctx
, t
, flag
| GOVD_SEEN
);
1384 /* Static locals inside of target construct or offloaded
1385 routines need to be "omp declare target". */
1386 if (TREE_STATIC (t
))
1387 for (; ctx
; ctx
= ctx
->outer_context
)
1388 if ((ctx
->region_type
& ORT_TARGET
) != 0)
1390 if (!lookup_attribute ("omp declare target",
1391 DECL_ATTRIBUTES (t
)))
1393 tree id
= get_identifier ("omp declare target");
1395 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (t
));
1396 varpool_node
*node
= varpool_node::get (t
);
1399 node
->offloadable
= 1;
1400 if (ENABLE_OFFLOADING
&& !DECL_EXTERNAL (t
))
1402 g
->have_offload
= true;
1404 vec_safe_push (offload_vars
, t
);
1412 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1414 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1415 cfun
->has_local_explicit_reg_vars
= true;
1419 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1420 BIND_EXPR_BLOCK (bind_expr
));
1421 gimple_push_bind_expr (bind_stmt
);
1423 gimplify_ctxp
->keep_stack
= false;
1424 gimplify_ctxp
->save_stack
= false;
1426 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1428 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1429 gimple_bind_set_body (bind_stmt
, body
);
1431 /* Source location wise, the cleanup code (stack_restore and clobbers)
1432 belongs to the end of the block, so propagate what we have. The
1433 stack_save operation belongs to the beginning of block, which we can
1434 infer from the bind_expr directly if the block has no explicit
1436 if (BIND_EXPR_BLOCK (bind_expr
))
1438 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1439 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1441 if (start_locus
== 0)
1442 start_locus
= EXPR_LOCATION (bind_expr
);
1447 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1448 the stack space allocated to the VLAs. */
1449 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1451 gcall
*stack_restore
;
1453 /* Save stack on entry and restore it on exit. Add a try_finally
1454 block to achieve this. */
1455 build_stack_save_restore (&stack_save
, &stack_restore
);
1457 gimple_set_location (stack_save
, start_locus
);
1458 gimple_set_location (stack_restore
, end_locus
);
1460 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1463 /* Add clobbers for all variables that go out of scope. */
1464 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1467 && !is_global_var (t
)
1468 && DECL_CONTEXT (t
) == current_function_decl
)
1470 if (!DECL_HARD_REGISTER (t
)
1471 && !TREE_THIS_VOLATILE (t
)
1472 && !DECL_HAS_VALUE_EXPR_P (t
)
1473 /* Only care for variables that have to be in memory. Others
1474 will be rewritten into SSA names, hence moved to the
1476 && !is_gimple_reg (t
)
1477 && flag_stack_reuse
!= SR_NONE
)
1479 tree clobber
= build_clobber (TREE_TYPE (t
), CLOBBER_EOL
);
1480 gimple
*clobber_stmt
;
1481 clobber_stmt
= gimple_build_assign (t
, clobber
);
1482 gimple_set_location (clobber_stmt
, end_locus
);
1483 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1486 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1489 if (DECL_HAS_VALUE_EXPR_P (key
))
1491 key
= DECL_VALUE_EXPR (key
);
1492 if (TREE_CODE (key
) == INDIRECT_REF
)
1493 key
= TREE_OPERAND (key
, 0);
1495 tree
*c
= oacc_declare_returns
->get (key
);
1499 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1501 ret_clauses
= unshare_expr (*c
);
1503 oacc_declare_returns
->remove (key
);
1505 if (oacc_declare_returns
->is_empty ())
1507 delete oacc_declare_returns
;
1508 oacc_declare_returns
= NULL
;
1514 if (asan_poisoned_variables
!= NULL
1515 && asan_poisoned_variables
->contains (t
))
1517 asan_poisoned_variables
->remove (t
);
1518 asan_poison_variable (t
, true, &cleanup
);
1521 if (gimplify_ctxp
->live_switch_vars
!= NULL
1522 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1523 gimplify_ctxp
->live_switch_vars
->remove (t
);
1529 gimple_stmt_iterator si
= gsi_start (cleanup
);
1531 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1533 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1539 gimple_seq new_body
;
1542 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1543 GIMPLE_TRY_FINALLY
);
1546 gimplify_seq_add_stmt (&new_body
, stack_save
);
1547 gimplify_seq_add_stmt (&new_body
, gs
);
1548 gimple_bind_set_body (bind_stmt
, new_body
);
1551 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1552 if (!gimplify_ctxp
->keep_stack
)
1553 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1554 gimplify_ctxp
->save_stack
= old_save_stack
;
1556 gimple_pop_bind_expr ();
1558 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1566 *expr_p
= NULL_TREE
;
1570 /* Maybe add early return predict statement to PRE_P sequence. */
1573 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1575 /* If we are not in a conditional context, add PREDICT statement. */
1576 if (gimple_conditional_context ())
1578 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1580 gimplify_seq_add_stmt (pre_p
, predict
);
1584 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1585 GIMPLE value, it is assigned to a new temporary and the statement is
1586 re-written to return the temporary.
1588 PRE_P points to the sequence where side effects that must happen before
1589 STMT should be stored. */
1591 static enum gimplify_status
1592 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1595 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1596 tree result_decl
, result
;
1598 if (ret_expr
== error_mark_node
)
1602 || TREE_CODE (ret_expr
) == RESULT_DECL
)
1604 maybe_add_early_return_predict_stmt (pre_p
);
1605 greturn
*ret
= gimple_build_return (ret_expr
);
1606 copy_warning (ret
, stmt
);
1607 gimplify_seq_add_stmt (pre_p
, ret
);
1611 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1612 result_decl
= NULL_TREE
;
1613 else if (TREE_CODE (ret_expr
) == COMPOUND_EXPR
)
1615 /* Used in C++ for handling EH cleanup of the return value if a local
1616 cleanup throws. Assume the front-end knows what it's doing. */
1617 result_decl
= DECL_RESULT (current_function_decl
);
1618 /* But crash if we end up trying to modify ret_expr below. */
1619 ret_expr
= NULL_TREE
;
1623 result_decl
= TREE_OPERAND (ret_expr
, 0);
1625 /* See through a return by reference. */
1626 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1627 result_decl
= TREE_OPERAND (result_decl
, 0);
1629 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1630 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1631 && TREE_CODE (result_decl
) == RESULT_DECL
);
1634 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1635 Recall that aggregate_value_p is FALSE for any aggregate type that is
1636 returned in registers. If we're returning values in registers, then
1637 we don't want to extend the lifetime of the RESULT_DECL, particularly
1638 across another call. In addition, for those aggregates for which
1639 hard_function_value generates a PARALLEL, we'll die during normal
1640 expansion of structure assignments; there's special code in expand_return
1641 to handle this case that does not exist in expand_expr. */
1644 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1646 if (!poly_int_tree_p (DECL_SIZE (result_decl
)))
1648 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1649 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1650 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1651 should be effectively allocated by the caller, i.e. all calls to
1652 this function must be subject to the Return Slot Optimization. */
1653 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1654 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1656 result
= result_decl
;
1658 else if (gimplify_ctxp
->return_temp
)
1659 result
= gimplify_ctxp
->return_temp
;
1662 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1664 /* ??? With complex control flow (usually involving abnormal edges),
1665 we can wind up warning about an uninitialized value for this. Due
1666 to how this variable is constructed and initialized, this is never
1667 true. Give up and never warn. */
1668 suppress_warning (result
, OPT_Wuninitialized
);
1670 gimplify_ctxp
->return_temp
= result
;
1673 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1674 Then gimplify the whole thing. */
1675 if (result
!= result_decl
)
1676 TREE_OPERAND (ret_expr
, 0) = result
;
1678 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1680 maybe_add_early_return_predict_stmt (pre_p
);
1681 ret
= gimple_build_return (result
);
1682 copy_warning (ret
, stmt
);
1683 gimplify_seq_add_stmt (pre_p
, ret
);
1688 /* Gimplify a variable-length array DECL. */
1691 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1693 /* This is a variable-sized decl. Simplify its size and mark it
1694 for deferred expansion. */
1695 tree t
, addr
, ptr_type
;
1697 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1698 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1700 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1701 if (DECL_HAS_VALUE_EXPR_P (decl
))
1704 /* All occurrences of this decl in final gimplified code will be
1705 replaced by indirection. Setting DECL_VALUE_EXPR does two
1706 things: First, it lets the rest of the gimplifier know what
1707 replacement to use. Second, it lets the debug info know
1708 where to find the value. */
1709 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1710 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1711 DECL_IGNORED_P (addr
) = 0;
1712 t
= build_fold_indirect_ref (addr
);
1713 TREE_THIS_NOTRAP (t
) = 1;
1714 SET_DECL_VALUE_EXPR (decl
, t
);
1715 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1717 t
= build_alloca_call_expr (DECL_SIZE_UNIT (decl
), DECL_ALIGN (decl
),
1718 max_int_size_in_bytes (TREE_TYPE (decl
)));
1719 /* The call has been built for a variable-sized object. */
1720 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1721 t
= fold_convert (ptr_type
, t
);
1722 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1724 gimplify_and_add (t
, seq_p
);
1726 /* Record the dynamic allocation associated with DECL if requested. */
1727 if (flag_callgraph_info
& CALLGRAPH_INFO_DYNAMIC_ALLOC
)
1728 record_dynamic_alloc (decl
);
1731 /* A helper function to be called via walk_tree. Mark all labels under *TP
1732 as being forced. To be called for DECL_INITIAL of static variables. */
1735 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1739 if (TREE_CODE (*tp
) == LABEL_DECL
)
1741 FORCED_LABEL (*tp
) = 1;
1742 cfun
->has_forced_label_in_static
= 1;
1748 /* Generate an initialization to automatic variable DECL based on INIT_TYPE.
1749 Build a call to internal const function DEFERRED_INIT:
1750 1st argument: SIZE of the DECL;
1751 2nd argument: INIT_TYPE;
1752 3rd argument: NAME of the DECL;
1754 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
1757 gimple_add_init_for_auto_var (tree decl
,
1758 enum auto_init_type init_type
,
1761 gcc_assert (auto_var_p (decl
));
1762 gcc_assert (init_type
> AUTO_INIT_UNINITIALIZED
);
1763 location_t loc
= EXPR_LOCATION (decl
);
1764 tree decl_size
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1767 = build_int_cst (integer_type_node
, (int) init_type
);
1769 tree decl_name
= NULL_TREE
;
1770 if (DECL_NAME (decl
))
1772 decl_name
= build_string_literal (IDENTIFIER_LENGTH (DECL_NAME (decl
)) + 1,
1773 IDENTIFIER_POINTER (DECL_NAME (decl
)));
1777 char *decl_name_anonymous
= xasprintf ("D.%u", DECL_UID (decl
));
1778 decl_name
= build_string_literal (strlen (decl_name_anonymous
) + 1,
1779 decl_name_anonymous
);
1780 free (decl_name_anonymous
);
1783 tree call
= build_call_expr_internal_loc (loc
, IFN_DEFERRED_INIT
,
1784 TREE_TYPE (decl
), 3,
1785 decl_size
, init_type_node
,
1788 gimplify_assign (decl
, call
, seq_p
);
1791 /* Generate padding initialization for automatic vairable DECL.
1792 C guarantees that brace-init with fewer initializers than members
1793 aggregate will initialize the rest of the aggregate as-if it were
1794 static initialization. In turn static initialization guarantees
1795 that padding is initialized to zero. So, we always initialize paddings
1796 to zeroes regardless INIT_TYPE.
1797 To do the padding initialization, we insert a call to
1798 __builtin_clear_padding (&decl, 0, for_auto_init = true).
1799 Note, we add an additional dummy argument for __builtin_clear_padding,
1800 'for_auto_init' to distinguish whether this call is for automatic
1801 variable initialization or not.
1804 gimple_add_padding_init_for_auto_var (tree decl
, bool is_vla
,
1807 tree addr_of_decl
= NULL_TREE
;
1808 tree fn
= builtin_decl_explicit (BUILT_IN_CLEAR_PADDING
);
1812 /* The temporary address variable for this vla should be
1813 created in gimplify_vla_decl. */
1814 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
1815 gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl
)) == INDIRECT_REF
);
1816 addr_of_decl
= TREE_OPERAND (DECL_VALUE_EXPR (decl
), 0);
1820 mark_addressable (decl
);
1821 addr_of_decl
= build_fold_addr_expr (decl
);
1824 gimple
*call
= gimple_build_call (fn
, 2, addr_of_decl
,
1825 build_one_cst (TREE_TYPE (addr_of_decl
)));
1826 gimplify_seq_add_stmt (seq_p
, call
);
1829 /* Return true if the DECL need to be automaticly initialized by the
1832 is_var_need_auto_init (tree decl
)
1834 if (auto_var_p (decl
)
1835 && (TREE_CODE (decl
) != VAR_DECL
1836 || !DECL_HARD_REGISTER (decl
))
1837 && (flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
)
1838 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl
)))
1839 && !OPAQUE_TYPE_P (TREE_TYPE (decl
))
1840 && !is_empty_type (TREE_TYPE (decl
)))
1845 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1846 and initialization explicit. */
1848 static enum gimplify_status
1849 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1851 tree stmt
= *stmt_p
;
1852 tree decl
= DECL_EXPR_DECL (stmt
);
1854 *stmt_p
= NULL_TREE
;
1856 if (TREE_TYPE (decl
) == error_mark_node
)
1859 if ((TREE_CODE (decl
) == TYPE_DECL
1861 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1863 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1864 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1865 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
1868 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1869 in case its size expressions contain problematic nodes like CALL_EXPR. */
1870 if (TREE_CODE (decl
) == TYPE_DECL
1871 && DECL_ORIGINAL_TYPE (decl
)
1872 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1874 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1875 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
1876 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
1879 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
1881 tree init
= DECL_INITIAL (decl
);
1882 bool is_vla
= false;
1883 /* Check whether a decl has FE created VALUE_EXPR here BEFORE
1884 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
1885 If the decl has VALUE_EXPR that was created by FE (usually
1886 C++FE), it's a proxy varaible, and FE already initialized
1887 the VALUE_EXPR of it, we should not initialize it anymore. */
1888 bool decl_had_value_expr_p
= DECL_HAS_VALUE_EXPR_P (decl
);
1891 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl
), &size
)
1892 || (!TREE_STATIC (decl
)
1893 && flag_stack_check
== GENERIC_STACK_CHECK
1895 (unsigned HOST_WIDE_INT
) STACK_CHECK_MAX_VAR_SIZE
)))
1897 gimplify_vla_decl (decl
, seq_p
);
1901 if (asan_poisoned_variables
1903 && TREE_ADDRESSABLE (decl
)
1904 && !TREE_STATIC (decl
)
1905 && !DECL_HAS_VALUE_EXPR_P (decl
)
1906 && DECL_ALIGN (decl
) <= MAX_SUPPORTED_STACK_ALIGNMENT
1907 && dbg_cnt (asan_use_after_scope
)
1908 && !gimplify_omp_ctxp
1909 /* GNAT introduces temporaries to hold return values of calls in
1910 initializers of variables defined in other units, so the
1911 declaration of the variable is discarded completely. We do not
1912 want to issue poison calls for such dropped variables. */
1913 && (DECL_SEEN_IN_BIND_EXPR_P (decl
)
1914 || (DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)))
1916 asan_poisoned_variables
->add (decl
);
1917 asan_poison_variable (decl
, false, seq_p
);
1918 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
1919 gimplify_ctxp
->live_switch_vars
->add (decl
);
1922 /* Some front ends do not explicitly declare all anonymous
1923 artificial variables. We compensate here by declaring the
1924 variables, though it would be better if the front ends would
1925 explicitly declare them. */
1926 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1927 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1928 gimple_add_tmp_var (decl
);
1930 if (init
&& init
!= error_mark_node
)
1932 if (!TREE_STATIC (decl
))
1934 DECL_INITIAL (decl
) = NULL_TREE
;
1935 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1936 gimplify_and_add (init
, seq_p
);
1938 /* Clear TREE_READONLY if we really have an initialization. */
1939 if (!DECL_INITIAL (decl
)
1940 && !omp_privatize_by_reference (decl
))
1941 TREE_READONLY (decl
) = 0;
1944 /* We must still examine initializers for static variables
1945 as they may contain a label address. */
1946 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1948 /* When there is no explicit initializer, if the user requested,
1949 We should insert an artifical initializer for this automatic
1951 else if (is_var_need_auto_init (decl
)
1952 && !decl_had_value_expr_p
)
1954 gimple_add_init_for_auto_var (decl
,
1957 /* The expanding of a call to the above .DEFERRED_INIT will apply
1958 block initialization to the whole space covered by this variable.
1959 As a result, all the paddings will be initialized to zeroes
1960 for zero initialization and 0xFE byte-repeatable patterns for
1961 pattern initialization.
1962 In order to make the paddings as zeroes for pattern init, We
1963 should add a call to __builtin_clear_padding to clear the
1964 paddings to zero in compatiple with CLANG.
1965 We cannot insert this call if the variable is a gimple register
1966 since __builtin_clear_padding will take the address of the
1967 variable. As a result, if a long double/_Complex long double
1968 variable will spilled into stack later, its padding is 0XFE. */
1969 if (flag_auto_var_init
== AUTO_INIT_PATTERN
1970 && !is_gimple_reg (decl
)
1971 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl
)))
1972 gimple_add_padding_init_for_auto_var (decl
, is_vla
, seq_p
);
1979 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1980 and replacing the LOOP_EXPR with goto, but if the loop contains an
1981 EXIT_EXPR, we need to append a label for it to jump to. */
1983 static enum gimplify_status
1984 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1986 tree saved_label
= gimplify_ctxp
->exit_label
;
1987 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
1989 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
1991 gimplify_ctxp
->exit_label
= NULL_TREE
;
1993 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
1995 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
1997 if (gimplify_ctxp
->exit_label
)
1998 gimplify_seq_add_stmt (pre_p
,
1999 gimple_build_label (gimplify_ctxp
->exit_label
));
2001 gimplify_ctxp
->exit_label
= saved_label
;
2007 /* Gimplify a statement list onto a sequence. These may be created either
2008 by an enlightened front-end, or by shortcut_cond_expr. */
2010 static enum gimplify_status
2011 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
2013 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
2015 tree_stmt_iterator i
= tsi_start (*expr_p
);
2017 while (!tsi_end_p (i
))
2019 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
2033 /* Emit warning for the unreachable statment STMT if needed.
2034 Return the gimple itself when the warning is emitted, otherwise
2037 emit_warn_switch_unreachable (gimple
*stmt
)
2039 if (gimple_code (stmt
) == GIMPLE_GOTO
2040 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
2041 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
2042 /* Don't warn for compiler-generated gotos. These occur
2043 in Duff's devices, for example. */
2045 else if ((flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
)
2046 && ((gimple_call_internal_p (stmt
, IFN_DEFERRED_INIT
))
2047 || (gimple_call_builtin_p (stmt
, BUILT_IN_CLEAR_PADDING
)
2048 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt
, 1)))
2049 || (is_gimple_assign (stmt
)
2050 && gimple_assign_single_p (stmt
)
2051 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
2052 && gimple_call_internal_p (
2053 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
)),
2054 IFN_DEFERRED_INIT
))))
2055 /* Don't warn for compiler-generated initializations for
2056 -ftrivial-auto-var-init.
2058 case 1: a call to .DEFERRED_INIT;
2059 case 2: a call to __builtin_clear_padding with the 2nd argument is
2060 present and non-zero;
2061 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2062 that has the LHS of .DEFERRED_INIT as the RHS as following:
2063 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2067 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
2068 "statement will never be executed");
2072 /* Callback for walk_gimple_seq. */
2075 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator
*gsi_p
,
2076 bool *handled_ops_p
,
2077 struct walk_stmt_info
*wi
)
2079 gimple
*stmt
= gsi_stmt (*gsi_p
);
2080 bool unreachable_issued
= wi
->info
!= NULL
;
2082 *handled_ops_p
= true;
2083 switch (gimple_code (stmt
))
2086 /* A compiler-generated cleanup or a user-written try block.
2087 If it's empty, don't dive into it--that would result in
2088 worse location info. */
2089 if (gimple_try_eval (stmt
) == NULL
)
2091 if (warn_switch_unreachable
&& !unreachable_issued
)
2092 wi
->info
= emit_warn_switch_unreachable (stmt
);
2094 /* Stop when auto var init warning is not on. */
2095 if (!warn_trivial_auto_var_init
)
2096 return integer_zero_node
;
2101 case GIMPLE_EH_FILTER
:
2102 case GIMPLE_TRANSACTION
:
2103 /* Walk the sub-statements. */
2104 *handled_ops_p
= false;
2108 /* Ignore these. We may generate them before declarations that
2109 are never executed. If there's something to warn about,
2110 there will be non-debug stmts too, and we'll catch those. */
2114 /* Stop till the first Label. */
2115 return integer_zero_node
;
2117 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2119 *handled_ops_p
= false;
2122 if (warn_trivial_auto_var_init
2123 && flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
2124 && gimple_call_internal_p (stmt
, IFN_DEFERRED_INIT
))
2126 /* Get the variable name from the 3rd argument of call. */
2127 tree var_name
= gimple_call_arg (stmt
, 2);
2128 var_name
= TREE_OPERAND (TREE_OPERAND (var_name
, 0), 0);
2129 const char *var_name_str
= TREE_STRING_POINTER (var_name
);
2131 warning_at (gimple_location (stmt
), OPT_Wtrivial_auto_var_init
,
2132 "%qs cannot be initialized with"
2133 "%<-ftrivial-auto-var_init%>",
2140 /* check the first "real" statement (not a decl/lexical scope/...), issue
2141 warning if needed. */
2142 if (warn_switch_unreachable
&& !unreachable_issued
)
2143 wi
->info
= emit_warn_switch_unreachable (stmt
);
2144 /* Stop when auto var init warning is not on. */
2145 if (!warn_trivial_auto_var_init
)
2146 return integer_zero_node
;
2153 /* Possibly warn about unreachable statements between switch's controlling
2154 expression and the first case. Also warn about -ftrivial-auto-var-init
2155 cannot initialize the auto variable under such situation.
2156 SEQ is the body of a switch expression. */
2159 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq
)
2161 if ((!warn_switch_unreachable
&& !warn_trivial_auto_var_init
)
2162 /* This warning doesn't play well with Fortran when optimizations
2164 || lang_GNU_Fortran ()
2168 struct walk_stmt_info wi
;
2170 memset (&wi
, 0, sizeof (wi
));
2171 walk_gimple_seq (seq
, warn_switch_unreachable_and_auto_init_r
, NULL
, &wi
);
2175 /* A label entry that pairs label and a location. */
2182 /* Find LABEL in vector of label entries VEC. */
2184 static struct label_entry
*
2185 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
2188 struct label_entry
*l
;
2190 FOR_EACH_VEC_ELT (*vec
, i
, l
)
2191 if (l
->label
== label
)
2196 /* Return true if LABEL, a LABEL_DECL, represents a case label
2197 in a vector of labels CASES. */
2200 case_label_p (const vec
<tree
> *cases
, tree label
)
2205 FOR_EACH_VEC_ELT (*cases
, i
, l
)
2206 if (CASE_LABEL (l
) == label
)
2211 /* Find the last nondebug statement in a scope STMT. */
2214 last_stmt_in_scope (gimple
*stmt
)
2219 switch (gimple_code (stmt
))
2223 gbind
*bind
= as_a
<gbind
*> (stmt
);
2224 stmt
= gimple_seq_last_nondebug_stmt (gimple_bind_body (bind
));
2225 return last_stmt_in_scope (stmt
);
2230 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
2231 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt
));
2232 gimple
*last_eval
= last_stmt_in_scope (stmt
);
2233 if (gimple_stmt_may_fallthru (last_eval
)
2234 && (last_eval
== NULL
2235 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
2236 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
2238 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt
));
2239 return last_stmt_in_scope (stmt
);
2253 /* Collect labels that may fall through into LABELS and return the statement
2254 preceding another case label, or a user-defined label. Store a location
2255 useful to give warnings at *PREVLOC (usually the location of the returned
2256 statement or of its surrounding scope). */
2259 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
2260 auto_vec
<struct label_entry
> *labels
,
2261 location_t
*prevloc
)
2263 gimple
*prev
= NULL
;
2265 *prevloc
= UNKNOWN_LOCATION
;
2268 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
)
2270 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2271 which starts on a GIMPLE_SWITCH and ends with a break label.
2272 Handle that as a single statement that can fall through. */
2273 gbind
*bind
= as_a
<gbind
*> (gsi_stmt (*gsi_p
));
2274 gimple
*first
= gimple_seq_first_stmt (gimple_bind_body (bind
));
2275 gimple
*last
= gimple_seq_last_stmt (gimple_bind_body (bind
));
2277 && gimple_code (first
) == GIMPLE_SWITCH
2278 && gimple_code (last
) == GIMPLE_LABEL
)
2280 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2281 if (SWITCH_BREAK_LABEL_P (label
))
2289 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
2290 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
2292 /* Nested scope. Only look at the last statement of
2293 the innermost scope. */
2294 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
2295 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
2299 /* It might be a label without a location. Use the
2300 location of the scope then. */
2301 if (!gimple_has_location (prev
))
2302 *prevloc
= bind_loc
;
2308 /* Ifs are tricky. */
2309 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
2311 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
2312 tree false_lab
= gimple_cond_false_label (cond_stmt
);
2313 location_t if_loc
= gimple_location (cond_stmt
);
2316 if (i > 1) goto <D.2259>; else goto D;
2317 we can't do much with the else-branch. */
2318 if (!DECL_ARTIFICIAL (false_lab
))
2321 /* Go on until the false label, then one step back. */
2322 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
2324 gimple
*stmt
= gsi_stmt (*gsi_p
);
2325 if (gimple_code (stmt
) == GIMPLE_LABEL
2326 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
2330 /* Not found? Oops. */
2331 if (gsi_end_p (*gsi_p
))
2334 /* A dead label can't fall through. */
2335 if (!UNUSED_LABEL_P (false_lab
))
2337 struct label_entry l
= { false_lab
, if_loc
};
2338 labels
->safe_push (l
);
2341 /* Go to the last statement of the then branch. */
2344 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2350 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
2351 && !gimple_has_location (gsi_stmt (*gsi_p
)))
2353 /* Look at the statement before, it might be
2354 attribute fallthrough, in which case don't warn. */
2356 bool fallthru_before_dest
2357 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
2359 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
2360 if (!fallthru_before_dest
)
2362 struct label_entry l
= { goto_dest
, if_loc
};
2363 labels
->safe_push (l
);
2366 /* This case is about
2367 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2372 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2373 through to #3. So set PREV to #1. */
2374 else if (UNUSED_LABEL_P (false_lab
))
2375 prev
= gsi_stmt (*gsi_p
);
2377 /* And move back. */
2381 /* Remember the last statement. Skip labels that are of no interest
2383 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2385 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
2386 if (find_label_entry (labels
, label
))
2387 prev
= gsi_stmt (*gsi_p
);
2389 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
2391 else if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_PREDICT
)
2393 else if (!is_gimple_debug (gsi_stmt (*gsi_p
)))
2394 prev
= gsi_stmt (*gsi_p
);
2397 while (!gsi_end_p (*gsi_p
)
2398 /* Stop if we find a case or a user-defined label. */
2399 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2400 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2402 if (prev
&& gimple_has_location (prev
))
2403 *prevloc
= gimple_location (prev
);
2407 /* Return true if the switch fallthough warning should occur. LABEL is
2408 the label statement that we're falling through to. */
2411 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2413 gimple_stmt_iterator gsi
= *gsi_p
;
2415 /* Don't warn if the label is marked with a "falls through" comment. */
2416 if (FALLTHROUGH_LABEL_P (label
))
2419 /* Don't warn for non-case labels followed by a statement:
2424 as these are likely intentional. */
2425 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
2428 while (!gsi_end_p (gsi
)
2429 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2430 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2431 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2432 gsi_next_nondebug (&gsi
);
2433 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2437 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2438 immediately breaks. */
2441 /* Skip all immediately following labels. */
2442 while (!gsi_end_p (gsi
)
2443 && (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2444 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_PREDICT
))
2445 gsi_next_nondebug (&gsi
);
2447 /* { ... something; default:; } */
2449 /* { ... something; default: break; } or
2450 { ... something; default: goto L; } */
2451 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2452 /* { ... something; default: return; } */
2453 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2459 /* Callback for walk_gimple_seq. */
2462 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2463 struct walk_stmt_info
*)
2465 gimple
*stmt
= gsi_stmt (*gsi_p
);
2467 *handled_ops_p
= true;
2468 switch (gimple_code (stmt
))
2473 case GIMPLE_EH_FILTER
:
2474 case GIMPLE_TRANSACTION
:
2475 /* Walk the sub-statements. */
2476 *handled_ops_p
= false;
2479 /* Find a sequence of form:
2486 and possibly warn. */
2489 /* Found a label. Skip all immediately following labels. */
2490 while (!gsi_end_p (*gsi_p
)
2491 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2492 gsi_next_nondebug (gsi_p
);
2494 /* There might be no more statements. */
2495 if (gsi_end_p (*gsi_p
))
2496 return integer_zero_node
;
2498 /* Vector of labels that fall through. */
2499 auto_vec
<struct label_entry
> labels
;
2501 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
, &prevloc
);
2503 /* There might be no more statements. */
2504 if (gsi_end_p (*gsi_p
))
2505 return integer_zero_node
;
2507 gimple
*next
= gsi_stmt (*gsi_p
);
2509 /* If what follows is a label, then we may have a fallthrough. */
2510 if (gimple_code (next
) == GIMPLE_LABEL
2511 && gimple_has_location (next
)
2512 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2515 struct label_entry
*l
;
2516 bool warned_p
= false;
2517 auto_diagnostic_group d
;
2518 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2520 else if (gimple_code (prev
) == GIMPLE_LABEL
2521 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2522 && (l
= find_label_entry (&labels
, label
)))
2523 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2524 "this statement may fall through");
2525 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2526 /* Try to be clever and don't warn when the statement
2527 can't actually fall through. */
2528 && gimple_stmt_may_fallthru (prev
)
2529 && prevloc
!= UNKNOWN_LOCATION
)
2530 warned_p
= warning_at (prevloc
,
2531 OPT_Wimplicit_fallthrough_
,
2532 "this statement may fall through");
2534 inform (gimple_location (next
), "here");
2536 /* Mark this label as processed so as to prevent multiple
2537 warnings in nested switches. */
2538 FALLTHROUGH_LABEL_P (label
) = true;
2540 /* So that next warn_implicit_fallthrough_r will start looking for
2541 a new sequence starting with this label. */
2552 /* Warn when a switch case falls through. */
2555 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2557 if (!warn_implicit_fallthrough
)
2560 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2563 || lang_GNU_OBJC ()))
2566 struct walk_stmt_info wi
;
2567 memset (&wi
, 0, sizeof (wi
));
2568 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2571 /* Callback for walk_gimple_seq. */
2574 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2575 struct walk_stmt_info
*wi
)
2577 gimple
*stmt
= gsi_stmt (*gsi_p
);
2579 *handled_ops_p
= true;
2580 switch (gimple_code (stmt
))
2585 case GIMPLE_EH_FILTER
:
2586 case GIMPLE_TRANSACTION
:
2587 /* Walk the sub-statements. */
2588 *handled_ops_p
= false;
2591 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2593 gsi_remove (gsi_p
, true);
2594 if (gsi_end_p (*gsi_p
))
2596 *static_cast<location_t
*>(wi
->info
) = gimple_location (stmt
);
2597 return integer_zero_node
;
2601 location_t loc
= gimple_location (stmt
);
2603 gimple_stmt_iterator gsi2
= *gsi_p
;
2604 stmt
= gsi_stmt (gsi2
);
2605 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2607 /* Go on until the artificial label. */
2608 tree goto_dest
= gimple_goto_dest (stmt
);
2609 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2611 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2612 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2617 /* Not found? Stop. */
2618 if (gsi_end_p (gsi2
))
2621 /* Look one past it. */
2625 /* We're looking for a case label or default label here. */
2626 while (!gsi_end_p (gsi2
))
2628 stmt
= gsi_stmt (gsi2
);
2629 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2631 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2632 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2638 else if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2640 else if (!is_gimple_debug (stmt
))
2641 /* Anything else is not expected. */
2646 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2647 "a case label or default label");
2656 /* Expand all FALLTHROUGH () calls in SEQ. */
2659 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2661 struct walk_stmt_info wi
;
2663 memset (&wi
, 0, sizeof (wi
));
2664 wi
.info
= (void *) &loc
;
2665 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2666 if (wi
.callback_result
== integer_zero_node
)
2667 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2668 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2669 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2670 "a case label or default label");
2674 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2677 static enum gimplify_status
2678 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2680 tree switch_expr
= *expr_p
;
2681 gimple_seq switch_body_seq
= NULL
;
2682 enum gimplify_status ret
;
2683 tree index_type
= TREE_TYPE (switch_expr
);
2684 if (index_type
== NULL_TREE
)
2685 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2687 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2689 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2692 if (SWITCH_BODY (switch_expr
))
2695 vec
<tree
> saved_labels
;
2696 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2697 tree default_case
= NULL_TREE
;
2698 gswitch
*switch_stmt
;
2700 /* Save old labels, get new ones from body, then restore the old
2701 labels. Save all the things from the switch body to append after. */
2702 saved_labels
= gimplify_ctxp
->case_labels
;
2703 gimplify_ctxp
->case_labels
.create (8);
2705 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2706 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2707 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2708 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2709 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2711 gimplify_ctxp
->live_switch_vars
= NULL
;
2713 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2714 gimplify_ctxp
->in_switch_expr
= true;
2716 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2718 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2719 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq
);
2720 maybe_warn_implicit_fallthrough (switch_body_seq
);
2721 /* Only do this for the outermost GIMPLE_SWITCH. */
2722 if (!gimplify_ctxp
->in_switch_expr
)
2723 expand_FALLTHROUGH (&switch_body_seq
);
2725 labels
= gimplify_ctxp
->case_labels
;
2726 gimplify_ctxp
->case_labels
= saved_labels
;
2728 if (gimplify_ctxp
->live_switch_vars
)
2730 gcc_assert (gimplify_ctxp
->live_switch_vars
->is_empty ());
2731 delete gimplify_ctxp
->live_switch_vars
;
2733 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2735 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2738 bool add_bind
= false;
2741 glabel
*new_default
;
2744 = build_case_label (NULL_TREE
, NULL_TREE
,
2745 create_artificial_label (UNKNOWN_LOCATION
));
2746 if (old_in_switch_expr
)
2748 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case
)) = 1;
2751 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2752 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2754 else if (old_in_switch_expr
)
2756 gimple
*last
= gimple_seq_last_stmt (switch_body_seq
);
2757 if (last
&& gimple_code (last
) == GIMPLE_LABEL
)
2759 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2760 if (SWITCH_BREAK_LABEL_P (label
))
2765 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2766 default_case
, labels
);
2767 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2768 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2769 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2770 so that we can easily find the start and end of the switch
2774 gimple_seq bind_body
= NULL
;
2775 gimplify_seq_add_stmt (&bind_body
, switch_stmt
);
2776 gimple_seq_add_seq (&bind_body
, switch_body_seq
);
2777 gbind
*bind
= gimple_build_bind (NULL_TREE
, bind_body
, NULL_TREE
);
2778 gimple_set_location (bind
, EXPR_LOCATION (switch_expr
));
2779 gimplify_seq_add_stmt (pre_p
, bind
);
2783 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
2784 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
2794 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2796 static enum gimplify_status
2797 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2799 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
2800 == current_function_decl
);
2802 tree label
= LABEL_EXPR_LABEL (*expr_p
);
2803 glabel
*label_stmt
= gimple_build_label (label
);
2804 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2805 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2807 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2808 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2810 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2811 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2817 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2819 static enum gimplify_status
2820 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2822 struct gimplify_ctx
*ctxp
;
2825 /* Invalid programs can play Duff's Device type games with, for example,
2826 #pragma omp parallel. At least in the C front end, we don't
2827 detect such invalid branches until after gimplification, in the
2828 diagnose_omp_blocks pass. */
2829 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
2830 if (ctxp
->case_labels
.exists ())
2833 tree label
= CASE_LABEL (*expr_p
);
2834 label_stmt
= gimple_build_label (label
);
2835 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2836 ctxp
->case_labels
.safe_push (*expr_p
);
2837 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2839 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2840 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2842 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2843 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2849 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2853 build_and_jump (tree
*label_p
)
2855 if (label_p
== NULL
)
2856 /* If there's nowhere to jump, just fall through. */
2859 if (*label_p
== NULL_TREE
)
2861 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
2865 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
2868 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2869 This also involves building a label to jump to and communicating it to
2870 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2872 static enum gimplify_status
2873 gimplify_exit_expr (tree
*expr_p
)
2875 tree cond
= TREE_OPERAND (*expr_p
, 0);
2878 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
2879 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
2885 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2886 different from its canonical type, wrap the whole thing inside a
2887 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2890 The canonical type of a COMPONENT_REF is the type of the field being
2891 referenced--unless the field is a bit-field which can be read directly
2892 in a smaller mode, in which case the canonical type is the
2893 sign-appropriate type corresponding to that mode. */
2896 canonicalize_component_ref (tree
*expr_p
)
2898 tree expr
= *expr_p
;
2901 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
2903 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
2904 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
2906 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
2908 /* One could argue that all the stuff below is not necessary for
2909 the non-bitfield case and declare it a FE error if type
2910 adjustment would be needed. */
2911 if (TREE_TYPE (expr
) != type
)
2913 #ifdef ENABLE_TYPES_CHECKING
2914 tree old_type
= TREE_TYPE (expr
);
2918 /* We need to preserve qualifiers and propagate them from
2920 type_quals
= TYPE_QUALS (type
)
2921 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2922 if (TYPE_QUALS (type
) != type_quals
)
2923 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
2925 /* Set the type of the COMPONENT_REF to the underlying type. */
2926 TREE_TYPE (expr
) = type
;
2928 #ifdef ENABLE_TYPES_CHECKING
2929 /* It is now a FE error, if the conversion from the canonical
2930 type to the original expression type is not useless. */
2931 gcc_assert (useless_type_conversion_p (old_type
, type
));
2936 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2937 to foo, embed that change in the ADDR_EXPR by converting
2942 where L is the lower bound. For simplicity, only do this for constant
2944 The constraint is that the type of &array[L] is trivially convertible
2948 canonicalize_addr_expr (tree
*expr_p
)
2950 tree expr
= *expr_p
;
2951 tree addr_expr
= TREE_OPERAND (expr
, 0);
2952 tree datype
, ddatype
, pddatype
;
2954 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2955 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
2956 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
2959 /* The addr_expr type should be a pointer to an array. */
2960 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
2961 if (TREE_CODE (datype
) != ARRAY_TYPE
)
2964 /* The pointer to element type shall be trivially convertible to
2965 the expression pointer type. */
2966 ddatype
= TREE_TYPE (datype
);
2967 pddatype
= build_pointer_type (ddatype
);
2968 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
2972 /* The lower bound and element sizes must be constant. */
2973 if (!TYPE_SIZE_UNIT (ddatype
)
2974 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
2975 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
2976 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
2979 /* All checks succeeded. Build a new node to merge the cast. */
2980 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
2981 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
2982 NULL_TREE
, NULL_TREE
);
2983 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
2985 /* We can have stripped a required restrict qualifier above. */
2986 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
2987 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
2990 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2991 underneath as appropriate. */
2993 static enum gimplify_status
2994 gimplify_conversion (tree
*expr_p
)
2996 location_t loc
= EXPR_LOCATION (*expr_p
);
2997 gcc_assert (CONVERT_EXPR_P (*expr_p
));
2999 /* Then strip away all but the outermost conversion. */
3000 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
3002 /* And remove the outermost conversion if it's useless. */
3003 if (tree_ssa_useless_type_conversion (*expr_p
))
3004 *expr_p
= TREE_OPERAND (*expr_p
, 0);
3006 /* If we still have a conversion at the toplevel,
3007 then canonicalize some constructs. */
3008 if (CONVERT_EXPR_P (*expr_p
))
3010 tree sub
= TREE_OPERAND (*expr_p
, 0);
3012 /* If a NOP conversion is changing the type of a COMPONENT_REF
3013 expression, then canonicalize its type now in order to expose more
3014 redundant conversions. */
3015 if (TREE_CODE (sub
) == COMPONENT_REF
)
3016 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
3018 /* If a NOP conversion is changing a pointer to array of foo
3019 to a pointer to foo, embed that change in the ADDR_EXPR. */
3020 else if (TREE_CODE (sub
) == ADDR_EXPR
)
3021 canonicalize_addr_expr (expr_p
);
3024 /* If we have a conversion to a non-register type force the
3025 use of a VIEW_CONVERT_EXPR instead. */
3026 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
3027 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
3028 TREE_OPERAND (*expr_p
, 0));
3030 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3031 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
3032 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
3037 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3038 DECL_VALUE_EXPR, and it's worth re-examining things. */
3040 static enum gimplify_status
3041 gimplify_var_or_parm_decl (tree
*expr_p
)
3043 tree decl
= *expr_p
;
3045 /* ??? If this is a local variable, and it has not been seen in any
3046 outer BIND_EXPR, then it's probably the result of a duplicate
3047 declaration, for which we've already issued an error. It would
3048 be really nice if the front end wouldn't leak these at all.
3049 Currently the only known culprit is C++ destructors, as seen
3050 in g++.old-deja/g++.jason/binding.C.
3051 Another possible culpit are size expressions for variably modified
3052 types which are lost in the FE or not gimplified correctly. */
3054 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
3055 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
3056 && decl_function_context (decl
) == current_function_decl
)
3058 gcc_assert (seen_error ());
3062 /* When within an OMP context, notice uses of variables. */
3063 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
3066 /* If the decl is an alias for another expression, substitute it now. */
3067 if (DECL_HAS_VALUE_EXPR_P (decl
))
3069 *expr_p
= unshare_expr (DECL_VALUE_EXPR (decl
));
3076 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3079 recalculate_side_effects (tree t
)
3081 enum tree_code code
= TREE_CODE (t
);
3082 int len
= TREE_OPERAND_LENGTH (t
);
3085 switch (TREE_CODE_CLASS (code
))
3087 case tcc_expression
:
3093 case PREDECREMENT_EXPR
:
3094 case PREINCREMENT_EXPR
:
3095 case POSTDECREMENT_EXPR
:
3096 case POSTINCREMENT_EXPR
:
3097 /* All of these have side-effects, no matter what their
3106 case tcc_comparison
: /* a comparison expression */
3107 case tcc_unary
: /* a unary arithmetic expression */
3108 case tcc_binary
: /* a binary arithmetic expression */
3109 case tcc_reference
: /* a reference */
3110 case tcc_vl_exp
: /* a function call */
3111 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
3112 for (i
= 0; i
< len
; ++i
)
3114 tree op
= TREE_OPERAND (t
, i
);
3115 if (op
&& TREE_SIDE_EFFECTS (op
))
3116 TREE_SIDE_EFFECTS (t
) = 1;
3121 /* No side-effects. */
3129 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3133 : min_lval '[' val ']'
3135 | compound_lval '[' val ']'
3136 | compound_lval '.' ID
3138 This is not part of the original SIMPLE definition, which separates
3139 array and member references, but it seems reasonable to handle them
3140 together. Also, this way we don't run into problems with union
3141 aliasing; gcc requires that for accesses through a union to alias, the
3142 union reference must be explicit, which was not always the case when we
3143 were splitting up array and member refs.
3145 PRE_P points to the sequence where side effects that must happen before
3146 *EXPR_P should be stored.
3148 POST_P points to the sequence where side effects that must happen after
3149 *EXPR_P should be stored. */
3151 static enum gimplify_status
3152 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3153 fallback_t fallback
)
3156 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
3158 location_t loc
= EXPR_LOCATION (*expr_p
);
3159 tree expr
= *expr_p
;
3161 /* Create a stack of the subexpressions so later we can walk them in
3162 order from inner to outer. */
3163 auto_vec
<tree
, 10> expr_stack
;
3165 /* We can handle anything that get_inner_reference can deal with. */
3166 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
3169 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3170 if (TREE_CODE (*p
) == INDIRECT_REF
)
3171 *p
= fold_indirect_ref_loc (loc
, *p
);
3173 if (handled_component_p (*p
))
3175 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3176 additional COMPONENT_REFs. */
3177 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
3178 && gimplify_var_or_parm_decl (p
) == GS_OK
)
3183 expr_stack
.safe_push (*p
);
3186 gcc_assert (expr_stack
.length ());
3188 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3189 walked through and P points to the innermost expression.
3191 Java requires that we elaborated nodes in source order. That
3192 means we must gimplify the inner expression followed by each of
3193 the indices, in order. But we can't gimplify the inner
3194 expression until we deal with any variable bounds, sizes, or
3195 positions in order to deal with PLACEHOLDER_EXPRs.
3197 The base expression may contain a statement expression that
3198 has declarations used in size expressions, so has to be
3199 gimplified before gimplifying the size expressions.
3201 So we do this in three steps. First we deal with variable
3202 bounds, sizes, and positions, then we gimplify the base and
3203 ensure it is memory if needed, then we deal with the annotations
3204 for any variables in the components and any indices, from left
3207 bool need_non_reg
= false;
3208 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
3210 tree t
= expr_stack
[i
];
3212 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3214 /* Deal with the low bound and element type size and put them into
3215 the ARRAY_REF. If these values are set, they have already been
3217 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3219 tree low
= unshare_expr (array_ref_low_bound (t
));
3220 if (!is_gimple_min_invariant (low
))
3222 TREE_OPERAND (t
, 2) = low
;
3226 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
3228 tree elmt_size
= array_ref_element_size (t
);
3229 if (!is_gimple_min_invariant (elmt_size
))
3231 elmt_size
= unshare_expr (elmt_size
);
3232 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
3233 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
3235 /* Divide the element size by the alignment of the element
3237 elmt_size
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3240 TREE_OPERAND (t
, 3) = elmt_size
;
3243 need_non_reg
= true;
3245 else if (TREE_CODE (t
) == COMPONENT_REF
)
3247 /* Set the field offset into T and gimplify it. */
3248 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3250 tree offset
= component_ref_field_offset (t
);
3251 if (!is_gimple_min_invariant (offset
))
3253 offset
= unshare_expr (offset
);
3254 tree field
= TREE_OPERAND (t
, 1);
3256 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
3258 /* Divide the offset by its alignment. */
3259 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3262 TREE_OPERAND (t
, 2) = offset
;
3265 need_non_reg
= true;
3269 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3270 so as to match the min_lval predicate. Failure to do so may result
3271 in the creation of large aggregate temporaries. */
3272 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
3273 fallback
| fb_lvalue
);
3274 ret
= MIN (ret
, tret
);
3276 /* Step 2a: if we have component references we do not support on
3277 registers then make sure the base isn't a register. Of course
3278 we can only do so if an rvalue is OK. */
3279 if (need_non_reg
&& (fallback
& fb_rvalue
))
3280 prepare_gimple_addressable (p
, pre_p
);
3282 /* Step 3: gimplify size expressions and the indices and operands of
3283 ARRAY_REF. During this loop we also remove any useless conversions. */
3285 for (; expr_stack
.length () > 0; )
3287 tree t
= expr_stack
.pop ();
3289 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3291 /* Gimplify the low bound and element type size. */
3292 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3293 is_gimple_reg
, fb_rvalue
);
3294 ret
= MIN (ret
, tret
);
3296 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
3297 is_gimple_reg
, fb_rvalue
);
3298 ret
= MIN (ret
, tret
);
3300 /* Gimplify the dimension. */
3301 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
3302 is_gimple_val
, fb_rvalue
);
3303 ret
= MIN (ret
, tret
);
3305 else if (TREE_CODE (t
) == COMPONENT_REF
)
3307 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3308 is_gimple_reg
, fb_rvalue
);
3309 ret
= MIN (ret
, tret
);
3312 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
3314 /* The innermost expression P may have originally had
3315 TREE_SIDE_EFFECTS set which would have caused all the outer
3316 expressions in *EXPR_P leading to P to also have had
3317 TREE_SIDE_EFFECTS set. */
3318 recalculate_side_effects (t
);
3321 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3322 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
3324 canonicalize_component_ref (expr_p
);
3327 expr_stack
.release ();
3329 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
3334 /* Gimplify the self modifying expression pointed to by EXPR_P
3337 PRE_P points to the list where side effects that must happen before
3338 *EXPR_P should be stored.
3340 POST_P points to the list where side effects that must happen after
3341 *EXPR_P should be stored.
3343 WANT_VALUE is nonzero iff we want to use the value of this expression
3344 in another expression.
3346 ARITH_TYPE is the type the computation should be performed in. */
3348 enum gimplify_status
3349 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3350 bool want_value
, tree arith_type
)
3352 enum tree_code code
;
3353 tree lhs
, lvalue
, rhs
, t1
;
3354 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
3356 enum tree_code arith_code
;
3357 enum gimplify_status ret
;
3358 location_t loc
= EXPR_LOCATION (*expr_p
);
3360 code
= TREE_CODE (*expr_p
);
3362 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
3363 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
3365 /* Prefix or postfix? */
3366 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
3367 /* Faster to treat as prefix if result is not used. */
3368 postfix
= want_value
;
3372 /* For postfix, make sure the inner expression's post side effects
3373 are executed after side effects from this expression. */
3377 /* Add or subtract? */
3378 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
3379 arith_code
= PLUS_EXPR
;
3381 arith_code
= MINUS_EXPR
;
3383 /* Gimplify the LHS into a GIMPLE lvalue. */
3384 lvalue
= TREE_OPERAND (*expr_p
, 0);
3385 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
3386 if (ret
== GS_ERROR
)
3389 /* Extract the operands to the arithmetic operation. */
3391 rhs
= TREE_OPERAND (*expr_p
, 1);
3393 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3394 that as the result value and in the postqueue operation. */
3397 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
3398 if (ret
== GS_ERROR
)
3401 lhs
= get_initialized_tmp_var (lhs
, pre_p
);
3404 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3405 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
3407 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
3408 if (arith_code
== MINUS_EXPR
)
3409 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
3410 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
3413 t1
= fold_convert (TREE_TYPE (*expr_p
),
3414 fold_build2 (arith_code
, arith_type
,
3415 fold_convert (arith_type
, lhs
),
3416 fold_convert (arith_type
, rhs
)));
3420 gimplify_assign (lvalue
, t1
, pre_p
);
3421 gimplify_seq_add_seq (orig_post_p
, post
);
3427 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
3432 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3435 maybe_with_size_expr (tree
*expr_p
)
3437 tree expr
= *expr_p
;
3438 tree type
= TREE_TYPE (expr
);
3441 /* If we've already wrapped this or the type is error_mark_node, we can't do
3443 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3444 || type
== error_mark_node
)
3447 /* If the size isn't known or is a constant, we have nothing to do. */
3448 size
= TYPE_SIZE_UNIT (type
);
3449 if (!size
|| poly_int_tree_p (size
))
3452 /* Otherwise, make a WITH_SIZE_EXPR. */
3453 size
= unshare_expr (size
);
3454 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3455 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
3458 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3459 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3460 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3461 gimplified to an SSA name. */
3463 enum gimplify_status
3464 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
3467 bool (*test
) (tree
);
3470 /* In general, we allow lvalues for function arguments to avoid
3471 extra overhead of copying large aggregates out of even larger
3472 aggregates into temporaries only to copy the temporaries to
3473 the argument list. Make optimizers happy by pulling out to
3474 temporaries those types that fit in registers. */
3475 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3476 test
= is_gimple_val
, fb
= fb_rvalue
;
3479 test
= is_gimple_lvalue
, fb
= fb_either
;
3480 /* Also strip a TARGET_EXPR that would force an extra copy. */
3481 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3483 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3485 && !VOID_TYPE_P (TREE_TYPE (init
)))
3490 /* If this is a variable sized type, we must remember the size. */
3491 maybe_with_size_expr (arg_p
);
3493 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3494 /* Make sure arguments have the same location as the function call
3496 protected_set_expr_location (*arg_p
, call_location
);
3498 /* There is a sequence point before a function call. Side effects in
3499 the argument list must occur before the actual call. So, when
3500 gimplifying arguments, force gimplify_expr to use an internal
3501 post queue which is then appended to the end of PRE_P. */
3502 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
3505 /* Don't fold inside offloading or taskreg regions: it can break code by
3506 adding decl references that weren't in the source. We'll do it during
3507 omplower pass instead. */
3510 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3512 struct gimplify_omp_ctx
*ctx
;
3513 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3514 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3516 else if ((ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
3518 /* Delay folding of builtins until the IL is in consistent state
3519 so the diagnostic machinery can do a better job. */
3520 if (gimple_call_builtin_p (gsi_stmt (*gsi
)))
3522 return fold_stmt (gsi
);
3525 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3526 WANT_VALUE is true if the result of the call is desired. */
3528 static enum gimplify_status
3529 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3531 tree fndecl
, parms
, p
, fnptrtype
;
3532 enum gimplify_status ret
;
3535 bool builtin_va_start_p
= false;
3536 location_t loc
= EXPR_LOCATION (*expr_p
);
3538 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3540 /* For reliable diagnostics during inlining, it is necessary that
3541 every call_expr be annotated with file and line. */
3542 if (! EXPR_HAS_LOCATION (*expr_p
))
3543 SET_EXPR_LOCATION (*expr_p
, input_location
);
3545 /* Gimplify internal functions created in the FEs. */
3546 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3551 nargs
= call_expr_nargs (*expr_p
);
3552 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3553 auto_vec
<tree
> vargs (nargs
);
3555 for (i
= 0; i
< nargs
; i
++)
3557 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3558 EXPR_LOCATION (*expr_p
));
3559 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3562 gcall
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3563 gimple_call_set_nothrow (call
, TREE_NOTHROW (*expr_p
));
3564 gimplify_seq_add_stmt (pre_p
, call
);
3568 /* This may be a call to a builtin function.
3570 Builtin function calls may be transformed into different
3571 (and more efficient) builtin function calls under certain
3572 circumstances. Unfortunately, gimplification can muck things
3573 up enough that the builtin expanders are not aware that certain
3574 transformations are still valid.
3576 So we attempt transformation/gimplification of the call before
3577 we gimplify the CALL_EXPR. At this time we do not manage to
3578 transform all calls in the same manner as the expanders do, but
3579 we do transform most of them. */
3580 fndecl
= get_callee_fndecl (*expr_p
);
3581 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3582 switch (DECL_FUNCTION_CODE (fndecl
))
3584 CASE_BUILT_IN_ALLOCA
:
3585 /* If the call has been built for a variable-sized object, then we
3586 want to restore the stack level when the enclosing BIND_EXPR is
3587 exited to reclaim the allocated space; otherwise, we precisely
3588 need to do the opposite and preserve the latest stack level. */
3589 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3590 gimplify_ctxp
->save_stack
= true;
3592 gimplify_ctxp
->keep_stack
= true;
3595 case BUILT_IN_VA_START
:
3597 builtin_va_start_p
= TRUE
;
3598 if (call_expr_nargs (*expr_p
) < 2)
3600 error ("too few arguments to function %<va_start%>");
3601 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3605 if (fold_builtin_next_arg (*expr_p
, true))
3607 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3613 case BUILT_IN_EH_RETURN
:
3614 cfun
->calls_eh_return
= true;
3617 case BUILT_IN_CLEAR_PADDING
:
3618 if (call_expr_nargs (*expr_p
) == 1)
3620 /* Remember the original type of the argument in an internal
3621 dummy second argument, as in GIMPLE pointer conversions are
3622 useless. Also mark this call as not for automatic
3623 initialization in the internal dummy third argument. */
3624 p
= CALL_EXPR_ARG (*expr_p
, 0);
3626 = build_call_expr_loc (EXPR_LOCATION (*expr_p
), fndecl
, 2, p
,
3627 build_zero_cst (TREE_TYPE (p
)));
3635 if (fndecl
&& fndecl_built_in_p (fndecl
))
3637 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3638 if (new_tree
&& new_tree
!= *expr_p
)
3640 /* There was a transformation of this call which computes the
3641 same value, but in a more efficient way. Return and try
3648 /* Remember the original function pointer type. */
3649 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3654 && (cfun
->curr_properties
& PROP_gimple_any
) == 0)
3656 tree variant
= omp_resolve_declare_variant (fndecl
);
3657 if (variant
!= fndecl
)
3658 CALL_EXPR_FN (*expr_p
) = build1 (ADDR_EXPR
, fnptrtype
, variant
);
3661 /* There is a sequence point before the call, so any side effects in
3662 the calling expression must occur before the actual call. Force
3663 gimplify_expr to use an internal post queue. */
3664 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3665 is_gimple_call_addr
, fb_rvalue
);
3667 nargs
= call_expr_nargs (*expr_p
);
3669 /* Get argument types for verification. */
3670 fndecl
= get_callee_fndecl (*expr_p
);
3673 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3675 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3677 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3678 p
= DECL_ARGUMENTS (fndecl
);
3683 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3686 /* If the last argument is __builtin_va_arg_pack () and it is not
3687 passed as a named argument, decrease the number of CALL_EXPR
3688 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3691 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3693 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3694 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3697 && fndecl_built_in_p (last_arg_fndecl
, BUILT_IN_VA_ARG_PACK
))
3699 tree call
= *expr_p
;
3702 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3703 CALL_EXPR_FN (call
),
3704 nargs
, CALL_EXPR_ARGP (call
));
3706 /* Copy all CALL_EXPR flags, location and block, except
3707 CALL_EXPR_VA_ARG_PACK flag. */
3708 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
3709 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
3710 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
3711 = CALL_EXPR_RETURN_SLOT_OPT (call
);
3712 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
3713 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
3715 /* Set CALL_EXPR_VA_ARG_PACK. */
3716 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
3720 /* If the call returns twice then after building the CFG the call
3721 argument computations will no longer dominate the call because
3722 we add an abnormal incoming edge to the call. So do not use SSA
3724 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
3726 /* Gimplify the function arguments. */
3729 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
3730 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
3731 PUSH_ARGS_REVERSED
? i
-- : i
++)
3733 enum gimplify_status t
;
3735 /* Avoid gimplifying the second argument to va_start, which needs to
3736 be the plain PARM_DECL. */
3737 if ((i
!= 1) || !builtin_va_start_p
)
3739 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3740 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3748 /* Gimplify the static chain. */
3749 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
3751 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
3752 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
3755 enum gimplify_status t
;
3756 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
3757 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3763 /* Verify the function result. */
3764 if (want_value
&& fndecl
3765 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
3767 error_at (loc
, "using result of function returning %<void%>");
3771 /* Try this again in case gimplification exposed something. */
3772 if (ret
!= GS_ERROR
)
3774 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3776 if (new_tree
&& new_tree
!= *expr_p
)
3778 /* There was a transformation of this call which computes the
3779 same value, but in a more efficient way. Return and try
3787 *expr_p
= error_mark_node
;
3791 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3792 decl. This allows us to eliminate redundant or useless
3793 calls to "const" functions. */
3794 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
3796 int flags
= call_expr_flags (*expr_p
);
3797 if (flags
& (ECF_CONST
| ECF_PURE
)
3798 /* An infinite loop is considered a side effect. */
3799 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
3800 TREE_SIDE_EFFECTS (*expr_p
) = 0;
3803 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3804 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3805 form and delegate the creation of a GIMPLE_CALL to
3806 gimplify_modify_expr. This is always possible because when
3807 WANT_VALUE is true, the caller wants the result of this call into
3808 a temporary, which means that we will emit an INIT_EXPR in
3809 internal_get_tmp_var which will then be handled by
3810 gimplify_modify_expr. */
3813 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3814 have to do is replicate it as a GIMPLE_CALL tuple. */
3815 gimple_stmt_iterator gsi
;
3816 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
3817 notice_special_calls (call
);
3818 gimplify_seq_add_stmt (pre_p
, call
);
3819 gsi
= gsi_last (*pre_p
);
3820 maybe_fold_stmt (&gsi
);
3821 *expr_p
= NULL_TREE
;
3824 /* Remember the original function type. */
3825 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
3826 CALL_EXPR_FN (*expr_p
));
3831 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3832 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3834 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3835 condition is true or false, respectively. If null, we should generate
3836 our own to skip over the evaluation of this specific expression.
3838 LOCUS is the source location of the COND_EXPR.
3840 This function is the tree equivalent of do_jump.
3842 shortcut_cond_r should only be called by shortcut_cond_expr. */
3845 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
3848 tree local_label
= NULL_TREE
;
3849 tree t
, expr
= NULL
;
3851 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3852 retain the shortcut semantics. Just insert the gotos here;
3853 shortcut_cond_expr will append the real blocks later. */
3854 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3856 location_t new_locus
;
3858 /* Turn if (a && b) into
3860 if (a); else goto no;
3861 if (b) goto yes; else goto no;
3864 if (false_label_p
== NULL
)
3865 false_label_p
= &local_label
;
3867 /* Keep the original source location on the first 'if'. */
3868 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
3869 append_to_statement_list (t
, &expr
);
3871 /* Set the source location of the && on the second 'if'. */
3872 new_locus
= rexpr_location (pred
, locus
);
3873 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3875 append_to_statement_list (t
, &expr
);
3877 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3879 location_t new_locus
;
3881 /* Turn if (a || b) into
3884 if (b) goto yes; else goto no;
3887 if (true_label_p
== NULL
)
3888 true_label_p
= &local_label
;
3890 /* Keep the original source location on the first 'if'. */
3891 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
3892 append_to_statement_list (t
, &expr
);
3894 /* Set the source location of the || on the second 'if'. */
3895 new_locus
= rexpr_location (pred
, locus
);
3896 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3898 append_to_statement_list (t
, &expr
);
3900 else if (TREE_CODE (pred
) == COND_EXPR
3901 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
3902 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
3904 location_t new_locus
;
3906 /* As long as we're messing with gotos, turn if (a ? b : c) into
3908 if (b) goto yes; else goto no;
3910 if (c) goto yes; else goto no;
3912 Don't do this if one of the arms has void type, which can happen
3913 in C++ when the arm is throw. */
3915 /* Keep the original source location on the first 'if'. Set the source
3916 location of the ? on the second 'if'. */
3917 new_locus
= rexpr_location (pred
, locus
);
3918 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
3919 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
3920 false_label_p
, locus
),
3921 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
3922 false_label_p
, new_locus
));
3926 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
3927 build_and_jump (true_label_p
),
3928 build_and_jump (false_label_p
));
3929 SET_EXPR_LOCATION (expr
, locus
);
3934 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
3935 append_to_statement_list (t
, &expr
);
3941 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3942 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3943 statement, if it is the last one. Otherwise, return NULL. */
3946 find_goto (tree expr
)
3951 if (TREE_CODE (expr
) == GOTO_EXPR
)
3954 if (TREE_CODE (expr
) != STATEMENT_LIST
)
3957 tree_stmt_iterator i
= tsi_start (expr
);
3959 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
3962 if (!tsi_one_before_end_p (i
))
3965 return find_goto (tsi_stmt (i
));
3968 /* Same as find_goto, except that it returns NULL if the destination
3969 is not a LABEL_DECL. */
3972 find_goto_label (tree expr
)
3974 tree dest
= find_goto (expr
);
3975 if (dest
&& TREE_CODE (GOTO_DESTINATION (dest
)) == LABEL_DECL
)
3980 /* Given a conditional expression EXPR with short-circuit boolean
3981 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3982 predicate apart into the equivalent sequence of conditionals. */
3985 shortcut_cond_expr (tree expr
)
3987 tree pred
= TREE_OPERAND (expr
, 0);
3988 tree then_
= TREE_OPERAND (expr
, 1);
3989 tree else_
= TREE_OPERAND (expr
, 2);
3990 tree true_label
, false_label
, end_label
, t
;
3992 tree
*false_label_p
;
3993 bool emit_end
, emit_false
, jump_over_else
;
3994 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3995 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3997 /* First do simple transformations. */
4000 /* If there is no 'else', turn
4003 if (a) if (b) then c. */
4004 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
4006 /* Keep the original source location on the first 'if'. */
4007 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
4008 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
4009 /* Set the source location of the && on the second 'if'. */
4010 if (rexpr_has_location (pred
))
4011 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
4012 then_
= shortcut_cond_expr (expr
);
4013 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
4014 pred
= TREE_OPERAND (pred
, 0);
4015 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
4016 SET_EXPR_LOCATION (expr
, locus
);
4022 /* If there is no 'then', turn
4025 if (a); else if (b); else d. */
4026 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
4028 /* Keep the original source location on the first 'if'. */
4029 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
4030 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
4031 /* Set the source location of the || on the second 'if'. */
4032 if (rexpr_has_location (pred
))
4033 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
4034 else_
= shortcut_cond_expr (expr
);
4035 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
4036 pred
= TREE_OPERAND (pred
, 0);
4037 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
4038 SET_EXPR_LOCATION (expr
, locus
);
4042 /* If we're done, great. */
4043 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
4044 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
4047 /* Otherwise we need to mess with gotos. Change
4050 if (a); else goto no;
4053 and recursively gimplify the condition. */
4055 true_label
= false_label
= end_label
= NULL_TREE
;
4057 /* If our arms just jump somewhere, hijack those labels so we don't
4058 generate jumps to jumps. */
4060 if (tree then_goto
= find_goto_label (then_
))
4062 true_label
= GOTO_DESTINATION (then_goto
);
4067 if (tree else_goto
= find_goto_label (else_
))
4069 false_label
= GOTO_DESTINATION (else_goto
);
4074 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4076 true_label_p
= &true_label
;
4078 true_label_p
= NULL
;
4080 /* The 'else' branch also needs a label if it contains interesting code. */
4081 if (false_label
|| else_se
)
4082 false_label_p
= &false_label
;
4084 false_label_p
= NULL
;
4086 /* If there was nothing else in our arms, just forward the label(s). */
4087 if (!then_se
&& !else_se
)
4088 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
4089 EXPR_LOC_OR_LOC (expr
, input_location
));
4091 /* If our last subexpression already has a terminal label, reuse it. */
4093 t
= expr_last (else_
);
4095 t
= expr_last (then_
);
4098 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
4099 end_label
= LABEL_EXPR_LABEL (t
);
4101 /* If we don't care about jumping to the 'else' branch, jump to the end
4102 if the condition is false. */
4104 false_label_p
= &end_label
;
4106 /* We only want to emit these labels if we aren't hijacking them. */
4107 emit_end
= (end_label
== NULL_TREE
);
4108 emit_false
= (false_label
== NULL_TREE
);
4110 /* We only emit the jump over the else clause if we have to--if the
4111 then clause may fall through. Otherwise we can wind up with a
4112 useless jump and a useless label at the end of gimplified code,
4113 which will cause us to think that this conditional as a whole
4114 falls through even if it doesn't. If we then inline a function
4115 which ends with such a condition, that can cause us to issue an
4116 inappropriate warning about control reaching the end of a
4117 non-void function. */
4118 jump_over_else
= block_may_fallthru (then_
);
4120 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
4121 EXPR_LOC_OR_LOC (expr
, input_location
));
4124 append_to_statement_list (pred
, &expr
);
4126 append_to_statement_list (then_
, &expr
);
4131 tree last
= expr_last (expr
);
4132 t
= build_and_jump (&end_label
);
4133 if (rexpr_has_location (last
))
4134 SET_EXPR_LOCATION (t
, rexpr_location (last
));
4135 append_to_statement_list (t
, &expr
);
4139 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
4140 append_to_statement_list (t
, &expr
);
4142 append_to_statement_list (else_
, &expr
);
4144 if (emit_end
&& end_label
)
4146 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
4147 append_to_statement_list (t
, &expr
);
4153 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4156 gimple_boolify (tree expr
)
4158 tree type
= TREE_TYPE (expr
);
4159 location_t loc
= EXPR_LOCATION (expr
);
4161 if (TREE_CODE (expr
) == NE_EXPR
4162 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
4163 && integer_zerop (TREE_OPERAND (expr
, 1)))
4165 tree call
= TREE_OPERAND (expr
, 0);
4166 tree fn
= get_callee_fndecl (call
);
4168 /* For __builtin_expect ((long) (x), y) recurse into x as well
4169 if x is truth_value_p. */
4171 && fndecl_built_in_p (fn
, BUILT_IN_EXPECT
)
4172 && call_expr_nargs (call
) == 2)
4174 tree arg
= CALL_EXPR_ARG (call
, 0);
4177 if (TREE_CODE (arg
) == NOP_EXPR
4178 && TREE_TYPE (arg
) == TREE_TYPE (call
))
4179 arg
= TREE_OPERAND (arg
, 0);
4180 if (truth_value_p (TREE_CODE (arg
)))
4182 arg
= gimple_boolify (arg
);
4183 CALL_EXPR_ARG (call
, 0)
4184 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
4190 switch (TREE_CODE (expr
))
4192 case TRUTH_AND_EXPR
:
4194 case TRUTH_XOR_EXPR
:
4195 case TRUTH_ANDIF_EXPR
:
4196 case TRUTH_ORIF_EXPR
:
4197 /* Also boolify the arguments of truth exprs. */
4198 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
4201 case TRUTH_NOT_EXPR
:
4202 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4204 /* These expressions always produce boolean results. */
4205 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4206 TREE_TYPE (expr
) = boolean_type_node
;
4210 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
4212 case annot_expr_ivdep_kind
:
4213 case annot_expr_unroll_kind
:
4214 case annot_expr_no_vector_kind
:
4215 case annot_expr_vector_kind
:
4216 case annot_expr_parallel_kind
:
4217 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4218 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4219 TREE_TYPE (expr
) = boolean_type_node
;
4226 if (COMPARISON_CLASS_P (expr
))
4228 /* There expressions always prduce boolean results. */
4229 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4230 TREE_TYPE (expr
) = boolean_type_node
;
4233 /* Other expressions that get here must have boolean values, but
4234 might need to be converted to the appropriate mode. */
4235 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
4237 return fold_convert_loc (loc
, boolean_type_node
, expr
);
4241 /* Given a conditional expression *EXPR_P without side effects, gimplify
4242 its operands. New statements are inserted to PRE_P. */
4244 static enum gimplify_status
4245 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
4247 tree expr
= *expr_p
, cond
;
4248 enum gimplify_status ret
, tret
;
4249 enum tree_code code
;
4251 cond
= gimple_boolify (COND_EXPR_COND (expr
));
4253 /* We need to handle && and || specially, as their gimplification
4254 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4255 code
= TREE_CODE (cond
);
4256 if (code
== TRUTH_ANDIF_EXPR
)
4257 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
4258 else if (code
== TRUTH_ORIF_EXPR
)
4259 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
4260 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_condexpr
, fb_rvalue
);
4261 COND_EXPR_COND (*expr_p
) = cond
;
4263 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
4264 is_gimple_val
, fb_rvalue
);
4265 ret
= MIN (ret
, tret
);
4266 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
4267 is_gimple_val
, fb_rvalue
);
4269 return MIN (ret
, tret
);
4272 /* Return true if evaluating EXPR could trap.
4273 EXPR is GENERIC, while tree_could_trap_p can be called
4277 generic_expr_could_trap_p (tree expr
)
4281 if (!expr
|| is_gimple_val (expr
))
4284 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
4287 n
= TREE_OPERAND_LENGTH (expr
);
4288 for (i
= 0; i
< n
; i
++)
4289 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
4295 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4304 The second form is used when *EXPR_P is of type void.
4306 PRE_P points to the list where side effects that must happen before
4307 *EXPR_P should be stored. */
4309 static enum gimplify_status
4310 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
4312 tree expr
= *expr_p
;
4313 tree type
= TREE_TYPE (expr
);
4314 location_t loc
= EXPR_LOCATION (expr
);
4315 tree tmp
, arm1
, arm2
;
4316 enum gimplify_status ret
;
4317 tree label_true
, label_false
, label_cont
;
4318 bool have_then_clause_p
, have_else_clause_p
;
4320 enum tree_code pred_code
;
4321 gimple_seq seq
= NULL
;
4323 /* If this COND_EXPR has a value, copy the values into a temporary within
4325 if (!VOID_TYPE_P (type
))
4327 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
4330 /* If either an rvalue is ok or we do not require an lvalue, create the
4331 temporary. But we cannot do that if the type is addressable. */
4332 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
4333 && !TREE_ADDRESSABLE (type
))
4335 if (gimplify_ctxp
->allow_rhs_cond_expr
4336 /* If either branch has side effects or could trap, it can't be
4337 evaluated unconditionally. */
4338 && !TREE_SIDE_EFFECTS (then_
)
4339 && !generic_expr_could_trap_p (then_
)
4340 && !TREE_SIDE_EFFECTS (else_
)
4341 && !generic_expr_could_trap_p (else_
))
4342 return gimplify_pure_cond_expr (expr_p
, pre_p
);
4344 tmp
= create_tmp_var (type
, "iftmp");
4348 /* Otherwise, only create and copy references to the values. */
4351 type
= build_pointer_type (type
);
4353 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4354 then_
= build_fold_addr_expr_loc (loc
, then_
);
4356 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4357 else_
= build_fold_addr_expr_loc (loc
, else_
);
4360 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
4362 tmp
= create_tmp_var (type
, "iftmp");
4363 result
= build_simple_mem_ref_loc (loc
, tmp
);
4366 /* Build the new then clause, `tmp = then_;'. But don't build the
4367 assignment if the value is void; in C++ it can be if it's a throw. */
4368 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4369 TREE_OPERAND (expr
, 1) = build2 (INIT_EXPR
, type
, tmp
, then_
);
4371 /* Similarly, build the new else clause, `tmp = else_;'. */
4372 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4373 TREE_OPERAND (expr
, 2) = build2 (INIT_EXPR
, type
, tmp
, else_
);
4375 TREE_TYPE (expr
) = void_type_node
;
4376 recalculate_side_effects (expr
);
4378 /* Move the COND_EXPR to the prequeue. */
4379 gimplify_stmt (&expr
, pre_p
);
4385 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4386 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
4387 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
4388 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
4390 /* Make sure the condition has BOOLEAN_TYPE. */
4391 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4393 /* Break apart && and || conditions. */
4394 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
4395 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
4397 expr
= shortcut_cond_expr (expr
);
4399 if (expr
!= *expr_p
)
4403 /* We can't rely on gimplify_expr to re-gimplify the expanded
4404 form properly, as cleanups might cause the target labels to be
4405 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4406 set up a conditional context. */
4407 gimple_push_condition ();
4408 gimplify_stmt (expr_p
, &seq
);
4409 gimple_pop_condition (pre_p
);
4410 gimple_seq_add_seq (pre_p
, seq
);
4416 /* Now do the normal gimplification. */
4418 /* Gimplify condition. */
4419 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
,
4420 is_gimple_condexpr_for_cond
, fb_rvalue
);
4421 if (ret
== GS_ERROR
)
4423 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
4425 gimple_push_condition ();
4427 have_then_clause_p
= have_else_clause_p
= false;
4428 label_true
= find_goto_label (TREE_OPERAND (expr
, 1));
4430 && DECL_CONTEXT (GOTO_DESTINATION (label_true
)) == current_function_decl
4431 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4432 have different locations, otherwise we end up with incorrect
4433 location information on the branches. */
4435 || !EXPR_HAS_LOCATION (expr
)
4436 || !rexpr_has_location (label_true
)
4437 || EXPR_LOCATION (expr
) == rexpr_location (label_true
)))
4439 have_then_clause_p
= true;
4440 label_true
= GOTO_DESTINATION (label_true
);
4443 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4444 label_false
= find_goto_label (TREE_OPERAND (expr
, 2));
4446 && DECL_CONTEXT (GOTO_DESTINATION (label_false
)) == current_function_decl
4447 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4448 have different locations, otherwise we end up with incorrect
4449 location information on the branches. */
4451 || !EXPR_HAS_LOCATION (expr
)
4452 || !rexpr_has_location (label_false
)
4453 || EXPR_LOCATION (expr
) == rexpr_location (label_false
)))
4455 have_else_clause_p
= true;
4456 label_false
= GOTO_DESTINATION (label_false
);
4459 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
4461 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
4463 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
4465 gimple_set_location (cond_stmt
, EXPR_LOCATION (expr
));
4466 copy_warning (cond_stmt
, COND_EXPR_COND (expr
));
4467 gimplify_seq_add_stmt (&seq
, cond_stmt
);
4468 gimple_stmt_iterator gsi
= gsi_last (seq
);
4469 maybe_fold_stmt (&gsi
);
4471 label_cont
= NULL_TREE
;
4472 if (!have_then_clause_p
)
4474 /* For if (...) {} else { code; } put label_true after
4476 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4477 && !have_else_clause_p
4478 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4480 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4481 handling that label_cont == label_true can be only reached
4482 through fallthrough from { code; }. */
4483 if (integer_zerop (COND_EXPR_COND (expr
)))
4484 UNUSED_LABEL_P (label_true
) = 1;
4485 label_cont
= label_true
;
4489 bool then_side_effects
4490 = (TREE_OPERAND (expr
, 1)
4491 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr
, 1)));
4492 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4493 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4494 /* For if (...) { code; } else {} or
4495 if (...) { code; } else goto label; or
4496 if (...) { code; return; } else { ... }
4497 label_cont isn't needed. */
4498 if (!have_else_clause_p
4499 && TREE_OPERAND (expr
, 2) != NULL_TREE
4500 && gimple_seq_may_fallthru (seq
))
4503 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4505 /* For if (0) { non-side-effect-code } else { code }
4506 tell -Wimplicit-fallthrough handling that label_cont can
4507 be only reached through fallthrough from { code }. */
4508 if (integer_zerop (COND_EXPR_COND (expr
)))
4510 UNUSED_LABEL_P (label_true
) = 1;
4511 if (!then_side_effects
)
4512 UNUSED_LABEL_P (label_cont
) = 1;
4515 g
= gimple_build_goto (label_cont
);
4517 /* GIMPLE_COND's are very low level; they have embedded
4518 gotos. This particular embedded goto should not be marked
4519 with the location of the original COND_EXPR, as it would
4520 correspond to the COND_EXPR's condition, not the ELSE or the
4521 THEN arms. To avoid marking it with the wrong location, flag
4522 it as "no location". */
4523 gimple_set_do_not_emit_location (g
);
4525 gimplify_seq_add_stmt (&seq
, g
);
4529 if (!have_else_clause_p
)
4531 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4532 tell -Wimplicit-fallthrough handling that label_false can be only
4533 reached through fallthrough from { code }. */
4534 if (integer_nonzerop (COND_EXPR_COND (expr
))
4535 && (TREE_OPERAND (expr
, 2) == NULL_TREE
4536 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr
, 2))))
4537 UNUSED_LABEL_P (label_false
) = 1;
4538 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4539 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
4542 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4544 gimple_pop_condition (pre_p
);
4545 gimple_seq_add_seq (pre_p
, seq
);
4547 if (ret
== GS_ERROR
)
4549 else if (have_then_clause_p
|| have_else_clause_p
)
4553 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4554 expr
= TREE_OPERAND (expr
, 0);
4555 gimplify_stmt (&expr
, pre_p
);
4562 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4563 to be marked addressable.
4565 We cannot rely on such an expression being directly markable if a temporary
4566 has been created by the gimplification. In this case, we create another
4567 temporary and initialize it with a copy, which will become a store after we
4568 mark it addressable. This can happen if the front-end passed us something
4569 that it could not mark addressable yet, like a Fortran pass-by-reference
4570 parameter (int) floatvar. */
4573 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4575 while (handled_component_p (*expr_p
))
4576 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4577 if (is_gimple_reg (*expr_p
))
4579 /* Do not allow an SSA name as the temporary. */
4580 tree var
= get_initialized_tmp_var (*expr_p
, seq_p
, NULL
, false);
4581 DECL_NOT_GIMPLE_REG_P (var
) = 1;
4586 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4587 a call to __builtin_memcpy. */
4589 static enum gimplify_status
4590 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4593 tree t
, to
, to_ptr
, from
, from_ptr
;
4595 location_t loc
= EXPR_LOCATION (*expr_p
);
4597 to
= TREE_OPERAND (*expr_p
, 0);
4598 from
= TREE_OPERAND (*expr_p
, 1);
4600 /* Mark the RHS addressable. Beware that it may not be possible to do so
4601 directly if a temporary has been created by the gimplification. */
4602 prepare_gimple_addressable (&from
, seq_p
);
4604 mark_addressable (from
);
4605 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4606 gimplify_arg (&from_ptr
, seq_p
, loc
);
4608 mark_addressable (to
);
4609 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4610 gimplify_arg (&to_ptr
, seq_p
, loc
);
4612 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4614 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4615 gimple_call_set_alloca_for_var (gs
, true);
4619 /* tmp = memcpy() */
4620 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4621 gimple_call_set_lhs (gs
, t
);
4622 gimplify_seq_add_stmt (seq_p
, gs
);
4624 *expr_p
= build_simple_mem_ref (t
);
4628 gimplify_seq_add_stmt (seq_p
, gs
);
4633 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4634 a call to __builtin_memset. In this case we know that the RHS is
4635 a CONSTRUCTOR with an empty element list. */
4637 static enum gimplify_status
4638 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4641 tree t
, from
, to
, to_ptr
;
4643 location_t loc
= EXPR_LOCATION (*expr_p
);
4645 /* Assert our assumptions, to abort instead of producing wrong code
4646 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4647 not be immediately exposed. */
4648 from
= TREE_OPERAND (*expr_p
, 1);
4649 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4650 from
= TREE_OPERAND (from
, 0);
4652 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4653 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4656 to
= TREE_OPERAND (*expr_p
, 0);
4658 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4659 gimplify_arg (&to_ptr
, seq_p
, loc
);
4660 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4662 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4666 /* tmp = memset() */
4667 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4668 gimple_call_set_lhs (gs
, t
);
4669 gimplify_seq_add_stmt (seq_p
, gs
);
4671 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4675 gimplify_seq_add_stmt (seq_p
, gs
);
4680 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4681 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4682 assignment. Return non-null if we detect a potential overlap. */
4684 struct gimplify_init_ctor_preeval_data
4686 /* The base decl of the lhs object. May be NULL, in which case we
4687 have to assume the lhs is indirect. */
4690 /* The alias set of the lhs object. */
4691 alias_set_type lhs_alias_set
;
4695 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4697 struct gimplify_init_ctor_preeval_data
*data
4698 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4701 /* If we find the base object, obviously we have overlap. */
4702 if (data
->lhs_base_decl
== t
)
4705 /* If the constructor component is indirect, determine if we have a
4706 potential overlap with the lhs. The only bits of information we
4707 have to go on at this point are addressability and alias sets. */
4708 if ((INDIRECT_REF_P (t
)
4709 || TREE_CODE (t
) == MEM_REF
)
4710 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4711 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
4714 /* If the constructor component is a call, determine if it can hide a
4715 potential overlap with the lhs through an INDIRECT_REF like above.
4716 ??? Ugh - this is completely broken. In fact this whole analysis
4717 doesn't look conservative. */
4718 if (TREE_CODE (t
) == CALL_EXPR
)
4720 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
4722 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
4723 if (POINTER_TYPE_P (TREE_VALUE (type
))
4724 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4725 && alias_sets_conflict_p (data
->lhs_alias_set
,
4727 (TREE_TYPE (TREE_VALUE (type
)))))
4731 if (IS_TYPE_OR_DECL_P (t
))
4736 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4737 force values that overlap with the lhs (as described by *DATA)
4738 into temporaries. */
4741 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4742 struct gimplify_init_ctor_preeval_data
*data
)
4744 enum gimplify_status one
;
4746 /* If the value is constant, then there's nothing to pre-evaluate. */
4747 if (TREE_CONSTANT (*expr_p
))
4749 /* Ensure it does not have side effects, it might contain a reference to
4750 the object we're initializing. */
4751 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
4755 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4756 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
4759 /* Recurse for nested constructors. */
4760 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
4762 unsigned HOST_WIDE_INT ix
;
4763 constructor_elt
*ce
;
4764 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
4766 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
4767 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
4772 /* If this is a variable sized type, we must remember the size. */
4773 maybe_with_size_expr (expr_p
);
4775 /* Gimplify the constructor element to something appropriate for the rhs
4776 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4777 the gimplifier will consider this a store to memory. Doing this
4778 gimplification now means that we won't have to deal with complicated
4779 language-specific trees, nor trees like SAVE_EXPR that can induce
4780 exponential search behavior. */
4781 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
4782 if (one
== GS_ERROR
)
4788 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4789 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4790 always be true for all scalars, since is_gimple_mem_rhs insists on a
4791 temporary variable for them. */
4792 if (DECL_P (*expr_p
))
4795 /* If this is of variable size, we have no choice but to assume it doesn't
4796 overlap since we can't make a temporary for it. */
4797 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
4800 /* Otherwise, we must search for overlap ... */
4801 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
4804 /* ... and if found, force the value into a temporary. */
4805 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
4808 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4809 a RANGE_EXPR in a CONSTRUCTOR for an array.
4813 object[var] = value;
4820 We increment var _after_ the loop exit check because we might otherwise
4821 fail if upper == TYPE_MAX_VALUE (type for upper).
4823 Note that we never have to deal with SAVE_EXPRs here, because this has
4824 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4826 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
4827 gimple_seq
*, bool);
4830 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
4831 tree value
, tree array_elt_type
,
4832 gimple_seq
*pre_p
, bool cleared
)
4834 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
4835 tree var
, var_type
, cref
, tmp
;
4837 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
4838 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
4839 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
4841 /* Create and initialize the index variable. */
4842 var_type
= TREE_TYPE (upper
);
4843 var
= create_tmp_var (var_type
);
4844 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
4846 /* Add the loop entry label. */
4847 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
4849 /* Build the reference. */
4850 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4851 var
, NULL_TREE
, NULL_TREE
);
4853 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4854 the store. Otherwise just assign value to the reference. */
4856 if (TREE_CODE (value
) == CONSTRUCTOR
)
4857 /* NB we might have to call ourself recursively through
4858 gimplify_init_ctor_eval if the value is a constructor. */
4859 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4863 if (gimplify_expr (&value
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
4865 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
4868 /* We exit the loop when the index var is equal to the upper bound. */
4869 gimplify_seq_add_stmt (pre_p
,
4870 gimple_build_cond (EQ_EXPR
, var
, upper
,
4871 loop_exit_label
, fall_thru_label
));
4873 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
4875 /* Otherwise, increment the index var... */
4876 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
4877 fold_convert (var_type
, integer_one_node
));
4878 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
4880 /* ...and jump back to the loop entry. */
4881 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
4883 /* Add the loop exit label. */
4884 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
4887 /* A subroutine of gimplify_init_constructor. Generate individual
4888 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4889 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4890 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4894 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
4895 gimple_seq
*pre_p
, bool cleared
)
4897 tree array_elt_type
= NULL
;
4898 unsigned HOST_WIDE_INT ix
;
4899 tree purpose
, value
;
4901 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
4902 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
4904 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
4908 /* NULL values are created above for gimplification errors. */
4912 if (cleared
&& initializer_zerop (value
))
4915 /* ??? Here's to hoping the front end fills in all of the indices,
4916 so we don't have to figure out what's missing ourselves. */
4917 gcc_assert (purpose
);
4919 /* Skip zero-sized fields, unless value has side-effects. This can
4920 happen with calls to functions returning a empty type, which
4921 we shouldn't discard. As a number of downstream passes don't
4922 expect sets of empty type fields, we rely on the gimplification of
4923 the MODIFY_EXPR we make below to drop the assignment statement. */
4924 if (!TREE_SIDE_EFFECTS (value
)
4925 && TREE_CODE (purpose
) == FIELD_DECL
4926 && is_empty_type (TREE_TYPE (purpose
)))
4929 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4931 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4933 tree lower
= TREE_OPERAND (purpose
, 0);
4934 tree upper
= TREE_OPERAND (purpose
, 1);
4936 /* If the lower bound is equal to upper, just treat it as if
4937 upper was the index. */
4938 if (simple_cst_equal (lower
, upper
))
4942 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
4943 array_elt_type
, pre_p
, cleared
);
4950 /* Do not use bitsizetype for ARRAY_REF indices. */
4951 if (TYPE_DOMAIN (TREE_TYPE (object
)))
4953 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
4955 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4956 purpose
, NULL_TREE
, NULL_TREE
);
4960 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
4961 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
4962 unshare_expr (object
), purpose
, NULL_TREE
);
4965 if (TREE_CODE (value
) == CONSTRUCTOR
4966 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
4967 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4971 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
4972 gimplify_and_add (init
, pre_p
);
4978 /* Return the appropriate RHS predicate for this LHS. */
4981 rhs_predicate_for (tree lhs
)
4983 if (is_gimple_reg (lhs
))
4984 return is_gimple_reg_rhs_or_call
;
4986 return is_gimple_mem_rhs_or_call
;
4989 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4990 before the LHS has been gimplified. */
4992 static gimple_predicate
4993 initial_rhs_predicate_for (tree lhs
)
4995 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
4996 return is_gimple_reg_rhs_or_call
;
4998 return is_gimple_mem_rhs_or_call
;
5001 /* Gimplify a C99 compound literal expression. This just means adding
5002 the DECL_EXPR before the current statement and using its anonymous
5005 static enum gimplify_status
5006 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
5007 bool (*gimple_test_f
) (tree
),
5008 fallback_t fallback
)
5010 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
5011 tree decl
= DECL_EXPR_DECL (decl_s
);
5012 tree init
= DECL_INITIAL (decl
);
5013 /* Mark the decl as addressable if the compound literal
5014 expression is addressable now, otherwise it is marked too late
5015 after we gimplify the initialization expression. */
5016 if (TREE_ADDRESSABLE (*expr_p
))
5017 TREE_ADDRESSABLE (decl
) = 1;
5018 /* Otherwise, if we don't need an lvalue and have a literal directly
5019 substitute it. Check if it matches the gimple predicate, as
5020 otherwise we'd generate a new temporary, and we can as well just
5021 use the decl we already have. */
5022 else if (!TREE_ADDRESSABLE (decl
)
5023 && !TREE_THIS_VOLATILE (decl
)
5025 && (fallback
& fb_lvalue
) == 0
5026 && gimple_test_f (init
))
5032 /* If the decl is not addressable, then it is being used in some
5033 expression or on the right hand side of a statement, and it can
5034 be put into a readonly data section. */
5035 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
5036 TREE_READONLY (decl
) = 1;
5038 /* This decl isn't mentioned in the enclosing block, so add it to the
5039 list of temps. FIXME it seems a bit of a kludge to say that
5040 anonymous artificial vars aren't pushed, but everything else is. */
5041 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
5042 gimple_add_tmp_var (decl
);
5044 gimplify_and_add (decl_s
, pre_p
);
5049 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5050 return a new CONSTRUCTOR if something changed. */
5053 optimize_compound_literals_in_ctor (tree orig_ctor
)
5055 tree ctor
= orig_ctor
;
5056 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
5057 unsigned int idx
, num
= vec_safe_length (elts
);
5059 for (idx
= 0; idx
< num
; idx
++)
5061 tree value
= (*elts
)[idx
].value
;
5062 tree newval
= value
;
5063 if (TREE_CODE (value
) == CONSTRUCTOR
)
5064 newval
= optimize_compound_literals_in_ctor (value
);
5065 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
5067 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
5068 tree decl
= DECL_EXPR_DECL (decl_s
);
5069 tree init
= DECL_INITIAL (decl
);
5071 if (!TREE_ADDRESSABLE (value
)
5072 && !TREE_ADDRESSABLE (decl
)
5074 && TREE_CODE (init
) == CONSTRUCTOR
)
5075 newval
= optimize_compound_literals_in_ctor (init
);
5077 if (newval
== value
)
5080 if (ctor
== orig_ctor
)
5082 ctor
= copy_node (orig_ctor
);
5083 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
5084 elts
= CONSTRUCTOR_ELTS (ctor
);
5086 (*elts
)[idx
].value
= newval
;
5091 /* A subroutine of gimplify_modify_expr. Break out elements of a
5092 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5094 Note that we still need to clear any elements that don't have explicit
5095 initializers, so if not all elements are initialized we keep the
5096 original MODIFY_EXPR, we just remove all of the constructor elements.
5098 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5099 GS_ERROR if we would have to create a temporary when gimplifying
5100 this constructor. Otherwise, return GS_OK.
5102 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5104 static enum gimplify_status
5105 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5106 bool want_value
, bool notify_temp_creation
)
5108 tree object
, ctor
, type
;
5109 enum gimplify_status ret
;
5110 vec
<constructor_elt
, va_gc
> *elts
;
5111 bool cleared
= false;
5112 bool is_empty_ctor
= false;
5113 bool is_init_expr
= (TREE_CODE (*expr_p
) == INIT_EXPR
);
5115 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
5117 if (!notify_temp_creation
)
5119 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
5120 is_gimple_lvalue
, fb_lvalue
);
5121 if (ret
== GS_ERROR
)
5125 object
= TREE_OPERAND (*expr_p
, 0);
5126 ctor
= TREE_OPERAND (*expr_p
, 1)
5127 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
5128 type
= TREE_TYPE (ctor
);
5129 elts
= CONSTRUCTOR_ELTS (ctor
);
5132 switch (TREE_CODE (type
))
5136 case QUAL_UNION_TYPE
:
5139 /* Use readonly data for initializers of this or smaller size
5140 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5142 const HOST_WIDE_INT min_unique_size
= 64;
5143 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5144 is smaller than this, use readonly data. */
5145 const int unique_nonzero_ratio
= 8;
5146 /* True if a single access of the object must be ensured. This is the
5147 case if the target is volatile, the type is non-addressable and more
5148 than one field need to be assigned. */
5149 const bool ensure_single_access
5150 = TREE_THIS_VOLATILE (object
)
5151 && !TREE_ADDRESSABLE (type
)
5152 && vec_safe_length (elts
) > 1;
5153 struct gimplify_init_ctor_preeval_data preeval_data
;
5154 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
5155 HOST_WIDE_INT num_unique_nonzero_elements
;
5156 bool complete_p
, valid_const_initializer
;
5158 /* Aggregate types must lower constructors to initialization of
5159 individual elements. The exception is that a CONSTRUCTOR node
5160 with no elements indicates zero-initialization of the whole. */
5161 if (vec_safe_is_empty (elts
))
5163 if (notify_temp_creation
)
5166 /* The var will be initialized and so appear on lhs of
5167 assignment, it can't be TREE_READONLY anymore. */
5169 TREE_READONLY (object
) = 0;
5171 is_empty_ctor
= true;
5175 /* Fetch information about the constructor to direct later processing.
5176 We might want to make static versions of it in various cases, and
5177 can only do so if it known to be a valid constant initializer. */
5178 valid_const_initializer
5179 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
5180 &num_unique_nonzero_elements
,
5181 &num_ctor_elements
, &complete_p
);
5183 /* If a const aggregate variable is being initialized, then it
5184 should never be a lose to promote the variable to be static. */
5185 if (valid_const_initializer
5186 && num_nonzero_elements
> 1
5187 && TREE_READONLY (object
)
5189 && !DECL_REGISTER (object
)
5190 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
))
5191 /* For ctors that have many repeated nonzero elements
5192 represented through RANGE_EXPRs, prefer initializing
5193 those through runtime loops over copies of large amounts
5194 of data from readonly data section. */
5195 && (num_unique_nonzero_elements
5196 > num_nonzero_elements
/ unique_nonzero_ratio
5197 || ((unsigned HOST_WIDE_INT
) int_size_in_bytes (type
)
5198 <= (unsigned HOST_WIDE_INT
) min_unique_size
)))
5200 if (notify_temp_creation
)
5203 DECL_INITIAL (object
) = ctor
;
5204 TREE_STATIC (object
) = 1;
5205 if (!DECL_NAME (object
))
5206 DECL_NAME (object
) = create_tmp_var_name ("C");
5207 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
5209 /* ??? C++ doesn't automatically append a .<number> to the
5210 assembler name, and even when it does, it looks at FE private
5211 data structures to figure out what that number should be,
5212 which are not set for this variable. I suppose this is
5213 important for local statics for inline functions, which aren't
5214 "local" in the object file sense. So in order to get a unique
5215 TU-local symbol, we must invoke the lhd version now. */
5216 lhd_set_decl_assembler_name (object
);
5218 *expr_p
= NULL_TREE
;
5222 /* The var will be initialized and so appear on lhs of
5223 assignment, it can't be TREE_READONLY anymore. */
5224 if (VAR_P (object
) && !notify_temp_creation
)
5225 TREE_READONLY (object
) = 0;
5227 /* If there are "lots" of initialized elements, even discounting
5228 those that are not address constants (and thus *must* be
5229 computed at runtime), then partition the constructor into
5230 constant and non-constant parts. Block copy the constant
5231 parts in, then generate code for the non-constant parts. */
5232 /* TODO. There's code in cp/typeck.cc to do this. */
5234 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
5235 /* store_constructor will ignore the clearing of variable-sized
5236 objects. Initializers for such objects must explicitly set
5237 every field that needs to be set. */
5239 else if (!complete_p
)
5240 /* If the constructor isn't complete, clear the whole object
5241 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5243 ??? This ought not to be needed. For any element not present
5244 in the initializer, we should simply set them to zero. Except
5245 we'd need to *find* the elements that are not present, and that
5246 requires trickery to avoid quadratic compile-time behavior in
5247 large cases or excessive memory use in small cases. */
5248 cleared
= !CONSTRUCTOR_NO_CLEARING (ctor
);
5249 else if (num_ctor_elements
- num_nonzero_elements
5250 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
5251 && num_nonzero_elements
< num_ctor_elements
/ 4)
5252 /* If there are "lots" of zeros, it's more efficient to clear
5253 the memory and then set the nonzero elements. */
5255 else if (ensure_single_access
&& num_nonzero_elements
== 0)
5256 /* If a single access to the target must be ensured and all elements
5257 are zero, then it's optimal to clear whatever their number. */
5262 /* If there are "lots" of initialized elements, and all of them
5263 are valid address constants, then the entire initializer can
5264 be dropped to memory, and then memcpy'd out. Don't do this
5265 for sparse arrays, though, as it's more efficient to follow
5266 the standard CONSTRUCTOR behavior of memset followed by
5267 individual element initialization. Also don't do this for small
5268 all-zero initializers (which aren't big enough to merit
5269 clearing), and don't try to make bitwise copies of
5270 TREE_ADDRESSABLE types. */
5271 if (valid_const_initializer
5273 && !(cleared
|| num_nonzero_elements
== 0)
5274 && !TREE_ADDRESSABLE (type
))
5276 HOST_WIDE_INT size
= int_size_in_bytes (type
);
5279 /* ??? We can still get unbounded array types, at least
5280 from the C++ front end. This seems wrong, but attempt
5281 to work around it for now. */
5284 size
= int_size_in_bytes (TREE_TYPE (object
));
5286 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
5289 /* Find the maximum alignment we can assume for the object. */
5290 /* ??? Make use of DECL_OFFSET_ALIGN. */
5291 if (DECL_P (object
))
5292 align
= DECL_ALIGN (object
);
5294 align
= TYPE_ALIGN (type
);
5296 /* Do a block move either if the size is so small as to make
5297 each individual move a sub-unit move on average, or if it
5298 is so large as to make individual moves inefficient. */
5300 && num_nonzero_elements
> 1
5301 /* For ctors that have many repeated nonzero elements
5302 represented through RANGE_EXPRs, prefer initializing
5303 those through runtime loops over copies of large amounts
5304 of data from readonly data section. */
5305 && (num_unique_nonzero_elements
5306 > num_nonzero_elements
/ unique_nonzero_ratio
5307 || size
<= min_unique_size
)
5308 && (size
< num_nonzero_elements
5309 || !can_move_by_pieces (size
, align
)))
5311 if (notify_temp_creation
)
5314 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
5315 ctor
= tree_output_constant_def (ctor
);
5316 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
5317 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
5318 TREE_OPERAND (*expr_p
, 1) = ctor
;
5320 /* This is no longer an assignment of a CONSTRUCTOR, but
5321 we still may have processing to do on the LHS. So
5322 pretend we didn't do anything here to let that happen. */
5323 return GS_UNHANDLED
;
5327 /* If a single access to the target must be ensured and there are
5328 nonzero elements or the zero elements are not assigned en masse,
5329 initialize the target from a temporary. */
5330 if (ensure_single_access
&& (num_nonzero_elements
> 0 || !cleared
))
5332 if (notify_temp_creation
)
5335 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
5336 TREE_OPERAND (*expr_p
, 0) = temp
;
5337 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
5339 build2 (MODIFY_EXPR
, void_type_node
,
5344 if (notify_temp_creation
)
5347 /* If there are nonzero elements and if needed, pre-evaluate to capture
5348 elements overlapping with the lhs into temporaries. We must do this
5349 before clearing to fetch the values before they are zeroed-out. */
5350 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
5352 preeval_data
.lhs_base_decl
= get_base_address (object
);
5353 if (!DECL_P (preeval_data
.lhs_base_decl
))
5354 preeval_data
.lhs_base_decl
= NULL
;
5355 preeval_data
.lhs_alias_set
= get_alias_set (object
);
5357 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
5358 pre_p
, post_p
, &preeval_data
);
5361 bool ctor_has_side_effects_p
5362 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
5366 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5367 Note that we still have to gimplify, in order to handle the
5368 case of variable sized types. Avoid shared tree structures. */
5369 CONSTRUCTOR_ELTS (ctor
) = NULL
;
5370 TREE_SIDE_EFFECTS (ctor
) = 0;
5371 object
= unshare_expr (object
);
5372 gimplify_stmt (expr_p
, pre_p
);
5375 /* If we have not block cleared the object, or if there are nonzero
5376 elements in the constructor, or if the constructor has side effects,
5377 add assignments to the individual scalar fields of the object. */
5379 || num_nonzero_elements
> 0
5380 || ctor_has_side_effects_p
)
5381 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
5383 *expr_p
= NULL_TREE
;
5391 if (notify_temp_creation
)
5394 /* Extract the real and imaginary parts out of the ctor. */
5395 gcc_assert (elts
->length () == 2);
5396 r
= (*elts
)[0].value
;
5397 i
= (*elts
)[1].value
;
5398 if (r
== NULL
|| i
== NULL
)
5400 tree zero
= build_zero_cst (TREE_TYPE (type
));
5407 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5408 represent creation of a complex value. */
5409 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
5411 ctor
= build_complex (type
, r
, i
);
5412 TREE_OPERAND (*expr_p
, 1) = ctor
;
5416 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
5417 TREE_OPERAND (*expr_p
, 1) = ctor
;
5418 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
5421 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
5429 unsigned HOST_WIDE_INT ix
;
5430 constructor_elt
*ce
;
5432 if (notify_temp_creation
)
5435 /* Vector types use CONSTRUCTOR all the way through gimple
5436 compilation as a general initializer. */
5437 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
5439 enum gimplify_status tret
;
5440 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
5442 if (tret
== GS_ERROR
)
5444 else if (TREE_STATIC (ctor
)
5445 && !initializer_constant_valid_p (ce
->value
,
5446 TREE_TYPE (ce
->value
)))
5447 TREE_STATIC (ctor
) = 0;
5449 recompute_constructor_flags (ctor
);
5451 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5452 if (TREE_CONSTANT (ctor
))
5454 bool constant_p
= true;
5457 /* Even when ctor is constant, it might contain non-*_CST
5458 elements, such as addresses or trapping values like
5459 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5460 in VECTOR_CST nodes. */
5461 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
5462 if (!CONSTANT_CLASS_P (value
))
5470 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
5475 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
5476 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
5481 /* So how did we get a CONSTRUCTOR for a scalar type? */
5485 if (ret
== GS_ERROR
)
5487 /* If we have gimplified both sides of the initializer but have
5488 not emitted an assignment, do so now. */
5491 tree lhs
= TREE_OPERAND (*expr_p
, 0);
5492 tree rhs
= TREE_OPERAND (*expr_p
, 1);
5493 if (want_value
&& object
== lhs
)
5494 lhs
= unshare_expr (lhs
);
5495 gassign
*init
= gimple_build_assign (lhs
, rhs
);
5496 gimplify_seq_add_stmt (pre_p
, init
);
5509 /* If the user requests to initialize automatic variables, we
5510 should initialize paddings inside the variable. Add a call to
5511 __builtin_clear_pading (&object, 0, for_auto_init = true) to
5512 initialize paddings of object always to zero regardless of
5513 INIT_TYPE. Note, we will not insert this call if the aggregate
5514 variable has be completely cleared already or it's initialized
5515 with an empty constructor. We cannot insert this call if the
5516 variable is a gimple register since __builtin_clear_padding will take
5517 the address of the variable. As a result, if a long double/_Complex long
5518 double variable will be spilled into stack later, its padding cannot
5519 be cleared with __builtin_clear_padding. We should clear its padding
5520 when it is spilled into memory. */
5522 && !is_gimple_reg (object
)
5523 && clear_padding_type_may_have_padding_p (type
)
5524 && ((AGGREGATE_TYPE_P (type
) && !cleared
&& !is_empty_ctor
)
5525 || !AGGREGATE_TYPE_P (type
))
5526 && is_var_need_auto_init (object
))
5527 gimple_add_padding_init_for_auto_var (object
, false, pre_p
);
5532 /* Given a pointer value OP0, return a simplified version of an
5533 indirection through OP0, or NULL_TREE if no simplification is
5534 possible. This may only be applied to a rhs of an expression.
5535 Note that the resulting type may be different from the type pointed
5536 to in the sense that it is still compatible from the langhooks
5540 gimple_fold_indirect_ref_rhs (tree t
)
5542 return gimple_fold_indirect_ref (t
);
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.  Returns GS_UNHANDLED when no case applied, so the
   caller falls through to generic gimplification.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
  enum gimplify_status ret = GS_UNHANDLED;

  /* Dispatch on the tree code of the RHS; each case may rewrite
     *expr_p / *from_p and set RET accordingly.  */
  switch (TREE_CODE (*from_p))
      /* If we're assigning from a read-only variable initialized with
	 a constructor and not volatile, do the direct assignment from
	 the constructor, but only if the target is not volatile either
	 since this latter assignment might end up being done on a per
	 field basis.  However, if the target is volatile and the type
	 is aggregate and non-addressable, gimplify_init_constructor
	 knows that it needs to ensure a single access to the target
	 and it will return GS_OK only in this case.  */
      if (TREE_READONLY (*from_p)
	  && DECL_INITIAL (*from_p)
	  && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
	  && !TREE_THIS_VOLATILE (*from_p)
	  && (!TREE_THIS_VOLATILE (*to_p)
	      || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
		  && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
	  tree old_from = *from_p;
	  enum gimplify_status subret;

	  /* Move the constructor into the RHS.  */
	  *from_p = unshare_expr (DECL_INITIAL (*from_p));

	  /* Let's see if gimplify_init_constructor will need to put
	     it in memory.  */
	  subret = gimplify_init_constructor (expr_p, NULL, NULL,
	  if (subret == GS_ERROR)
	      /* If so, revert the change.  */

      /* If we have code like

	     *(const A*)(A*)&x

	 where the type of "x" is a (possibly cv-qualified variant
	 of "A"), treat the entire expression as identical to "x".
	 This kind of code arises in C++ when an object is bound
	 to a const reference, and if "x" is a TARGET_EXPR we want
	 to take advantage of the optimization below.  */
      bool volatile_p = TREE_THIS_VOLATILE (*from_p);
      tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	  if (TREE_THIS_VOLATILE (t) != volatile_p)
	      /* Preserve volatility by rebuilding as an explicit
		 MEM_REF and marking it volatile.  */
	      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
					    build_fold_addr_expr (t));
	      if (REFERENCE_CLASS_P (t))
		TREE_THIS_VOLATILE (t) = volatile_p;

      /* If we are initializing something from a TARGET_EXPR, strip the
	 TARGET_EXPR and initialize it directly, if possible.  This can't
	 be done if the initializer is void, since that implies that the
	 temporary is set in some non-trivial way.

	 ??? What about code that pulls out the temp and uses it
	 elsewhere?  I think that such code never uses the TARGET_EXPR as
	 an initializer.  If I'm wrong, we'll die because the temp won't
	 have any RTL.  In that case, I guess we'll need to replace
	 references somehow.  */
      tree init = TARGET_EXPR_INITIAL (*from_p);
	  && (TREE_CODE (*expr_p) != MODIFY_EXPR
	      || !TARGET_EXPR_NO_ELIDE (*from_p))
	  && !VOID_TYPE_P (TREE_TYPE (init)))

      /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	 caught.  */
      gimplify_compound_expr (from_p, pre_p, true);

      /* If we already made some changes, let the front end have a
	 crack at this before we break it down.  */
      if (ret != GS_UNHANDLED)
      /* If we're initializing from a CONSTRUCTOR, break this into
	 individual MODIFY_EXPRs.  */
      ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,

      /* If we're assigning to a non-register type, push the assignment
	 down into the branches.  This is mandatory for ADDRESSABLE types,
	 since we cannot generate temporaries for such, but it saves a
	 copy in other cases as well.  */
      if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	  /* This code should mirror the code in gimplify_cond_expr. */
	  enum tree_code code = TREE_CODE (*expr_p);
	  tree cond = *from_p;
	  tree result = *to_p;

	  ret = gimplify_expr (&result, pre_p, post_p,
			       is_gimple_lvalue, fb_lvalue);
	  if (ret != GS_ERROR)
	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
	      && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
	    TREE_READONLY (result) = 0;
	  /* Push the assignment into each non-void arm of the COND_EXPR.  */
	  if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
	    TREE_OPERAND (cond, 1)
	      = build2 (code, void_type_node, result,
			TREE_OPERAND (cond, 1));
	  if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
	    TREE_OPERAND (cond, 2)
	      = build2 (code, void_type_node, unshare_expr (result),
			TREE_OPERAND (cond, 2));
	  TREE_TYPE (cond) = void_type_node;
	  recalculate_side_effects (cond);
	  gimplify_and_add (cond, pre_p);
	  *expr_p = unshare_expr (result);

      /* For calls that return in memory, give *to_p as the CALL_EXPR's
	 return slot so that we don't generate a temporary.  */
      if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	  && aggregate_value_p (*from_p, *from_p))
	  if (!(rhs_predicate_for (*to_p))(*from_p))
	    /* If we need a temporary, *to_p isn't accurate.  */
	  /* It's OK to use the return slot directly unless it's an NRV. */
	  else if (TREE_CODE (*to_p) == RESULT_DECL
		   && DECL_NAME (*to_p) == NULL_TREE
		   && needs_to_live_in_memory (*to_p))
	  else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		   || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
	    /* Don't force regs into memory.  */
	  else if (TREE_CODE (*expr_p) == INIT_EXPR)
	    /* It's OK to use the target directly if it's being
	       initialized.  */
	  else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
	    /* Always use the target and thus RSO for variable-sized types.
	       GIMPLE cannot deal with a variable-sized assignment
	       embedded in a call statement.  */
	  else if (TREE_CODE (*to_p) != SSA_NAME
		   && (!is_gimple_variable (*to_p)
		       || needs_to_live_in_memory (*to_p)))
	    /* Don't use the original target if it's already addressable;
	       if its address escapes, and the called function uses the
	       NRV optimization, a conforming program could see *to_p
	       change before the called function returns; see c++/19317.
	       When optimizing, the return_slot pass marks more functions
	       as safe after we have escape info.  */
	      CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
	      mark_addressable (*to_p);

    case WITH_SIZE_EXPR:
      /* Likewise for calls that return an aggregate of non-constant size,
	 since we would not be able to generate a temporary at all.  */
      if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	  *from_p = TREE_OPERAND (*from_p, 0);
	  /* We don't change ret in this case because the
	     WITH_SIZE_EXPR might have been added in
	     gimplify_modify_expr, so returning GS_OK would lead to an
	     infinite loop.  */

      /* If we're initializing from a container, push the initialization
	 inside it.  */
    case CLEANUP_POINT_EXPR:
    case STATEMENT_LIST:
	tree wrap = *from_p;

	/* Gimplify the LHS first so voidify_wrapper_expr can use it.  */
	ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
	if (ret != GS_ERROR)
	t = voidify_wrapper_expr (wrap, *expr_p);
	gcc_assert (t == *expr_p);
	gimplify_and_add (wrap, pre_p);
	*expr_p = unshare_expr (*to_p);

      /* Pull out compound literal expressions from a NOP_EXPR.
	 Those are created in the C FE to drop qualifiers during
	 lvalue conversion.  */
      if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
	  && tree_ssa_useless_type_conversion (*from_p))
	  *from_p = TREE_OPERAND (*from_p, 0);

    case COMPOUND_LITERAL_EXPR:
	tree complit = TREE_OPERAND (*expr_p, 1);
	tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	tree decl = DECL_EXPR_DECL (decl_s);
	tree init = DECL_INITIAL (decl);

	/* struct T x = (struct T) { 0, 1, 2 } can be optimized
	   into struct T x = { 0, 1, 2 } if the address of the
	   compound literal has never been taken.  */
	if (!TREE_ADDRESSABLE (complit)
	    && !TREE_ADDRESSABLE (decl)
	    *expr_p = copy_node (*expr_p);
	    TREE_OPERAND (*expr_p, 1) = init;
/* Return true if T looks like a valid GIMPLE statement.  */

is_gimple_stmt (tree t)
  const enum tree_code code = TREE_CODE (t);

      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

      /* Statement-like codes (labels, EH regions, OpenACC/OpenMP
	 constructs) that are valid GIMPLE statements.  */
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case STATEMENT_LIST:
    case OACC_HOST_DATA:
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OMP_DISTRIBUTE:
    case OMP_TARGET_DATA:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
      /* These are always void.  */

      /* These are valid regardless of their type.  */
/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.

   IMPORTANT NOTE: This promotion is performed by introducing a load of the
   other, unmodified part of the complex object just before the total store.
   As a consequence, if the object is still uninitialized, an undefined value
   will be loaded into a register, which may result in a spurious exception
   if the register is floating-point and the value happens to be a signaling
   NaN for example.  Then the fully-fledged complex operations lowering pass
   followed by a DCE pass are necessary in order to fix things up.  */

static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
  enum tree_code code, ocode;
  tree lhs, rhs, new_rhs, other, realpart, imagpart;

  /* Peel the REAL/IMAGPART_EXPR off the lhs; LHS becomes the whole
     complex object, CODE remembers which half was stored.  */
  lhs = TREE_OPERAND (*expr_p, 0);
  rhs = TREE_OPERAND (*expr_p, 1);
  code = TREE_CODE (lhs);
  lhs = TREE_OPERAND (lhs, 0);

  /* Load the opposite half into a formal temporary.  */
  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
  other = build1 (ocode, TREE_TYPE (rhs), lhs);
  suppress_warning (other);
  other = get_formal_tmp_var (other, pre_p);

  realpart = code == REALPART_EXPR ? rhs : other;
  imagpart = code == REALPART_EXPR ? other : rhs;

  /* Build the full complex value and store it in one assignment.  */
  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);

  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
  *expr_p = (want_value) ? rhs : NULL_TREE;
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
      gcc_assert (!want_value);
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
	  /* Rewrite the destination as *TMP where TMP holds its address,
	     so the clobber target is a valid GIMPLE lvalue.  */
	  tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
	  *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
  if (ret != GS_UNHANDLED)

  /* For empty types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (is_empty_type (TREE_TYPE (*from_p))
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ??? Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	  /* Rebuild the IFN_VA_ARG call with the VLA size appended.  */
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
	  TREE_OPERAND (*from_p, 0) = newcall;

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
  if (ret != GS_UNHANDLED)

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
      if (is_gimple_addressable (from))
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && DECL_IGNORED_P (*from_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);

  /* A volatile LHS must be read back through a temporary when its value
     is wanted, to avoid a second volatile access.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	  /* Strip the wrapper the FE put around the function pointer,
	     remembering its type for gimple_build_call_from_tree.  */
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	    call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ??? This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	copy_warning (assign, *from_p);

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  /* When the value is wanted, hand back a fresh use of the LHS (or the
     RHS for volatile destinations, see above).  */
  *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
/* Gimplify a comparison between two variable-sized objects.  Do this
   with a call to BUILT_IN_MEMCMP.  */

static enum gimplify_status
gimplify_variable_sized_compare (tree *expr_p)
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree t, arg, dest, src, expr;

  /* The byte count comes from OP0's type size; resolve any
     PLACEHOLDER_EXPRs in it against OP0 itself.  */
  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
  arg = unshare_expr (arg);
  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
  src = build_fold_addr_expr_loc (loc, op1);
  dest = build_fold_addr_expr_loc (loc, op0);
  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
  t = build_call_expr_loc (loc, t, 3, dest, src, arg);
  /* Compare the memcmp result against zero with the original
     comparison code.  */
    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
  SET_EXPR_LOCATION (expr, loc);
/* Gimplify a comparison between two aggregate objects of integral scalar
   mode as a comparison between the bitwise equivalent scalar values.  */

static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);

  tree type = TREE_TYPE (op0);
  /* Ask the langhook for the unsigned integer type with the same
     machine mode as the aggregate.  */
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);

  /* Reinterpret both operands bitwise as that scalar type.  */
  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);

    = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
/* Gimplify an expression sequence.  This function gimplifies each
   expression and rewrites the original expression with the last
   expression of the sequence in GIMPLE form.

   PRE_P points to the list where the side effects for all the
   expressions in the sequence will be emitted.

   WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */

static enum gimplify_status
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
      tree *sub_p = &TREE_OPERAND (t, 0);

      /* Flatten nested COMPOUND_EXPRs on the lhs before emitting the
	 sub-expression as a statement.  */
      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
	gimplify_compound_expr (sub_p, pre_p, false);
	gimplify_stmt (sub_p, pre_p);

      t = TREE_OPERAND (t, 1);
  while (TREE_CODE (t) == COMPOUND_EXPR);

      /* Value not wanted: emit the final expression as a statement too.  */
      gimplify_stmt (expr_p, pre_p);
/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
   gimplify.  After gimplification, EXPR_P will point to a new temporary
   that holds the original value of the SAVE_EXPR node.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
  enum gimplify_status ret = GS_ALL_DONE;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  if (TREE_TYPE (val) == error_mark_node)

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
      /* The operand may be a void-valued expression.  It is
	 being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_stmt, fb_none);
	/* The temporary may not be an SSA name as later abnormal and EH
	   control flow may invalidate use/def domination.  When in SSA
	   form then assume there are no such issues and SAVE_EXPRs only
	   appear via GENERIC foldings.  */
	val = get_initialized_tmp_var (val, pre_p, post_p,
				       gimple_in_ssa_p (cfun));

      /* Record the evaluated value so later uses reuse it.  */
      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							    TREE_OPERAND (op0, 0)));

      /* &MEM[p, 0] folds back to p (handled at do_indirect_ref).  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF
	  || (TREE_CODE (op0) == MEM_REF
	      && integer_zerop (TREE_OPERAND (op0, 1))))
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6532 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6533 value; output operands should be a gimple lvalue. */
6535 static enum gimplify_status
6536 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6540 const char **oconstraints
;
6543 const char *constraint
;
6544 bool allows_mem
, allows_reg
, is_inout
;
6545 enum gimplify_status ret
, tret
;
6547 vec
<tree
, va_gc
> *inputs
;
6548 vec
<tree
, va_gc
> *outputs
;
6549 vec
<tree
, va_gc
> *clobbers
;
6550 vec
<tree
, va_gc
> *labels
;
6554 noutputs
= list_length (ASM_OUTPUTS (expr
));
6555 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
6563 link_next
= NULL_TREE
;
6564 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
6567 size_t constraint_len
;
6569 link_next
= TREE_CHAIN (link
);
6573 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6574 constraint_len
= strlen (constraint
);
6575 if (constraint_len
== 0)
6578 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6579 &allows_mem
, &allows_reg
, &is_inout
);
6586 /* If we can't make copies, we can only accept memory.
6587 Similarly for VLAs. */
6588 tree outtype
= TREE_TYPE (TREE_VALUE (link
));
6589 if (outtype
!= error_mark_node
6590 && (TREE_ADDRESSABLE (outtype
)
6591 || !COMPLETE_TYPE_P (outtype
)
6592 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype
))))
6598 error ("impossible constraint in %<asm%>");
6599 error ("non-memory output %d must stay in memory", i
);
6604 if (!allows_reg
&& allows_mem
)
6605 mark_addressable (TREE_VALUE (link
));
6607 tree orig
= TREE_VALUE (link
);
6608 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6609 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6610 fb_lvalue
| fb_mayfail
);
6611 if (tret
== GS_ERROR
)
6613 if (orig
!= error_mark_node
)
6614 error ("invalid lvalue in %<asm%> output %d", i
);
6618 /* If the constraint does not allow memory make sure we gimplify
6619 it to a register if it is not already but its base is. This
6620 happens for complex and vector components. */
6623 tree op
= TREE_VALUE (link
);
6624 if (! is_gimple_val (op
)
6625 && is_gimple_reg_type (TREE_TYPE (op
))
6626 && is_gimple_reg (get_base_address (op
)))
6628 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6632 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6633 tem
, unshare_expr (op
));
6634 gimplify_and_add (ass
, pre_p
);
6636 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6637 gimplify_and_add (ass
, post_p
);
6639 TREE_VALUE (link
) = tem
;
6644 vec_safe_push (outputs
, link
);
6645 TREE_CHAIN (link
) = NULL_TREE
;
6649 /* An input/output operand. To give the optimizers more
6650 flexibility, split it into separate input and output
6653 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6656 /* Turn the in/out constraint into an output constraint. */
6657 char *p
= xstrdup (constraint
);
6659 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6661 /* And add a matching input constraint. */
6664 sprintf (buf
, "%u", i
);
6666 /* If there are multiple alternatives in the constraint,
6667 handle each of them individually. Those that allow register
6668 will be replaced with operand number, the others will stay
6670 if (strchr (p
, ',') != NULL
)
6672 size_t len
= 0, buflen
= strlen (buf
);
6673 char *beg
, *end
, *str
, *dst
;
6677 end
= strchr (beg
, ',');
6679 end
= strchr (beg
, '\0');
6680 if ((size_t) (end
- beg
) < buflen
)
6683 len
+= end
- beg
+ 1;
6690 str
= (char *) alloca (len
);
6691 for (beg
= p
+ 1, dst
= str
;;)
6694 bool mem_p
, reg_p
, inout_p
;
6696 end
= strchr (beg
, ',');
6701 parse_output_constraint (&tem
, i
, 0, 0,
6702 &mem_p
, ®_p
, &inout_p
);
6707 memcpy (dst
, buf
, buflen
);
6716 memcpy (dst
, beg
, len
);
6725 input
= build_string (dst
- str
, str
);
6728 input
= build_string (strlen (buf
), buf
);
6731 input
= build_string (constraint_len
- 1, constraint
+ 1);
6735 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
6736 unshare_expr (TREE_VALUE (link
)));
6737 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
6741 link_next
= NULL_TREE
;
6742 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
6744 link_next
= TREE_CHAIN (link
);
6745 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6746 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6747 oconstraints
, &allows_mem
, &allows_reg
);
6749 /* If we can't make copies, we can only accept memory. */
6750 tree intype
= TREE_TYPE (TREE_VALUE (link
));
6751 if (intype
!= error_mark_node
6752 && (TREE_ADDRESSABLE (intype
)
6753 || !COMPLETE_TYPE_P (intype
)
6754 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype
))))
6760 error ("impossible constraint in %<asm%>");
6761 error ("non-memory input %d must stay in memory", i
);
6766 /* If the operand is a memory input, it should be an lvalue. */
6767 if (!allows_reg
&& allows_mem
)
6769 tree inputv
= TREE_VALUE (link
);
6770 STRIP_NOPS (inputv
);
6771 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
6772 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
6773 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
6774 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
6775 || TREE_CODE (inputv
) == MODIFY_EXPR
)
6776 TREE_VALUE (link
) = error_mark_node
;
6777 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6778 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
6779 if (tret
!= GS_ERROR
)
6781 /* Unlike output operands, memory inputs are not guaranteed
6782 to be lvalues by the FE, and while the expressions are
6783 marked addressable there, if it is e.g. a statement
6784 expression, temporaries in it might not end up being
6785 addressable. They might be already used in the IL and thus
6786 it is too late to make them addressable now though. */
6787 tree x
= TREE_VALUE (link
);
6788 while (handled_component_p (x
))
6789 x
= TREE_OPERAND (x
, 0);
6790 if (TREE_CODE (x
) == MEM_REF
6791 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
6792 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
6794 || TREE_CODE (x
) == PARM_DECL
6795 || TREE_CODE (x
) == RESULT_DECL
)
6796 && !TREE_ADDRESSABLE (x
)
6797 && is_gimple_reg (x
))
6799 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
6801 "memory input %d is not directly addressable",
6803 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
6806 mark_addressable (TREE_VALUE (link
));
6807 if (tret
== GS_ERROR
)
6809 if (inputv
!= error_mark_node
)
6810 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
6811 "memory input %d is not directly addressable", i
);
6817 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6818 is_gimple_asm_val
, fb_rvalue
);
6819 if (tret
== GS_ERROR
)
6823 TREE_CHAIN (link
) = NULL_TREE
;
6824 vec_safe_push (inputs
, link
);
6827 link_next
= NULL_TREE
;
6828 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
6830 link_next
= TREE_CHAIN (link
);
6831 TREE_CHAIN (link
) = NULL_TREE
;
6832 vec_safe_push (clobbers
, link
);
6835 link_next
= NULL_TREE
;
6836 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
6838 link_next
= TREE_CHAIN (link
);
6839 TREE_CHAIN (link
) = NULL_TREE
;
6840 vec_safe_push (labels
, link
);
6843 /* Do not add ASMs with errors to the gimple IL stream. */
6844 if (ret
!= GS_ERROR
)
6846 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
6847 inputs
, outputs
, clobbers
, labels
);
6849 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
6850 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
6851 gimple_asm_set_inline (stmt
, ASM_INLINE_P (expr
));
6853 gimplify_seq_add_stmt (pre_p
, stmt
);
6859 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6860 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6861 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6862 return to this function.
6864 FIXME should we complexify the prequeue handling instead? Or use flags
6865 for all the cleanups and let the optimizer tighten them up? The current
6866 code seems pretty fragile; it will break on a cleanup within any
6867 non-conditional nesting. But any such nesting would be broken, anyway;
6868 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6869 and continues out of it. We can do that at the RTL level, though, so
6870 having an optimizer to tighten up try/finally regions would be a Good
6873 static enum gimplify_status
6874 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
6876 gimple_stmt_iterator iter
;
6877 gimple_seq body_sequence
= NULL
;
6879 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
6881 /* We only care about the number of conditions between the innermost
6882 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6883 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6884 int old_conds
= gimplify_ctxp
->conditions
;
6885 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
6886 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
6887 gimplify_ctxp
->conditions
= 0;
6888 gimplify_ctxp
->conditional_cleanups
= NULL
;
6889 gimplify_ctxp
->in_cleanup_point_expr
= true;
6891 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
6893 gimplify_ctxp
->conditions
= old_conds
;
6894 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
6895 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
6897 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
6899 gimple
*wce
= gsi_stmt (iter
);
6901 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
6903 if (gsi_one_before_end_p (iter
))
6905 /* Note that gsi_insert_seq_before and gsi_remove do not
6906 scan operands, unlike some other sequence mutators. */
6907 if (!gimple_wce_cleanup_eh_only (wce
))
6908 gsi_insert_seq_before_without_update (&iter
,
6909 gimple_wce_cleanup (wce
),
6911 gsi_remove (&iter
, true);
6918 enum gimple_try_flags kind
;
6920 if (gimple_wce_cleanup_eh_only (wce
))
6921 kind
= GIMPLE_TRY_CATCH
;
6923 kind
= GIMPLE_TRY_FINALLY
;
6924 seq
= gsi_split_seq_after (iter
);
6926 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
6927 /* Do not use gsi_replace here, as it may scan operands.
6928 We want to do a simple structural modification only. */
6929 gsi_set_stmt (&iter
, gtry
);
6930 iter
= gsi_start (gtry
->eval
);
6937 gimplify_seq_add_seq (pre_p
, body_sequence
);
6950 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6951 is the cleanup action required. EH_ONLY is true if the cleanup should
6952 only be executed if an exception is thrown, not on normal exit.
6953 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6954 only valid for clobbers. */
6957 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
6958 bool force_uncond
= false)
6961 gimple_seq cleanup_stmts
= NULL
;
6963 /* Errors can result in improperly nested cleanups. Which results in
6964 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6968 if (gimple_conditional_context ())
6970 /* If we're in a conditional context, this is more complex. We only
6971 want to run the cleanup if we actually ran the initialization that
6972 necessitates it, but we want to run it after the end of the
6973 conditional context. So we wrap the try/finally around the
6974 condition and use a flag to determine whether or not to actually
6975 run the destructor. Thus
6979 becomes (approximately)
6983 if (test) { A::A(temp); flag = 1; val = f(temp); }
6986 if (flag) A::~A(temp);
6992 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6993 wce
= gimple_build_wce (cleanup_stmts
);
6994 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6998 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
6999 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
7000 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
7002 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
7003 gimplify_stmt (&cleanup
, &cleanup_stmts
);
7004 wce
= gimple_build_wce (cleanup_stmts
);
7005 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
7007 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
7008 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
7009 gimplify_seq_add_stmt (pre_p
, ftrue
);
7011 /* Because of this manipulation, and the EH edges that jump
7012 threading cannot redirect, the temporary (VAR) will appear
7013 to be used uninitialized. Don't warn. */
7014 suppress_warning (var
, OPT_Wuninitialized
);
7019 gimplify_stmt (&cleanup
, &cleanup_stmts
);
7020 wce
= gimple_build_wce (cleanup_stmts
);
7021 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
7022 gimplify_seq_add_stmt (pre_p
, wce
);
7026 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7028 static enum gimplify_status
7029 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
7031 tree targ
= *expr_p
;
7032 tree temp
= TARGET_EXPR_SLOT (targ
);
7033 tree init
= TARGET_EXPR_INITIAL (targ
);
7034 enum gimplify_status ret
;
7036 bool unpoison_empty_seq
= false;
7037 gimple_stmt_iterator unpoison_it
;
7041 gimple_seq init_pre_p
= NULL
;
7043 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
7044 to the temps list. Handle also variable length TARGET_EXPRs. */
7045 if (!poly_int_tree_p (DECL_SIZE (temp
)))
7047 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
7048 gimplify_type_sizes (TREE_TYPE (temp
), &init_pre_p
);
7049 /* FIXME: this is correct only when the size of the type does
7050 not depend on expressions evaluated in init. */
7051 gimplify_vla_decl (temp
, &init_pre_p
);
7055 /* Save location where we need to place unpoisoning. It's possible
7056 that a variable will be converted to needs_to_live_in_memory. */
7057 unpoison_it
= gsi_last (*pre_p
);
7058 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
7060 gimple_add_tmp_var (temp
);
7063 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7064 expression is supposed to initialize the slot. */
7065 if (VOID_TYPE_P (TREE_TYPE (init
)))
7066 ret
= gimplify_expr (&init
, &init_pre_p
, post_p
, is_gimple_stmt
,
7070 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
7072 ret
= gimplify_expr (&init
, &init_pre_p
, post_p
, is_gimple_stmt
,
7075 ggc_free (init_expr
);
7077 if (ret
== GS_ERROR
)
7079 /* PR c++/28266 Make sure this is expanded only once. */
7080 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
7085 gimplify_and_add (init
, &init_pre_p
);
7087 /* Add a clobber for the temporary going out of scope, like
7088 gimplify_bind_expr. */
7089 if (gimplify_ctxp
->in_cleanup_point_expr
7090 && needs_to_live_in_memory (temp
))
7092 if (flag_stack_reuse
== SR_ALL
)
7094 tree clobber
= build_clobber (TREE_TYPE (temp
), CLOBBER_EOL
);
7095 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
7096 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
7098 if (asan_poisoned_variables
7099 && DECL_ALIGN (temp
) <= MAX_SUPPORTED_STACK_ALIGNMENT
7100 && !TREE_STATIC (temp
)
7101 && dbg_cnt (asan_use_after_scope
)
7102 && !gimplify_omp_ctxp
)
7104 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
7107 if (unpoison_empty_seq
)
7108 unpoison_it
= gsi_start (*pre_p
);
7110 asan_poison_variable (temp
, false, &unpoison_it
,
7111 unpoison_empty_seq
);
7112 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
7117 gimple_seq_add_seq (pre_p
, init_pre_p
);
7119 /* If needed, push the cleanup for the temp. */
7120 if (TARGET_EXPR_CLEANUP (targ
))
7121 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
7122 CLEANUP_EH_ONLY (targ
), pre_p
);
7124 /* Only expand this once. */
7125 TREE_OPERAND (targ
, 3) = init
;
7126 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
7129 /* We should have expanded this before. */
7130 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
7136 /* Gimplification of expression trees. */
7138 /* Gimplify an expression which appears at statement context. The
7139 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7140 NULL, a new sequence is allocated.
7142 Return true if we actually added a statement to the queue. */
7145 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
7147 gimple_seq_node last
;
7149 last
= gimple_seq_last (*seq_p
);
7150 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
7151 return last
!= gimple_seq_last (*seq_p
);
7154 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
7155 to CTX. If entries already exist, force them to be some flavor of private.
7156 If there is no enclosing parallel, do nothing. */
7159 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
7163 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
7168 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7171 if (n
->value
& GOVD_SHARED
)
7172 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
7173 else if (n
->value
& GOVD_MAP
)
7174 n
->value
|= GOVD_MAP_TO_ONLY
;
7178 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
7180 if (ctx
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
7181 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
7183 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
7185 else if (ctx
->region_type
!= ORT_WORKSHARE
7186 && ctx
->region_type
!= ORT_TASKGROUP
7187 && ctx
->region_type
!= ORT_SIMD
7188 && ctx
->region_type
!= ORT_ACC
7189 && !(ctx
->region_type
& ORT_TARGET_DATA
))
7190 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
7192 ctx
= ctx
->outer_context
;
7197 /* Similarly for each of the type sizes of TYPE. */
7200 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
7202 if (type
== NULL
|| type
== error_mark_node
)
7204 type
= TYPE_MAIN_VARIANT (type
);
7206 if (ctx
->privatized_types
->add (type
))
7209 switch (TREE_CODE (type
))
7215 case FIXED_POINT_TYPE
:
7216 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
7217 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
7221 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
7222 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
7227 case QUAL_UNION_TYPE
:
7230 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
7231 if (TREE_CODE (field
) == FIELD_DECL
)
7233 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
7234 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
7240 case REFERENCE_TYPE
:
7241 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
7248 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
7249 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
7250 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
7253 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7256 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
7259 unsigned int nflags
;
7262 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
7265 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7266 there are constructors involved somewhere. Exception is a shared clause,
7267 there is nothing privatized in that case. */
7268 if ((flags
& GOVD_SHARED
) == 0
7269 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
7270 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
7273 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7274 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7276 /* We shouldn't be re-adding the decl with the same data
7278 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
7279 nflags
= n
->value
| flags
;
7280 /* The only combination of data sharing classes we should see is
7281 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7282 reduction variables to be used in data sharing clauses. */
7283 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
7284 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
7285 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
7286 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
7291 /* When adding a variable-sized variable, we have to handle all sorts
7292 of additional bits of data: the pointer replacement variable, and
7293 the parameters of the type. */
7294 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7296 /* Add the pointer replacement variable as PRIVATE if the variable
7297 replacement is private, else FIRSTPRIVATE since we'll need the
7298 address of the original variable either for SHARED, or for the
7299 copy into or out of the context. */
7300 if (!(flags
& GOVD_LOCAL
) && ctx
->region_type
!= ORT_TASKGROUP
)
7302 if (flags
& GOVD_MAP
)
7303 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
7304 else if (flags
& GOVD_PRIVATE
)
7305 nflags
= GOVD_PRIVATE
;
7306 else if (((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7307 && (flags
& GOVD_FIRSTPRIVATE
))
7308 || (ctx
->region_type
== ORT_TARGET_DATA
7309 && (flags
& GOVD_DATA_SHARE_CLASS
) == 0))
7310 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
7312 nflags
= GOVD_FIRSTPRIVATE
;
7313 nflags
|= flags
& GOVD_SEEN
;
7314 t
= DECL_VALUE_EXPR (decl
);
7315 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7316 t
= TREE_OPERAND (t
, 0);
7317 gcc_assert (DECL_P (t
));
7318 omp_add_variable (ctx
, t
, nflags
);
7321 /* Add all of the variable and type parameters (which should have
7322 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7323 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
7324 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
7325 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7327 /* The variable-sized variable itself is never SHARED, only some form
7328 of PRIVATE. The sharing would take place via the pointer variable
7329 which we remapped above. */
7330 if (flags
& GOVD_SHARED
)
7331 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
7332 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
7334 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7335 alloca statement we generate for the variable, so make sure it
7336 is available. This isn't automatically needed for the SHARED
7337 case, since we won't be allocating local storage then.
7338 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7339 in this case omp_notice_variable will be called later
7340 on when it is gimplified. */
7341 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
7342 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
7343 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
7345 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
7346 && omp_privatize_by_reference (decl
))
7348 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7350 /* Similar to the direct variable sized case above, we'll need the
7351 size of references being privatized. */
7352 if ((flags
& GOVD_SHARED
) == 0)
7354 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7355 if (t
&& DECL_P (t
))
7356 omp_notice_variable (ctx
, t
, true);
7363 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
7365 /* For reductions clauses in OpenACC loop directives, by default create a
7366 copy clause on the enclosing parallel construct for carrying back the
7368 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
7370 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
7373 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
7376 /* Ignore local variables and explicitly declared clauses. */
7377 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
7379 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
7381 /* According to the OpenACC spec, such a reduction variable
7382 should already have a copy map on a kernels construct,
7383 verify that here. */
7384 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
7385 && (n
->value
& GOVD_MAP
));
7387 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7389 /* Remove firstprivate and make it a copy map. */
7390 n
->value
&= ~GOVD_FIRSTPRIVATE
;
7391 n
->value
|= GOVD_MAP
;
7394 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7396 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
7397 GOVD_MAP
| GOVD_SEEN
);
7400 outer_ctx
= outer_ctx
->outer_context
;
7405 /* Notice a threadprivate variable DECL used in OMP context CTX.
7406 This just prints out diagnostics about threadprivate variable uses
7407 in untied tasks. If DECL2 is non-NULL, prevent this warning
7408 on that variable. */
7411 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
7415 struct gimplify_omp_ctx
*octx
;
7417 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
7418 if ((octx
->region_type
& ORT_TARGET
) != 0
7419 || octx
->order_concurrent
)
7421 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
7424 if (octx
->order_concurrent
)
7426 error ("threadprivate variable %qE used in a region with"
7427 " %<order(concurrent)%> clause", DECL_NAME (decl
));
7428 inform (octx
->location
, "enclosing region");
7432 error ("threadprivate variable %qE used in target region",
7434 inform (octx
->location
, "enclosing target region");
7436 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
7439 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
7442 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
7444 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7447 error ("threadprivate variable %qE used in untied task",
7449 inform (ctx
->location
, "enclosing task");
7450 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
7453 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
7457 /* Return true if global var DECL is device resident. */
7460 device_resident_p (tree decl
)
7462 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
7467 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
7469 tree c
= TREE_VALUE (t
);
7470 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
7477 /* Return true if DECL has an ACC DECLARE attribute. */
7480 is_oacc_declared (tree decl
)
7482 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
7483 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
7484 return declared
!= NULL_TREE
;
7487 /* Determine outer default flags for DECL mentioned in an OMP region
7488 but not declared in an enclosing clause.
7490 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7491 remapped firstprivate instead of shared. To some extent this is
7492 addressed in omp_firstprivatize_type_sizes, but not
7496 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
7497 bool in_code
, unsigned flags
)
7499 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
7500 enum omp_clause_default_kind kind
;
7502 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
7503 if (ctx
->region_type
& ORT_TASK
)
7505 tree detach_clause
= omp_find_clause (ctx
->clauses
, OMP_CLAUSE_DETACH
);
7507 /* The event-handle specified by a detach clause should always be firstprivate,
7508 regardless of the current default. */
7509 if (detach_clause
&& OMP_CLAUSE_DECL (detach_clause
) == decl
)
7510 kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
7512 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
7513 default_kind
= kind
;
7514 else if (VAR_P (decl
) && TREE_STATIC (decl
) && DECL_IN_CONSTANT_POOL (decl
))
7515 default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
7516 /* For C/C++ default({,first}private), variables with static storage duration
7517 declared in a namespace or global scope and referenced in construct
7518 must be explicitly specified, i.e. acts as default(none). */
7519 else if ((default_kind
== OMP_CLAUSE_DEFAULT_PRIVATE
7520 || default_kind
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
7522 && is_global_var (decl
)
7523 && (DECL_FILE_SCOPE_P (decl
)
7524 || (DECL_CONTEXT (decl
)
7525 && TREE_CODE (DECL_CONTEXT (decl
)) == NAMESPACE_DECL
))
7526 && !lang_GNU_Fortran ())
7527 default_kind
= OMP_CLAUSE_DEFAULT_NONE
;
7529 switch (default_kind
)
7531 case OMP_CLAUSE_DEFAULT_NONE
:
7535 if (ctx
->region_type
& ORT_PARALLEL
)
7537 else if ((ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
)
7539 else if (ctx
->region_type
& ORT_TASK
)
7541 else if (ctx
->region_type
& ORT_TEAMS
)
7546 error ("%qE not specified in enclosing %qs",
7547 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
7548 inform (ctx
->location
, "enclosing %qs", rtype
);
7551 case OMP_CLAUSE_DEFAULT_SHARED
:
7552 flags
|= GOVD_SHARED
;
7554 case OMP_CLAUSE_DEFAULT_PRIVATE
:
7555 flags
|= GOVD_PRIVATE
;
7557 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
7558 flags
|= GOVD_FIRSTPRIVATE
;
7560 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
7561 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7562 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
7563 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
7565 omp_notice_variable (octx
, decl
, in_code
);
7566 for (; octx
; octx
= octx
->outer_context
)
7570 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
7571 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
7572 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7574 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
7576 flags
|= GOVD_FIRSTPRIVATE
;
7579 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
7581 flags
|= GOVD_SHARED
;
7587 if (TREE_CODE (decl
) == PARM_DECL
7588 || (!is_global_var (decl
)
7589 && DECL_CONTEXT (decl
) == current_function_decl
))
7590 flags
|= GOVD_FIRSTPRIVATE
;
7592 flags
|= GOVD_SHARED
;
7604 /* Determine outer default flags for DECL mentioned in an OACC region
7605 but not declared in an enclosing clause. */
7608 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
7611 bool on_device
= false;
7612 bool is_private
= false;
7613 bool declared
= is_oacc_declared (decl
);
7614 tree type
= TREE_TYPE (decl
);
7616 if (omp_privatize_by_reference (decl
))
7617 type
= TREE_TYPE (type
);
7619 /* For Fortran COMMON blocks, only used variables in those blocks are
7620 transfered and remapped. The block itself will have a private clause to
7621 avoid transfering the data twice.
7622 The hook evaluates to false by default. For a variable in Fortran's COMMON
7623 or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
7624 the variables in such a COMMON/EQUIVALENCE block shall be privatized not
7625 the whole block. For C++ and Fortran, it can also be true under certain
7626 other conditions, if DECL_HAS_VALUE_EXPR. */
7627 if (RECORD_OR_UNION_TYPE_P (type
))
7628 is_private
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7630 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
7631 && is_global_var (decl
)
7632 && device_resident_p (decl
)
7636 flags
|= GOVD_MAP_TO_ONLY
;
7639 switch (ctx
->region_type
)
7641 case ORT_ACC_KERNELS
:
7645 flags
|= GOVD_FIRSTPRIVATE
;
7646 else if (AGGREGATE_TYPE_P (type
))
7648 /* Aggregates default to 'present_or_copy', or 'present'. */
7649 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7652 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7655 /* Scalars default to 'copy'. */
7656 flags
|= GOVD_MAP
| GOVD_MAP_FORCE
;
7660 case ORT_ACC_PARALLEL
:
7661 case ORT_ACC_SERIAL
:
7662 rkind
= ctx
->region_type
== ORT_ACC_PARALLEL
? "parallel" : "serial";
7665 flags
|= GOVD_FIRSTPRIVATE
;
7666 else if (on_device
|| declared
)
7668 else if (AGGREGATE_TYPE_P (type
))
7670 /* Aggregates default to 'present_or_copy', or 'present'. */
7671 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7674 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7677 /* Scalars default to 'firstprivate'. */
7678 flags
|= GOVD_FIRSTPRIVATE
;
7686 if (DECL_ARTIFICIAL (decl
))
7687 ; /* We can get compiler-generated decls, and should not complain
7689 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
7691 error ("%qE not specified in enclosing OpenACC %qs construct",
7692 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rkind
);
7693 inform (ctx
->location
, "enclosing OpenACC %qs construct", rkind
);
7695 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_PRESENT
)
7696 ; /* Handled above. */
7698 gcc_checking_assert (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
7703 /* Record the fact that DECL was used within the OMP context CTX.
7704 IN_CODE is true when real code uses DECL, and false when we should
7705 merely emit default(none) errors. Return true if DECL is going to
7706 be remapped and thus DECL shouldn't be gimplified into its
7707 DECL_VALUE_EXPR (if any). */
7710 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
7713 unsigned flags
= in_code
? GOVD_SEEN
: 0;
7714 bool ret
= false, shared
;
7716 if (error_operand_p (decl
))
7719 if (ctx
->region_type
== ORT_NONE
)
7720 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7722 if (is_global_var (decl
))
7724 /* Threadprivate variables are predetermined. */
7725 if (DECL_THREAD_LOCAL_P (decl
))
7726 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
7728 if (DECL_HAS_VALUE_EXPR_P (decl
))
7730 if (ctx
->region_type
& ORT_ACC
)
7731 /* For OpenACC, defer expansion of value to avoid transfering
7732 privatized common block data instead of im-/explicitly transfered
7733 variables which are in common blocks. */
7737 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
7739 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
7740 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
7744 if (gimplify_omp_ctxp
->outer_context
== NULL
7746 && oacc_get_fn_attrib (current_function_decl
))
7748 location_t loc
= DECL_SOURCE_LOCATION (decl
);
7750 if (lookup_attribute ("omp declare target link",
7751 DECL_ATTRIBUTES (decl
)))
7754 "%qE with %<link%> clause used in %<routine%> function",
7758 else if (!lookup_attribute ("omp declare target",
7759 DECL_ATTRIBUTES (decl
)))
7762 "%qE requires a %<declare%> directive for use "
7763 "in a %<routine%> function", DECL_NAME (decl
));
7769 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7770 if ((ctx
->region_type
& ORT_TARGET
) != 0)
7772 if (ctx
->region_type
& ORT_ACC
)
7773 /* For OpenACC, as remarked above, defer expansion. */
7778 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7781 unsigned nflags
= flags
;
7782 if ((ctx
->region_type
& ORT_ACC
) == 0)
7784 bool is_declare_target
= false;
7785 if (is_global_var (decl
)
7786 && varpool_node::get_create (decl
)->offloadable
)
7788 struct gimplify_omp_ctx
*octx
;
7789 for (octx
= ctx
->outer_context
;
7790 octx
; octx
= octx
->outer_context
)
7792 n
= splay_tree_lookup (octx
->variables
,
7793 (splay_tree_key
)decl
);
7795 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
7796 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7799 is_declare_target
= octx
== NULL
;
7801 if (!is_declare_target
)
7804 enum omp_clause_defaultmap_kind kind
;
7805 if (lang_hooks
.decls
.omp_allocatable_p (decl
))
7806 gdmk
= GDMK_ALLOCATABLE
;
7807 else if (lang_hooks
.decls
.omp_scalar_target_p (decl
))
7808 gdmk
= GDMK_SCALAR_TARGET
;
7809 else if (lang_hooks
.decls
.omp_scalar_p (decl
, false))
7811 else if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
7812 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
7813 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
7815 gdmk
= GDMK_POINTER
;
7817 gdmk
= GDMK_AGGREGATE
;
7818 kind
= lang_hooks
.decls
.omp_predetermined_mapping (decl
);
7819 if (kind
!= OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
)
7821 if (kind
== OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
)
7822 nflags
|= GOVD_FIRSTPRIVATE
;
7823 else if (kind
== OMP_CLAUSE_DEFAULTMAP_TO
)
7824 nflags
|= GOVD_MAP
| GOVD_MAP_TO_ONLY
;
7828 else if (ctx
->defaultmap
[gdmk
] == 0)
7830 tree d
= lang_hooks
.decls
.omp_report_decl (decl
);
7831 error ("%qE not specified in enclosing %<target%>",
7833 inform (ctx
->location
, "enclosing %<target%>");
7835 else if (ctx
->defaultmap
[gdmk
]
7836 & (GOVD_MAP_0LEN_ARRAY
| GOVD_FIRSTPRIVATE
))
7837 nflags
|= ctx
->defaultmap
[gdmk
];
7840 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
7841 nflags
|= ctx
->defaultmap
[gdmk
] & ~GOVD_MAP
;
7846 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
7847 if ((ctx
->region_type
& ORT_ACC
) && octx
)
7849 /* Look in outer OpenACC contexts, to see if there's a
7850 data attribute for this variable. */
7851 omp_notice_variable (octx
, decl
, in_code
);
7853 for (; octx
; octx
= octx
->outer_context
)
7855 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
7858 = splay_tree_lookup (octx
->variables
,
7859 (splay_tree_key
) decl
);
7862 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
7863 error ("variable %qE declared in enclosing "
7864 "%<host_data%> region", DECL_NAME (decl
));
7866 if (octx
->region_type
== ORT_ACC_DATA
7867 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
7868 nflags
|= GOVD_MAP_0LEN_ARRAY
;
7874 if ((nflags
& ~(GOVD_MAP_TO_ONLY
| GOVD_MAP_FROM_ONLY
7875 | GOVD_MAP_ALLOC_ONLY
)) == flags
)
7877 tree type
= TREE_TYPE (decl
);
7879 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
7880 && omp_privatize_by_reference (decl
))
7881 type
= TREE_TYPE (type
);
7882 if (!lang_hooks
.types
.omp_mappable_type (type
))
7884 error ("%qD referenced in target region does not have "
7885 "a mappable type", decl
);
7886 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
7890 if ((ctx
->region_type
& ORT_ACC
) != 0)
7891 nflags
= oacc_default_clause (ctx
, decl
, flags
);
7897 omp_add_variable (ctx
, decl
, nflags
);
7901 /* If nothing changed, there's nothing left to do. */
7902 if ((n
->value
& flags
) == flags
)
7912 if (ctx
->region_type
== ORT_WORKSHARE
7913 || ctx
->region_type
== ORT_TASKGROUP
7914 || ctx
->region_type
== ORT_SIMD
7915 || ctx
->region_type
== ORT_ACC
7916 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
7919 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
7921 if ((flags
& GOVD_PRIVATE
)
7922 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
7923 flags
|= GOVD_PRIVATE_OUTER_REF
;
7925 omp_add_variable (ctx
, decl
, flags
);
7927 shared
= (flags
& GOVD_SHARED
) != 0;
7928 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7932 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
7933 lb, b or incr expressions, those shouldn't be turned into simd arrays. */
7934 if (ctx
->region_type
== ORT_SIMD
7935 && ctx
->in_for_exprs
7936 && ((n
->value
& (GOVD_PRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
))
7938 flags
&= ~GOVD_SEEN
;
7940 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
7941 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
7942 && DECL_SIZE (decl
))
7944 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7947 tree t
= DECL_VALUE_EXPR (decl
);
7948 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7949 t
= TREE_OPERAND (t
, 0);
7950 gcc_assert (DECL_P (t
));
7951 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7952 n2
->value
|= GOVD_SEEN
;
7954 else if (omp_privatize_by_reference (decl
)
7955 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
7956 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
7960 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7961 gcc_assert (DECL_P (t
));
7962 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7964 omp_notice_variable (ctx
, t
, true);
7968 if (ctx
->region_type
& ORT_ACC
)
7969 /* For OpenACC, as remarked above, defer expansion. */
7972 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
7973 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7975 /* If nothing changed, there's nothing left to do. */
7976 if ((n
->value
& flags
) == flags
)
7982 /* If the variable is private in the current context, then we don't
7983 need to propagate anything to an outer context. */
7984 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
7986 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7987 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7989 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
7990 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7991 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7993 if (ctx
->outer_context
7994 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
7999 /* Verify that DECL is private within CTX. If there's specific information
8000 to the contrary in the innermost scope, generate an error. */
8003 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
8007 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
8010 if (n
->value
& GOVD_SHARED
)
8012 if (ctx
== gimplify_omp_ctxp
)
8015 error ("iteration variable %qE is predetermined linear",
8018 error ("iteration variable %qE should be private",
8020 n
->value
= GOVD_PRIVATE
;
8026 else if ((n
->value
& GOVD_EXPLICIT
) != 0
8027 && (ctx
== gimplify_omp_ctxp
8028 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
8029 && gimplify_omp_ctxp
->outer_context
== ctx
)))
8031 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
8032 error ("iteration variable %qE should not be firstprivate",
8034 else if ((n
->value
& GOVD_REDUCTION
) != 0)
8035 error ("iteration variable %qE should not be reduction",
8037 else if (simd
!= 1 && (n
->value
& GOVD_LINEAR
) != 0)
8038 error ("iteration variable %qE should not be linear",
8041 return (ctx
== gimplify_omp_ctxp
8042 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
8043 && gimplify_omp_ctxp
->outer_context
== ctx
));
8046 if (ctx
->region_type
!= ORT_WORKSHARE
8047 && ctx
->region_type
!= ORT_TASKGROUP
8048 && ctx
->region_type
!= ORT_SIMD
8049 && ctx
->region_type
!= ORT_ACC
)
8051 else if (ctx
->outer_context
)
8052 return omp_is_private (ctx
->outer_context
, decl
, simd
);
8056 /* Return true if DECL is private within a parallel region
8057 that binds to the current construct's context or in parallel
8058 region's REDUCTION clause. */
8061 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
8067 ctx
= ctx
->outer_context
;
8070 if (is_global_var (decl
))
8073 /* References might be private, but might be shared too,
8074 when checking for copyprivate, assume they might be
8075 private, otherwise assume they might be shared. */
8079 if (omp_privatize_by_reference (decl
))
8082 /* Treat C++ privatized non-static data members outside
8083 of the privatization the same. */
8084 if (omp_member_access_dummy_var (decl
))
8090 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8092 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
8093 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
8095 if ((ctx
->region_type
& ORT_TARGET_DATA
) != 0
8097 || (n
->value
& GOVD_MAP
) == 0)
8104 if ((n
->value
& GOVD_LOCAL
) != 0
8105 && omp_member_access_dummy_var (decl
))
8107 return (n
->value
& GOVD_SHARED
) == 0;
8110 if (ctx
->region_type
== ORT_WORKSHARE
8111 || ctx
->region_type
== ORT_TASKGROUP
8112 || ctx
->region_type
== ORT_SIMD
8113 || ctx
->region_type
== ORT_ACC
)
8122 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8125 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
8129 /* If this node has been visited, unmark it and keep looking. */
8130 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
8133 if (IS_TYPE_OR_DECL_P (t
))
8139 /* Gimplify the affinity clause but effectively ignore it.
8142 if ((step > 1) ? var <= end : var > end)
8143 locatator_var_expr; */
8146 gimplify_omp_affinity (tree
*list_p
, gimple_seq
*pre_p
)
8148 tree last_iter
= NULL_TREE
;
8149 tree last_bind
= NULL_TREE
;
8150 tree label
= NULL_TREE
;
8151 tree
*last_body
= NULL
;
8152 for (tree c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8153 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_AFFINITY
)
8155 tree t
= OMP_CLAUSE_DECL (c
);
8156 if (TREE_CODE (t
) == TREE_LIST
8158 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8160 if (TREE_VALUE (t
) == null_pointer_node
)
8162 if (TREE_PURPOSE (t
) != last_iter
)
8166 append_to_statement_list (label
, last_body
);
8167 gimplify_and_add (last_bind
, pre_p
);
8168 last_bind
= NULL_TREE
;
8170 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8172 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
8173 is_gimple_val
, fb_rvalue
) == GS_ERROR
8174 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
8175 is_gimple_val
, fb_rvalue
) == GS_ERROR
8176 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
8177 is_gimple_val
, fb_rvalue
) == GS_ERROR
8178 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
8179 is_gimple_val
, fb_rvalue
)
8183 last_iter
= TREE_PURPOSE (t
);
8184 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8185 last_bind
= build3 (BIND_EXPR
, void_type_node
, BLOCK_VARS (block
),
8187 last_body
= &BIND_EXPR_BODY (last_bind
);
8188 tree cond
= NULL_TREE
;
8189 location_t loc
= OMP_CLAUSE_LOCATION (c
);
8190 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8192 tree var
= TREE_VEC_ELT (it
, 0);
8193 tree begin
= TREE_VEC_ELT (it
, 1);
8194 tree end
= TREE_VEC_ELT (it
, 2);
8195 tree step
= TREE_VEC_ELT (it
, 3);
8196 loc
= DECL_SOURCE_LOCATION (var
);
8197 tree tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8199 append_to_statement_list_force (tem
, last_body
);
8201 tree cond1
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8202 step
, build_zero_cst (TREE_TYPE (step
)));
8203 tree cond2
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
,
8205 tree cond3
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8207 cond1
= fold_build3_loc (loc
, COND_EXPR
, boolean_type_node
,
8208 cond1
, cond2
, cond3
);
8210 cond
= fold_build2_loc (loc
, TRUTH_AND_EXPR
,
8211 boolean_type_node
, cond
, cond1
);
8215 tree cont_label
= create_artificial_label (loc
);
8216 label
= build1 (LABEL_EXPR
, void_type_node
, cont_label
);
8217 tree tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, cond
,
8219 build_and_jump (&cont_label
));
8220 append_to_statement_list_force (tem
, last_body
);
8222 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8224 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
), 0),
8226 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8228 if (error_operand_p (TREE_VALUE (t
)))
8230 append_to_statement_list_force (TREE_VALUE (t
), last_body
);
8231 TREE_VALUE (t
) = null_pointer_node
;
8237 append_to_statement_list (label
, last_body
);
8238 gimplify_and_add (last_bind
, pre_p
);
8239 last_bind
= NULL_TREE
;
8241 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8243 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8244 NULL
, is_gimple_val
, fb_rvalue
);
8245 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8247 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8249 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8250 is_gimple_lvalue
, fb_lvalue
) == GS_ERROR
)
8252 gimplify_and_add (OMP_CLAUSE_DECL (c
), pre_p
);
8257 append_to_statement_list (label
, last_body
);
8258 gimplify_and_add (last_bind
, pre_p
);
8263 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8264 lower all the depend clauses by populating corresponding depend
8265 array. Returns 0 if there are no such depend clauses, or
8266 2 if all depend clauses should be removed, 1 otherwise. */
8269 gimplify_omp_depend (tree
*list_p
, gimple_seq
*pre_p
)
8273 size_t n
[4] = { 0, 0, 0, 0 };
8275 tree counts
[4] = { NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
};
8276 tree last_iter
= NULL_TREE
, last_count
= NULL_TREE
;
8278 location_t first_loc
= UNKNOWN_LOCATION
;
8280 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8281 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8283 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8285 case OMP_CLAUSE_DEPEND_IN
:
8288 case OMP_CLAUSE_DEPEND_OUT
:
8289 case OMP_CLAUSE_DEPEND_INOUT
:
8292 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8295 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8298 case OMP_CLAUSE_DEPEND_SOURCE
:
8299 case OMP_CLAUSE_DEPEND_SINK
:
8304 tree t
= OMP_CLAUSE_DECL (c
);
8305 if (first_loc
== UNKNOWN_LOCATION
)
8306 first_loc
= OMP_CLAUSE_LOCATION (c
);
8307 if (TREE_CODE (t
) == TREE_LIST
8309 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8311 if (TREE_PURPOSE (t
) != last_iter
)
8313 tree tcnt
= size_one_node
;
8314 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8316 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
8317 is_gimple_val
, fb_rvalue
) == GS_ERROR
8318 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
8319 is_gimple_val
, fb_rvalue
) == GS_ERROR
8320 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
8321 is_gimple_val
, fb_rvalue
) == GS_ERROR
8322 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
8323 is_gimple_val
, fb_rvalue
)
8326 tree var
= TREE_VEC_ELT (it
, 0);
8327 tree begin
= TREE_VEC_ELT (it
, 1);
8328 tree end
= TREE_VEC_ELT (it
, 2);
8329 tree step
= TREE_VEC_ELT (it
, 3);
8330 tree orig_step
= TREE_VEC_ELT (it
, 4);
8331 tree type
= TREE_TYPE (var
);
8332 tree stype
= TREE_TYPE (step
);
8333 location_t loc
= DECL_SOURCE_LOCATION (var
);
8335 /* Compute count for this iterator as
8337 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8338 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8339 and compute product of those for the entire depend
8341 if (POINTER_TYPE_P (type
))
8342 endmbegin
= fold_build2_loc (loc
, POINTER_DIFF_EXPR
,
8345 endmbegin
= fold_build2_loc (loc
, MINUS_EXPR
, type
,
8347 tree stepm1
= fold_build2_loc (loc
, MINUS_EXPR
, stype
,
8349 build_int_cst (stype
, 1));
8350 tree stepp1
= fold_build2_loc (loc
, PLUS_EXPR
, stype
, step
,
8351 build_int_cst (stype
, 1));
8352 tree pos
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
8353 unshare_expr (endmbegin
),
8355 pos
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
8357 tree neg
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
8359 if (TYPE_UNSIGNED (stype
))
8361 neg
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, neg
);
8362 step
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, step
);
8364 neg
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
8367 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8370 pos
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, pos
,
8371 build_int_cst (stype
, 0));
8372 cond
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
,
8374 neg
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, neg
,
8375 build_int_cst (stype
, 0));
8376 tree osteptype
= TREE_TYPE (orig_step
);
8377 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8379 build_int_cst (osteptype
, 0));
8380 tree cnt
= fold_build3_loc (loc
, COND_EXPR
, stype
,
8382 cnt
= fold_convert_loc (loc
, sizetype
, cnt
);
8383 if (gimplify_expr (&cnt
, pre_p
, NULL
, is_gimple_val
,
8384 fb_rvalue
) == GS_ERROR
)
8386 tcnt
= size_binop_loc (loc
, MULT_EXPR
, tcnt
, cnt
);
8388 if (gimplify_expr (&tcnt
, pre_p
, NULL
, is_gimple_val
,
8389 fb_rvalue
) == GS_ERROR
)
8391 last_iter
= TREE_PURPOSE (t
);
8394 if (counts
[i
] == NULL_TREE
)
8395 counts
[i
] = last_count
;
8397 counts
[i
] = size_binop_loc (OMP_CLAUSE_LOCATION (c
),
8398 PLUS_EXPR
, counts
[i
], last_count
);
8403 for (i
= 0; i
< 4; i
++)
8409 tree total
= size_zero_node
;
8410 for (i
= 0; i
< 4; i
++)
8412 unused
[i
] = counts
[i
] == NULL_TREE
&& n
[i
] == 0;
8413 if (counts
[i
] == NULL_TREE
)
8414 counts
[i
] = size_zero_node
;
8416 counts
[i
] = size_binop (PLUS_EXPR
, counts
[i
], size_int (n
[i
]));
8417 if (gimplify_expr (&counts
[i
], pre_p
, NULL
, is_gimple_val
,
8418 fb_rvalue
) == GS_ERROR
)
8420 total
= size_binop (PLUS_EXPR
, total
, counts
[i
]);
8423 if (gimplify_expr (&total
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8426 bool is_old
= unused
[1] && unused
[3];
8427 tree totalpx
= size_binop (PLUS_EXPR
, unshare_expr (total
),
8428 size_int (is_old
? 1 : 4));
8429 tree type
= build_array_type (ptr_type_node
, build_index_type (totalpx
));
8430 tree array
= create_tmp_var_raw (type
);
8431 TREE_ADDRESSABLE (array
) = 1;
8432 if (!poly_int_tree_p (totalpx
))
8434 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array
)))
8435 gimplify_type_sizes (TREE_TYPE (array
), pre_p
);
8436 if (gimplify_omp_ctxp
)
8438 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8440 && (ctx
->region_type
== ORT_WORKSHARE
8441 || ctx
->region_type
== ORT_TASKGROUP
8442 || ctx
->region_type
== ORT_SIMD
8443 || ctx
->region_type
== ORT_ACC
))
8444 ctx
= ctx
->outer_context
;
8446 omp_add_variable (ctx
, array
, GOVD_LOCAL
| GOVD_SEEN
);
8448 gimplify_vla_decl (array
, pre_p
);
8451 gimple_add_tmp_var (array
);
8452 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
8457 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8458 build_int_cst (ptr_type_node
, 0));
8459 gimplify_and_add (tem
, pre_p
);
8460 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
8463 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8464 fold_convert (ptr_type_node
, total
));
8465 gimplify_and_add (tem
, pre_p
);
8466 for (i
= 1; i
< (is_old
? 2 : 4); i
++)
8468 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (i
+ !is_old
),
8469 NULL_TREE
, NULL_TREE
);
8470 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, counts
[i
- 1]);
8471 gimplify_and_add (tem
, pre_p
);
8478 for (i
= 0; i
< 4; i
++)
8480 if (i
&& (i
>= j
|| unused
[i
- 1]))
8482 cnts
[i
] = cnts
[i
- 1];
8485 cnts
[i
] = create_tmp_var (sizetype
);
8487 g
= gimple_build_assign (cnts
[i
], size_int (is_old
? 2 : 5));
8492 t
= size_binop (PLUS_EXPR
, counts
[0], size_int (2));
8494 t
= size_binop (PLUS_EXPR
, cnts
[i
- 1], counts
[i
- 1]);
8495 if (gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8498 g
= gimple_build_assign (cnts
[i
], t
);
8500 gimple_seq_add_stmt (pre_p
, g
);
8503 last_iter
= NULL_TREE
;
8504 tree last_bind
= NULL_TREE
;
8505 tree
*last_body
= NULL
;
8506 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8507 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8509 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8511 case OMP_CLAUSE_DEPEND_IN
:
8514 case OMP_CLAUSE_DEPEND_OUT
:
8515 case OMP_CLAUSE_DEPEND_INOUT
:
8518 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8521 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8524 case OMP_CLAUSE_DEPEND_SOURCE
:
8525 case OMP_CLAUSE_DEPEND_SINK
:
8530 tree t
= OMP_CLAUSE_DECL (c
);
8531 if (TREE_CODE (t
) == TREE_LIST
8533 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8535 if (TREE_PURPOSE (t
) != last_iter
)
8538 gimplify_and_add (last_bind
, pre_p
);
8539 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8540 last_bind
= build3 (BIND_EXPR
, void_type_node
,
8541 BLOCK_VARS (block
), NULL
, block
);
8542 TREE_SIDE_EFFECTS (last_bind
) = 1;
8543 SET_EXPR_LOCATION (last_bind
, OMP_CLAUSE_LOCATION (c
));
8544 tree
*p
= &BIND_EXPR_BODY (last_bind
);
8545 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8547 tree var
= TREE_VEC_ELT (it
, 0);
8548 tree begin
= TREE_VEC_ELT (it
, 1);
8549 tree end
= TREE_VEC_ELT (it
, 2);
8550 tree step
= TREE_VEC_ELT (it
, 3);
8551 tree orig_step
= TREE_VEC_ELT (it
, 4);
8552 tree type
= TREE_TYPE (var
);
8553 location_t loc
= DECL_SOURCE_LOCATION (var
);
8561 if (orig_step > 0) {
8562 if (var < end) goto beg_label;
8564 if (var > end) goto beg_label;
8566 for each iterator, with inner iterators added to
8568 tree beg_label
= create_artificial_label (loc
);
8569 tree cond_label
= NULL_TREE
;
8570 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8572 append_to_statement_list_force (tem
, p
);
8573 tem
= build_and_jump (&cond_label
);
8574 append_to_statement_list_force (tem
, p
);
8575 tem
= build1 (LABEL_EXPR
, void_type_node
, beg_label
);
8576 append_to_statement_list (tem
, p
);
8577 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
8578 NULL_TREE
, NULL_TREE
);
8579 TREE_SIDE_EFFECTS (bind
) = 1;
8580 SET_EXPR_LOCATION (bind
, loc
);
8581 append_to_statement_list_force (bind
, p
);
8582 if (POINTER_TYPE_P (type
))
8583 tem
= build2_loc (loc
, POINTER_PLUS_EXPR
, type
,
8584 var
, fold_convert_loc (loc
, sizetype
,
8587 tem
= build2_loc (loc
, PLUS_EXPR
, type
, var
, step
);
8588 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8590 append_to_statement_list_force (tem
, p
);
8591 tem
= build1 (LABEL_EXPR
, void_type_node
, cond_label
);
8592 append_to_statement_list (tem
, p
);
8593 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8597 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8598 cond
, build_and_jump (&beg_label
),
8600 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8603 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8604 cond
, build_and_jump (&beg_label
),
8606 tree osteptype
= TREE_TYPE (orig_step
);
8607 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8609 build_int_cst (osteptype
, 0));
8610 tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8612 append_to_statement_list_force (tem
, p
);
8613 p
= &BIND_EXPR_BODY (bind
);
8617 last_iter
= TREE_PURPOSE (t
);
8618 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8620 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
),
8622 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8624 if (error_operand_p (TREE_VALUE (t
)))
8626 if (TREE_VALUE (t
) != null_pointer_node
)
8627 TREE_VALUE (t
) = build_fold_addr_expr (TREE_VALUE (t
));
8628 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8629 NULL_TREE
, NULL_TREE
);
8630 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8631 void_type_node
, r
, TREE_VALUE (t
));
8632 append_to_statement_list_force (tem
, last_body
);
8633 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8634 void_type_node
, cnts
[i
],
8635 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)));
8636 append_to_statement_list_force (tem
, last_body
);
8637 TREE_VALUE (t
) = null_pointer_node
;
8643 gimplify_and_add (last_bind
, pre_p
);
8644 last_bind
= NULL_TREE
;
8646 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8648 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8649 NULL
, is_gimple_val
, fb_rvalue
);
8650 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8652 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8654 if (OMP_CLAUSE_DECL (c
) != null_pointer_node
)
8655 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8656 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8657 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8659 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8660 NULL_TREE
, NULL_TREE
);
8661 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, OMP_CLAUSE_DECL (c
));
8662 gimplify_and_add (tem
, pre_p
);
8663 g
= gimple_build_assign (cnts
[i
], size_binop (PLUS_EXPR
, cnts
[i
],
8665 gimple_seq_add_stmt (pre_p
, g
);
8669 gimplify_and_add (last_bind
, pre_p
);
8670 tree cond
= boolean_false_node
;
8674 cond
= build2_loc (first_loc
, NE_EXPR
, boolean_type_node
, cnts
[0],
8675 size_binop_loc (first_loc
, PLUS_EXPR
, counts
[0],
8678 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8679 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8681 size_binop_loc (first_loc
, PLUS_EXPR
,
8687 tree prev
= size_int (5);
8688 for (i
= 0; i
< 4; i
++)
8692 prev
= size_binop_loc (first_loc
, PLUS_EXPR
, counts
[i
], prev
);
8693 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8694 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8695 cnts
[i
], unshare_expr (prev
)));
8698 tem
= build3_loc (first_loc
, COND_EXPR
, void_type_node
, cond
,
8699 build_call_expr_loc (first_loc
,
8700 builtin_decl_explicit (BUILT_IN_TRAP
),
8702 gimplify_and_add (tem
, pre_p
);
8703 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
8704 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
8705 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
8706 OMP_CLAUSE_CHAIN (c
) = *list_p
;
8711 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8712 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8713 the struct node to insert the new mapping after (when the struct node is
8714 initially created). PREV_NODE is the first of two or three mappings for a
8715 pointer, and is either:
8716 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8718 - not the node before C. This is true when we have a reference-to-pointer
8719 type (with a mapping for the reference and for the pointer), or for
8720 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8721 If SCP is non-null, the new node is inserted before *SCP.
8722 if SCP is null, the new node is inserted before PREV_NODE.
8724 - PREV_NODE, if SCP is non-null.
8725 - The newly-created ALLOC or RELEASE node, if SCP is null.
8726 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8727 reference to a pointer. */
8730 insert_struct_comp_map (enum tree_code code
, tree c
, tree struct_node
,
8731 tree prev_node
, tree
*scp
)
8733 enum gomp_map_kind mkind
8734 = (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
)
8735 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
8737 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
8738 tree cl
= scp
? prev_node
: c2
;
8739 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8740 OMP_CLAUSE_DECL (c2
) = unshare_expr (OMP_CLAUSE_DECL (c
));
8741 OMP_CLAUSE_CHAIN (c2
) = scp
? *scp
: prev_node
;
8742 if (OMP_CLAUSE_CHAIN (prev_node
) != c
8743 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node
)) == OMP_CLAUSE_MAP
8744 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8745 == GOMP_MAP_TO_PSET
))
8746 OMP_CLAUSE_SIZE (c2
) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node
));
8748 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (ptr_type_node
);
8750 OMP_CLAUSE_CHAIN (struct_node
) = c2
;
8752 /* We might need to create an additional mapping if we have a reference to a
8753 pointer (in C++). Don't do this if we have something other than a
8754 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8755 if (OMP_CLAUSE_CHAIN (prev_node
) != c
8756 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node
)) == OMP_CLAUSE_MAP
8757 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8758 == GOMP_MAP_ALWAYS_POINTER
)
8759 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8760 == GOMP_MAP_ATTACH_DETACH
)))
8762 tree c4
= OMP_CLAUSE_CHAIN (prev_node
);
8763 tree c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
8764 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8765 OMP_CLAUSE_DECL (c3
) = unshare_expr (OMP_CLAUSE_DECL (c4
));
8766 OMP_CLAUSE_SIZE (c3
) = TYPE_SIZE_UNIT (ptr_type_node
);
8767 OMP_CLAUSE_CHAIN (c3
) = prev_node
;
8769 OMP_CLAUSE_CHAIN (c2
) = c3
;
8780 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8781 and set *BITPOSP and *POFFSETP to the bit offset of the access.
8782 If BASE_REF is non-NULL and the containing object is a reference, set
8783 *BASE_REF to that reference before dereferencing the object.
8784 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8785 has array type, else return NULL. */
8788 extract_base_bit_offset (tree base
, tree
*base_ref
, poly_int64
*bitposp
,
8789 poly_offset_int
*poffsetp
, tree
*offsetp
)
8792 poly_int64 bitsize
, bitpos
;
8794 int unsignedp
, reversep
, volatilep
= 0;
8795 poly_offset_int poffset
;
8799 *base_ref
= NULL_TREE
;
8801 while (TREE_CODE (base
) == ARRAY_REF
)
8802 base
= TREE_OPERAND (base
, 0);
8804 if (TREE_CODE (base
) == INDIRECT_REF
)
8805 base
= TREE_OPERAND (base
, 0);
8809 if (TREE_CODE (base
) == ARRAY_REF
)
8811 while (TREE_CODE (base
) == ARRAY_REF
)
8812 base
= TREE_OPERAND (base
, 0);
8813 if (TREE_CODE (base
) != COMPONENT_REF
8814 || TREE_CODE (TREE_TYPE (base
)) != ARRAY_TYPE
)
8817 else if (TREE_CODE (base
) == INDIRECT_REF
8818 && TREE_CODE (TREE_OPERAND (base
, 0)) == COMPONENT_REF
8819 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
8821 base
= TREE_OPERAND (base
, 0);
8824 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
, &mode
,
8825 &unsignedp
, &reversep
, &volatilep
);
8827 tree orig_base
= base
;
8829 if ((TREE_CODE (base
) == INDIRECT_REF
8830 || (TREE_CODE (base
) == MEM_REF
8831 && integer_zerop (TREE_OPERAND (base
, 1))))
8832 && DECL_P (TREE_OPERAND (base
, 0))
8833 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0))) == REFERENCE_TYPE
)
8834 base
= TREE_OPERAND (base
, 0);
8836 if (offset
&& poly_int_tree_p (offset
))
8838 poffset
= wi::to_poly_offset (offset
);
8844 if (maybe_ne (bitpos
, 0))
8845 poffset
+= bits_to_bytes_round_down (bitpos
);
8848 *poffsetp
= poffset
;
8851 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8852 if (base_ref
&& orig_base
!= base
)
8853 *base_ref
= orig_base
;
8858 /* Returns true if EXPR is or contains (as a sub-component) BASE_PTR. */
8861 is_or_contains_p (tree expr
, tree base_ptr
)
8863 if ((TREE_CODE (expr
) == INDIRECT_REF
&& TREE_CODE (base_ptr
) == MEM_REF
)
8864 || (TREE_CODE (expr
) == MEM_REF
&& TREE_CODE (base_ptr
) == INDIRECT_REF
))
8865 return operand_equal_p (TREE_OPERAND (expr
, 0),
8866 TREE_OPERAND (base_ptr
, 0));
8867 while (!operand_equal_p (expr
, base_ptr
))
8869 if (TREE_CODE (base_ptr
) == COMPOUND_EXPR
)
8870 base_ptr
= TREE_OPERAND (base_ptr
, 1);
8871 if (TREE_CODE (base_ptr
) == COMPONENT_REF
8872 || TREE_CODE (base_ptr
) == POINTER_PLUS_EXPR
8873 || TREE_CODE (base_ptr
) == SAVE_EXPR
)
8874 base_ptr
= TREE_OPERAND (base_ptr
, 0);
8878 return operand_equal_p (expr
, base_ptr
);
8881 /* Implement OpenMP 5.x map ordering rules for target directives. There are
8882 several rules, and with some level of ambiguity, hopefully we can at least
8883 collect the complexity here in one place. */
8886 omp_target_reorder_clauses (tree
*list_p
)
8888 /* Collect refs to alloc/release/delete maps. */
8889 auto_vec
<tree
, 32> ard
;
8891 while (*cp
!= NULL_TREE
)
8892 if (OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
8893 && (OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ALLOC
8894 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_RELEASE
8895 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_DELETE
))
8897 /* Unlink cp and push to ard. */
8899 tree nc
= OMP_CLAUSE_CHAIN (c
);
8903 /* Any associated pointer type maps should also move along. */
8904 while (*cp
!= NULL_TREE
8905 && OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
8906 && (OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
8907 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8908 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ATTACH_DETACH
8909 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_POINTER
8910 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ALWAYS_POINTER
8911 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_TO_PSET
))
8914 nc
= OMP_CLAUSE_CHAIN (c
);
8920 cp
= &OMP_CLAUSE_CHAIN (*cp
);
8922 /* Link alloc/release/delete maps to the end of list. */
8923 for (unsigned int i
= 0; i
< ard
.length (); i
++)
8926 cp
= &OMP_CLAUSE_CHAIN (ard
[i
]);
8930 /* OpenMP 5.0 requires that pointer variables are mapped before
8931 its use as a base-pointer. */
8932 auto_vec
<tree
*, 32> atf
;
8933 for (tree
*cp
= list_p
; *cp
; cp
= &OMP_CLAUSE_CHAIN (*cp
))
8934 if (OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
)
8936 /* Collect alloc, to, from, to/from clause tree pointers. */
8937 gomp_map_kind k
= OMP_CLAUSE_MAP_KIND (*cp
);
8938 if (k
== GOMP_MAP_ALLOC
8940 || k
== GOMP_MAP_FROM
8941 || k
== GOMP_MAP_TOFROM
8942 || k
== GOMP_MAP_ALWAYS_TO
8943 || k
== GOMP_MAP_ALWAYS_FROM
8944 || k
== GOMP_MAP_ALWAYS_TOFROM
)
8948 for (unsigned int i
= 0; i
< atf
.length (); i
++)
8952 tree decl
= OMP_CLAUSE_DECL (*cp
);
8953 if (TREE_CODE (decl
) == INDIRECT_REF
|| TREE_CODE (decl
) == MEM_REF
)
8955 tree base_ptr
= TREE_OPERAND (decl
, 0);
8956 STRIP_TYPE_NOPS (base_ptr
);
8957 for (unsigned int j
= i
+ 1; j
< atf
.length (); j
++)
8961 tree decl2
= OMP_CLAUSE_DECL (*cp2
);
8963 decl2
= OMP_CLAUSE_DECL (*cp2
);
8964 if (is_or_contains_p (decl2
, base_ptr
))
8966 /* Move *cp2 to before *cp. */
8968 *cp2
= OMP_CLAUSE_CHAIN (c
);
8969 OMP_CLAUSE_CHAIN (c
) = *cp
;
8972 if (*cp2
!= NULL_TREE
8973 && OMP_CLAUSE_CODE (*cp2
) == OMP_CLAUSE_MAP
8974 && OMP_CLAUSE_MAP_KIND (*cp2
) == GOMP_MAP_ALWAYS_POINTER
)
8977 *cp2
= OMP_CLAUSE_CHAIN (c2
);
8978 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (c
);
8979 OMP_CLAUSE_CHAIN (c
) = c2
;
8988 /* For attach_detach map clauses, if there is another map that maps the
8989 attached/detached pointer, make sure that map is ordered before the
8992 for (tree
*cp
= list_p
; *cp
; cp
= &OMP_CLAUSE_CHAIN (*cp
))
8993 if (OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
)
8995 /* Collect alloc, to, from, to/from clauses, and
8996 always_pointer/attach_detach clauses. */
8997 gomp_map_kind k
= OMP_CLAUSE_MAP_KIND (*cp
);
8998 if (k
== GOMP_MAP_ALLOC
9000 || k
== GOMP_MAP_FROM
9001 || k
== GOMP_MAP_TOFROM
9002 || k
== GOMP_MAP_ALWAYS_TO
9003 || k
== GOMP_MAP_ALWAYS_FROM
9004 || k
== GOMP_MAP_ALWAYS_TOFROM
9005 || k
== GOMP_MAP_ATTACH_DETACH
9006 || k
== GOMP_MAP_ALWAYS_POINTER
)
9010 for (unsigned int i
= 0; i
< atf
.length (); i
++)
9014 tree ptr
= OMP_CLAUSE_DECL (*cp
);
9015 STRIP_TYPE_NOPS (ptr
);
9016 if (OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ATTACH_DETACH
)
9017 for (unsigned int j
= i
+ 1; j
< atf
.length (); j
++)
9020 tree decl2
= OMP_CLAUSE_DECL (*cp2
);
9021 if (OMP_CLAUSE_MAP_KIND (*cp2
) != GOMP_MAP_ATTACH_DETACH
9022 && OMP_CLAUSE_MAP_KIND (*cp2
) != GOMP_MAP_ALWAYS_POINTER
9023 && is_or_contains_p (decl2
, ptr
))
9025 /* Move *cp2 to before *cp. */
9027 *cp2
= OMP_CLAUSE_CHAIN (c
);
9028 OMP_CLAUSE_CHAIN (c
) = *cp
;
9032 /* If decl2 is of the form '*decl2_opnd0', and followed by an
9033 ALWAYS_POINTER or ATTACH_DETACH of 'decl2_opnd0', move the
9034 pointer operation along with *cp2. This can happen for C++
9035 reference sequences. */
9036 if (j
+ 1 < atf
.length ()
9037 && (TREE_CODE (decl2
) == INDIRECT_REF
9038 || TREE_CODE (decl2
) == MEM_REF
))
9040 tree
*cp3
= atf
[j
+ 1];
9041 tree decl3
= OMP_CLAUSE_DECL (*cp3
);
9042 tree decl2_opnd0
= TREE_OPERAND (decl2
, 0);
9043 if ((OMP_CLAUSE_MAP_KIND (*cp3
) == GOMP_MAP_ALWAYS_POINTER
9044 || OMP_CLAUSE_MAP_KIND (*cp3
) == GOMP_MAP_ATTACH_DETACH
)
9045 && operand_equal_p (decl3
, decl2_opnd0
))
9047 /* Also move *cp3 to before *cp. */
9049 *cp2
= OMP_CLAUSE_CHAIN (c
);
9050 OMP_CLAUSE_CHAIN (c
) = *cp
;
9061 /* DECL is supposed to have lastprivate semantics in the outer contexts
9062 of combined/composite constructs, starting with OCTX.
9063 Add needed lastprivate, shared or map clause if no data sharing or
9064 mapping clause are present. IMPLICIT_P is true if it is an implicit
9065 clause (IV on simd), in which case the lastprivate will not be
9066 copied to some constructs. */
/* NOTE(review): this extract is lossy -- several original source lines are
   absent (e.g. the return type on the missing line 9068, various braces, and
   the "else if" head at 9091), so the comments below describe only what the
   visible lines establish; confirm against the full gimplify.cc source.  */
9069 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx
*octx
,
9070 tree decl
, bool implicit_p
)
/* Remember the context we started from, so we can tell at the end whether
   the walk actually moved to an enclosing context (used at line 9135).  */
9072 struct gimplify_omp_ctx
*orig_octx
= octx
;
/* Walk outward through the chain of enclosing OMP contexts.  */
9073 for (; octx
; octx
= octx
->outer_context
)
/* Combined parallel, or a context whose region_type includes combined
   teams: if DECL has no entry in this context's variable table yet, make
   it shared there.  */
9075 if ((octx
->region_type
== ORT_COMBINED_PARALLEL
9076 || (octx
->region_type
& ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
)
9077 && splay_tree_lookup (octx
->variables
,
9078 (splay_tree_key
) decl
) == NULL
)
9080 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
/* A task-family region (ORT_TASK bit set) that is a combined loop
   (e.g. taskloop): give DECL lastprivate semantics if unrecorded.  */
9083 if ((octx
->region_type
& ORT_TASK
) != 0
9084 && octx
->combined_loop
9085 && splay_tree_lookup (octx
->variables
,
9086 (splay_tree_key
) decl
) == NULL
)
9088 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
/* Fragment of a condition whose "else if" head (original line 9091) is
   missing from this extract: a worksharing combined loop whose immediate
   outer context is a combined parallel, with DECL unrecorded in both --
   step out to the parallel and add the lastprivate there.  */
9092 && octx
->region_type
== ORT_WORKSHARE
9093 && octx
->combined_loop
9094 && splay_tree_lookup (octx
->variables
,
9095 (splay_tree_key
) decl
) == NULL
9096 && octx
->outer_context
9097 && octx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
9098 && splay_tree_lookup (octx
->outer_context
->variables
,
9099 (splay_tree_key
) decl
) == NULL
)
9101 octx
= octx
->outer_context
;
9102 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
/* Worksharing or OpenACC combined loop where DECL is unrecorded and not
   privatized (omp_check_private): add lastprivate here directly.  */
9105 if ((octx
->region_type
== ORT_WORKSHARE
|| octx
->region_type
== ORT_ACC
)
9106 && octx
->combined_loop
9107 && splay_tree_lookup (octx
->variables
,
9108 (splay_tree_key
) decl
) == NULL
9109 && !omp_check_private (octx
, decl
, false))
9111 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
/* Combined target region: decide how DECL is mapped there.  */
9114 if (octx
->region_type
== ORT_COMBINED_TARGET
)
9116 splay_tree_node n
= splay_tree_lookup (octx
->variables
,
9117 (splay_tree_key
) decl
);
/* NOTE(review): the guard between the lookup above and the add below
   (original lines 9118-9119, presumably an "if (n == NULL)" branch head)
   is missing from this extract -- confirm before relying on it.
   Visible behavior: map DECL in the target and continue outward.  */
9120 omp_add_variable (octx
, decl
, GOVD_MAP
| GOVD_SEEN
);
9121 octx
= octx
->outer_context
;
/* An explicit (non-implicit) clause overrides an implicitly added
   firstprivate: strip the implicit-firstprivate bits and re-add DECL
   as mapped, then keep walking outward.  */
9123 else if (!implicit_p
9124 && (n
->value
& GOVD_FIRSTPRIVATE_IMPLICIT
))
9126 n
->value
&= ~(GOVD_FIRSTPRIVATE
9127 | GOVD_FIRSTPRIVATE_IMPLICIT
9129 omp_add_variable (octx
, decl
, GOVD_MAP
| GOVD_SEEN
);
9130 octx
= octx
->outer_context
;
/* If the walk stopped at some enclosing context (or the clause is
   implicit), record the use of DECL there.  */
9135 if (octx
&& (implicit_p
|| octx
!= orig_octx
))
9136 omp_notice_variable (octx
, decl
, true);
9139 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
9140 and previous omp contexts. */
9143 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
9144 enum omp_region_type region_type
,
9145 enum tree_code code
)
9147 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
9149 hash_map
<tree_operand_hash
, tree
> *struct_map_to_clause
= NULL
;
9150 hash_map
<tree_operand_hash
, tree
*> *struct_seen_clause
= NULL
;
9151 hash_set
<tree
> *struct_deref_set
= NULL
;
9152 tree
*prev_list_p
= NULL
, *orig_list_p
= list_p
;
9153 int handled_depend_iterators
= -1;
9156 ctx
= new_omp_context (region_type
);
9158 outer_ctx
= ctx
->outer_context
;
9159 if (code
== OMP_TARGET
)
9161 if (!lang_GNU_Fortran ())
9162 ctx
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
9163 ctx
->defaultmap
[GDMK_SCALAR
] = GOVD_FIRSTPRIVATE
;
9164 ctx
->defaultmap
[GDMK_SCALAR_TARGET
] = (lang_GNU_Fortran ()
9165 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
9167 if (!lang_GNU_Fortran ())
9171 case OMP_TARGET_DATA
:
9172 case OMP_TARGET_ENTER_DATA
:
9173 case OMP_TARGET_EXIT_DATA
:
9175 case OACC_HOST_DATA
:
9178 ctx
->target_firstprivatize_array_bases
= true;
9183 if (code
== OMP_TARGET
9184 || code
== OMP_TARGET_DATA
9185 || code
== OMP_TARGET_ENTER_DATA
9186 || code
== OMP_TARGET_EXIT_DATA
)
9187 omp_target_reorder_clauses (list_p
);
9189 while ((c
= *list_p
) != NULL
)
9191 bool remove
= false;
9192 bool notice_outer
= true;
9193 const char *check_non_private
= NULL
;
9197 switch (OMP_CLAUSE_CODE (c
))
9199 case OMP_CLAUSE_PRIVATE
:
9200 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
9201 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
9203 flags
|= GOVD_PRIVATE_OUTER_REF
;
9204 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
9207 notice_outer
= false;
9209 case OMP_CLAUSE_SHARED
:
9210 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
9212 case OMP_CLAUSE_FIRSTPRIVATE
:
9213 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
9214 check_non_private
= "firstprivate";
9215 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
9217 gcc_assert (code
== OMP_TARGET
);
9218 flags
|= GOVD_FIRSTPRIVATE_IMPLICIT
;
9221 case OMP_CLAUSE_LASTPRIVATE
:
9222 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
9225 case OMP_DISTRIBUTE
:
9226 error_at (OMP_CLAUSE_LOCATION (c
),
9227 "conditional %<lastprivate%> clause on "
9228 "%qs construct", "distribute");
9229 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
9232 error_at (OMP_CLAUSE_LOCATION (c
),
9233 "conditional %<lastprivate%> clause on "
9234 "%qs construct", "taskloop");
9235 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
9240 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
9241 if (code
!= OMP_LOOP
)
9242 check_non_private
= "lastprivate";
9243 decl
= OMP_CLAUSE_DECL (c
);
9244 if (error_operand_p (decl
))
9246 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
9247 && !lang_hooks
.decls
.omp_scalar_p (decl
, true))
9249 error_at (OMP_CLAUSE_LOCATION (c
),
9250 "non-scalar variable %qD in conditional "
9251 "%<lastprivate%> clause", decl
);
9252 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
9254 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
9255 flags
|= GOVD_LASTPRIVATE_CONDITIONAL
;
9256 omp_lastprivate_for_combined_outer_constructs (outer_ctx
, decl
,
9259 case OMP_CLAUSE_REDUCTION
:
9260 if (OMP_CLAUSE_REDUCTION_TASK (c
))
9262 if (region_type
== ORT_WORKSHARE
|| code
== OMP_SCOPE
)
9265 nowait
= omp_find_clause (*list_p
,
9266 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
9268 && (outer_ctx
== NULL
9269 || outer_ctx
->region_type
!= ORT_COMBINED_PARALLEL
))
9271 error_at (OMP_CLAUSE_LOCATION (c
),
9272 "%<task%> reduction modifier on a construct "
9273 "with a %<nowait%> clause");
9274 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
9277 else if ((region_type
& ORT_PARALLEL
) != ORT_PARALLEL
)
9279 error_at (OMP_CLAUSE_LOCATION (c
),
9280 "invalid %<task%> reduction modifier on construct "
9281 "other than %<parallel%>, %qs, %<sections%> or "
9282 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
9283 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
9286 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
9290 error_at (OMP_CLAUSE_LOCATION (c
),
9291 "%<inscan%> %<reduction%> clause on "
9292 "%qs construct", "sections");
9293 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
9296 error_at (OMP_CLAUSE_LOCATION (c
),
9297 "%<inscan%> %<reduction%> clause on "
9298 "%qs construct", "parallel");
9299 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
9302 error_at (OMP_CLAUSE_LOCATION (c
),
9303 "%<inscan%> %<reduction%> clause on "
9304 "%qs construct", "teams");
9305 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
9308 error_at (OMP_CLAUSE_LOCATION (c
),
9309 "%<inscan%> %<reduction%> clause on "
9310 "%qs construct", "taskloop");
9311 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
9314 error_at (OMP_CLAUSE_LOCATION (c
),
9315 "%<inscan%> %<reduction%> clause on "
9316 "%qs construct", "scope");
9317 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
9323 case OMP_CLAUSE_IN_REDUCTION
:
9324 case OMP_CLAUSE_TASK_REDUCTION
:
9325 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
9326 /* OpenACC permits reductions on private variables. */
9327 if (!(region_type
& ORT_ACC
)
9328 /* taskgroup is actually not a worksharing region. */
9329 && code
!= OMP_TASKGROUP
)
9330 check_non_private
= omp_clause_code_name
[OMP_CLAUSE_CODE (c
)];
9331 decl
= OMP_CLAUSE_DECL (c
);
9332 if (TREE_CODE (decl
) == MEM_REF
)
9334 tree type
= TREE_TYPE (decl
);
9335 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
9336 gimplify_ctxp
->into_ssa
= false;
9337 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
9338 NULL
, is_gimple_val
, fb_rvalue
, false)
9341 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
9345 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
9346 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
9349 omp_firstprivatize_variable (ctx
, v
);
9350 omp_notice_variable (ctx
, v
, true);
9352 decl
= TREE_OPERAND (decl
, 0);
9353 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
9355 gimplify_ctxp
->into_ssa
= false;
9356 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
9357 NULL
, is_gimple_val
, fb_rvalue
, false)
9360 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
9364 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
9365 v
= TREE_OPERAND (decl
, 1);
9368 omp_firstprivatize_variable (ctx
, v
);
9369 omp_notice_variable (ctx
, v
, true);
9371 decl
= TREE_OPERAND (decl
, 0);
9373 if (TREE_CODE (decl
) == ADDR_EXPR
9374 || TREE_CODE (decl
) == INDIRECT_REF
)
9375 decl
= TREE_OPERAND (decl
, 0);
9378 case OMP_CLAUSE_LINEAR
:
9379 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
9380 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9387 if (code
== OMP_SIMD
9388 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
9390 struct gimplify_omp_ctx
*octx
= outer_ctx
;
9392 && octx
->region_type
== ORT_WORKSHARE
9393 && octx
->combined_loop
9394 && !octx
->distribute
)
9396 if (octx
->outer_context
9397 && (octx
->outer_context
->region_type
9398 == ORT_COMBINED_PARALLEL
))
9399 octx
= octx
->outer_context
->outer_context
;
9401 octx
= octx
->outer_context
;
9404 && octx
->region_type
== ORT_WORKSHARE
9405 && octx
->combined_loop
9406 && octx
->distribute
)
9408 error_at (OMP_CLAUSE_LOCATION (c
),
9409 "%<linear%> clause for variable other than "
9410 "loop iterator specified on construct "
9411 "combined with %<distribute%>");
9416 /* For combined #pragma omp parallel for simd, need to put
9417 lastprivate and perhaps firstprivate too on the
9418 parallel. Similarly for #pragma omp for simd. */
9419 struct gimplify_omp_ctx
*octx
= outer_ctx
;
9420 bool taskloop_seen
= false;
9424 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
9425 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9427 decl
= OMP_CLAUSE_DECL (c
);
9428 if (error_operand_p (decl
))
9434 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
9435 flags
|= GOVD_FIRSTPRIVATE
;
9436 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9437 flags
|= GOVD_LASTPRIVATE
;
9439 && octx
->region_type
== ORT_WORKSHARE
9440 && octx
->combined_loop
)
9442 if (octx
->outer_context
9443 && (octx
->outer_context
->region_type
9444 == ORT_COMBINED_PARALLEL
))
9445 octx
= octx
->outer_context
;
9446 else if (omp_check_private (octx
, decl
, false))
9450 && (octx
->region_type
& ORT_TASK
) != 0
9451 && octx
->combined_loop
)
9452 taskloop_seen
= true;
9454 && octx
->region_type
== ORT_COMBINED_PARALLEL
9455 && ((ctx
->region_type
== ORT_WORKSHARE
9456 && octx
== outer_ctx
)
9458 flags
= GOVD_SEEN
| GOVD_SHARED
;
9460 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
9461 == ORT_COMBINED_TEAMS
))
9462 flags
= GOVD_SEEN
| GOVD_SHARED
;
9464 && octx
->region_type
== ORT_COMBINED_TARGET
)
9466 if (flags
& GOVD_LASTPRIVATE
)
9467 flags
= GOVD_SEEN
| GOVD_MAP
;
9472 = splay_tree_lookup (octx
->variables
,
9473 (splay_tree_key
) decl
);
9474 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
9479 omp_add_variable (octx
, decl
, flags
);
9480 if (octx
->outer_context
== NULL
)
9482 octx
= octx
->outer_context
;
9487 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
9488 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
9489 omp_notice_variable (octx
, decl
, true);
9491 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
9492 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
9493 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9495 notice_outer
= false;
9496 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
9500 case OMP_CLAUSE_MAP
:
9501 decl
= OMP_CLAUSE_DECL (c
);
9502 if (error_operand_p (decl
))
9509 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
9512 case OMP_TARGET_DATA
:
9513 case OMP_TARGET_ENTER_DATA
:
9514 case OMP_TARGET_EXIT_DATA
:
9515 case OACC_ENTER_DATA
:
9516 case OACC_EXIT_DATA
:
9517 case OACC_HOST_DATA
:
9518 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9519 || (OMP_CLAUSE_MAP_KIND (c
)
9520 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9521 /* For target {,enter ,exit }data only the array slice is
9522 mapped, but not the pointer to it. */
9528 /* For Fortran, not only the pointer to the data is mapped but also
9529 the address of the pointer, the array descriptor etc.; for
9530 'exit data' - and in particular for 'delete:' - having an 'alloc:'
9531 does not make sense. Likewise, for 'update' only transferring the
9532 data itself is needed as the rest has been handled in previous
9533 directives. However, for 'exit data', the array descriptor needs
9534 to be delete; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
9536 NOTE: Generally, it is not safe to perform "enter data" operations
9537 on arrays where the data *or the descriptor* may go out of scope
9538 before a corresponding "exit data" operation -- and such a
9539 descriptor may be synthesized temporarily, e.g. to pass an
9540 explicit-shape array to a function expecting an assumed-shape
9541 argument. Performing "enter data" inside the called function
9542 would thus be problematic. */
9543 if (code
== OMP_TARGET_EXIT_DATA
9544 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
)
9545 OMP_CLAUSE_SET_MAP_KIND (c
, OMP_CLAUSE_MAP_KIND (*prev_list_p
)
9547 ? GOMP_MAP_DELETE
: GOMP_MAP_RELEASE
);
9548 else if ((code
== OMP_TARGET_EXIT_DATA
|| code
== OMP_TARGET_UPDATE
)
9549 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
9550 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
))
9555 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
9557 struct gimplify_omp_ctx
*octx
;
9558 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
9560 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
9563 = splay_tree_lookup (octx
->variables
,
9564 (splay_tree_key
) decl
);
9566 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
9567 "declared in enclosing %<host_data%> region",
9571 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9572 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
9573 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9574 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
9575 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9580 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9581 || (OMP_CLAUSE_MAP_KIND (c
)
9582 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9583 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9584 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
9587 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
9589 if ((region_type
& ORT_TARGET
) != 0)
9590 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
9591 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
9594 if (TREE_CODE (decl
) == TARGET_EXPR
)
9596 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
9597 is_gimple_lvalue
, fb_lvalue
)
9601 else if (!DECL_P (decl
))
9604 if (TREE_CODE (d
) == ARRAY_REF
)
9606 while (TREE_CODE (d
) == ARRAY_REF
)
9607 d
= TREE_OPERAND (d
, 0);
9608 if (TREE_CODE (d
) == COMPONENT_REF
9609 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
9612 pd
= &OMP_CLAUSE_DECL (c
);
9614 && TREE_CODE (decl
) == INDIRECT_REF
9615 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
9616 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9618 && (OMP_CLAUSE_MAP_KIND (c
)
9619 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
))
9621 pd
= &TREE_OPERAND (decl
, 0);
9622 decl
= TREE_OPERAND (decl
, 0);
9624 bool indir_p
= false;
9625 bool component_ref_p
= false;
9626 tree indir_base
= NULL_TREE
;
9627 tree orig_decl
= decl
;
9628 tree decl_ref
= NULL_TREE
;
9629 if ((region_type
& (ORT_ACC
| ORT_TARGET
| ORT_TARGET_DATA
)) != 0
9630 && TREE_CODE (*pd
) == COMPONENT_REF
9631 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
9632 && code
!= OACC_UPDATE
)
9634 while (TREE_CODE (decl
) == COMPONENT_REF
)
9636 decl
= TREE_OPERAND (decl
, 0);
9637 component_ref_p
= true;
9638 if (((TREE_CODE (decl
) == MEM_REF
9639 && integer_zerop (TREE_OPERAND (decl
, 1)))
9640 || INDIRECT_REF_P (decl
))
9641 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9646 decl
= TREE_OPERAND (decl
, 0);
9649 if (TREE_CODE (decl
) == INDIRECT_REF
9650 && DECL_P (TREE_OPERAND (decl
, 0))
9651 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9655 decl
= TREE_OPERAND (decl
, 0);
9659 else if (TREE_CODE (decl
) == COMPONENT_REF
9660 && (OMP_CLAUSE_MAP_KIND (c
)
9661 != GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
))
9663 component_ref_p
= true;
9664 while (TREE_CODE (decl
) == COMPONENT_REF
)
9665 decl
= TREE_OPERAND (decl
, 0);
9666 if (TREE_CODE (decl
) == INDIRECT_REF
9667 && DECL_P (TREE_OPERAND (decl
, 0))
9668 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9670 decl
= TREE_OPERAND (decl
, 0);
9672 if (decl
!= orig_decl
&& DECL_P (decl
) && indir_p
9673 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
9675 && TREE_CODE (TREE_TYPE (decl_ref
)) == POINTER_TYPE
)))
9678 = ((code
== OACC_EXIT_DATA
|| code
== OMP_TARGET_EXIT_DATA
)
9679 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
9680 /* We have a dereference of a struct member. Make this an
9681 attach/detach operation, and ensure the base pointer is
9682 mapped as a FIRSTPRIVATE_POINTER. */
9683 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
9684 flags
= GOVD_MAP
| GOVD_SEEN
| GOVD_EXPLICIT
;
9685 tree next_clause
= OMP_CLAUSE_CHAIN (c
);
9686 if (k
== GOMP_MAP_ATTACH
9687 && code
!= OACC_ENTER_DATA
9688 && code
!= OMP_TARGET_ENTER_DATA
9690 || (OMP_CLAUSE_CODE (next_clause
) != OMP_CLAUSE_MAP
)
9691 || (OMP_CLAUSE_MAP_KIND (next_clause
)
9692 != GOMP_MAP_POINTER
)
9693 || OMP_CLAUSE_DECL (next_clause
) != decl
)
9694 && (!struct_deref_set
9695 || !struct_deref_set
->contains (decl
))
9696 && (!struct_map_to_clause
9697 || !struct_map_to_clause
->get (indir_base
)))
9699 if (!struct_deref_set
)
9700 struct_deref_set
= new hash_set
<tree
> ();
9701 /* As well as the attach, we also need a
9702 FIRSTPRIVATE_POINTER clause to properly map the
9703 pointer to the struct base. */
9704 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9706 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ALLOC
);
9707 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2
)
9710 = build_int_cst (build_pointer_type (char_type_node
),
9712 OMP_CLAUSE_DECL (c2
)
9713 = build2 (MEM_REF
, char_type_node
,
9714 decl_ref
? decl_ref
: decl
, charptr_zero
);
9715 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
9716 tree c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9718 OMP_CLAUSE_SET_MAP_KIND (c3
,
9719 GOMP_MAP_FIRSTPRIVATE_POINTER
);
9720 OMP_CLAUSE_DECL (c3
) = decl
;
9721 OMP_CLAUSE_SIZE (c3
) = size_zero_node
;
9722 tree mapgrp
= *prev_list_p
;
9724 OMP_CLAUSE_CHAIN (c3
) = mapgrp
;
9725 OMP_CLAUSE_CHAIN (c2
) = c3
;
9727 struct_deref_set
->add (decl
);
9731 /* An "attach/detach" operation on an update directive should
9732 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
9733 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
9734 depends on the previous mapping. */
9735 if (code
== OACC_UPDATE
9736 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9737 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_ALWAYS_POINTER
);
9740 && (INDIRECT_REF_P (decl
)
9741 || TREE_CODE (decl
) == MEM_REF
9742 || TREE_CODE (decl
) == ARRAY_REF
)))
9743 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
9744 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
9745 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
9746 && code
!= OACC_UPDATE
9747 && code
!= OMP_TARGET_UPDATE
)
9749 if (error_operand_p (decl
))
9755 tree stype
= TREE_TYPE (decl
);
9756 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
9757 stype
= TREE_TYPE (stype
);
9758 if (TYPE_SIZE_UNIT (stype
) == NULL
9759 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
9761 error_at (OMP_CLAUSE_LOCATION (c
),
9762 "mapping field %qE of variable length "
9763 "structure", OMP_CLAUSE_DECL (c
));
9768 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
9769 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9771 /* Error recovery. */
9772 if (prev_list_p
== NULL
)
9778 /* The below prev_list_p based error recovery code is
9779 currently no longer valid for OpenMP. */
9780 if (code
!= OMP_TARGET
9781 && code
!= OMP_TARGET_DATA
9782 && code
!= OMP_TARGET_UPDATE
9783 && code
!= OMP_TARGET_ENTER_DATA
9784 && code
!= OMP_TARGET_EXIT_DATA
9785 && OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
9787 tree ch
= OMP_CLAUSE_CHAIN (*prev_list_p
);
9788 if (ch
== NULL_TREE
|| OMP_CLAUSE_CHAIN (ch
) != c
)
9796 poly_offset_int offset1
;
9802 = extract_base_bit_offset (OMP_CLAUSE_DECL (c
), &base_ref
,
9806 bool do_map_struct
= (base
== decl
&& !tree_offset1
);
9810 ? splay_tree_lookup (ctx
->variables
,
9811 (splay_tree_key
) decl
)
9813 bool ptr
= (OMP_CLAUSE_MAP_KIND (c
)
9814 == GOMP_MAP_ALWAYS_POINTER
);
9815 bool attach_detach
= (OMP_CLAUSE_MAP_KIND (c
)
9816 == GOMP_MAP_ATTACH_DETACH
);
9817 bool attach
= OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
9818 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
;
9819 bool has_attachments
= false;
9820 /* For OpenACC, pointers in structs should trigger an
9823 && ((region_type
& (ORT_ACC
| ORT_TARGET
| ORT_TARGET_DATA
))
9824 || code
== OMP_TARGET_ENTER_DATA
9825 || code
== OMP_TARGET_EXIT_DATA
))
9828 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9829 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9830 have detected a case that needs a GOMP_MAP_STRUCT
9833 = ((code
== OACC_EXIT_DATA
|| code
== OMP_TARGET_EXIT_DATA
)
9834 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
9835 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
9836 has_attachments
= true;
9839 /* We currently don't handle non-constant offset accesses wrt to
9840 GOMP_MAP_STRUCT elements. */
9842 goto skip_map_struct
;
9844 /* Nor for attach_detach for OpenMP. */
9845 if ((code
== OMP_TARGET
9846 || code
== OMP_TARGET_DATA
9847 || code
== OMP_TARGET_UPDATE
9848 || code
== OMP_TARGET_ENTER_DATA
9849 || code
== OMP_TARGET_EXIT_DATA
)
9854 if (struct_seen_clause
== NULL
)
9856 = new hash_map
<tree_operand_hash
, tree
*>;
9857 if (!struct_seen_clause
->get (decl
))
9858 struct_seen_clause
->put (decl
, list_p
);
9861 goto skip_map_struct
;
9865 && (n
== NULL
|| (n
->value
& GOVD_MAP
) == 0))
9867 && (!struct_map_to_clause
9868 || struct_map_to_clause
->get (decl
) == NULL
)))
9870 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9872 gomp_map_kind k
= attach
? GOMP_MAP_FORCE_PRESENT
9875 OMP_CLAUSE_SET_MAP_KIND (l
, k
);
9877 OMP_CLAUSE_DECL (l
) = unshare_expr (base_ref
);
9880 OMP_CLAUSE_DECL (l
) = unshare_expr (decl
);
9881 if (!DECL_P (OMP_CLAUSE_DECL (l
))
9882 && (gimplify_expr (&OMP_CLAUSE_DECL (l
),
9883 pre_p
, NULL
, is_gimple_lvalue
,
9894 : DECL_P (OMP_CLAUSE_DECL (l
))
9895 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l
))
9896 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l
))));
9897 if (struct_map_to_clause
== NULL
)
9898 struct_map_to_clause
9899 = new hash_map
<tree_operand_hash
, tree
>;
9900 struct_map_to_clause
->put (decl
, l
);
9901 if (ptr
|| attach_detach
)
9903 tree
**sc
= (struct_seen_clause
9904 ? struct_seen_clause
->get (decl
)
9906 tree
*insert_node_pos
= sc
? *sc
: prev_list_p
;
9908 insert_struct_comp_map (code
, c
, l
, *insert_node_pos
,
9910 *insert_node_pos
= l
;
9915 OMP_CLAUSE_CHAIN (l
) = c
;
9917 list_p
= &OMP_CLAUSE_CHAIN (l
);
9919 if (base_ref
&& code
== OMP_TARGET
)
9921 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9923 enum gomp_map_kind mkind
9924 = GOMP_MAP_FIRSTPRIVATE_REFERENCE
;
9925 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
9926 OMP_CLAUSE_DECL (c2
) = decl
;
9927 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
9928 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
9929 OMP_CLAUSE_CHAIN (l
) = c2
;
9931 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
9932 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
9936 if (has_attachments
)
9937 flags
|= GOVD_MAP_HAS_ATTACHMENTS
;
9939 /* If this is a *pointer-to-struct expression, make sure a
9940 firstprivate map of the base-pointer exists. */
9942 && ((TREE_CODE (decl
) == MEM_REF
9943 && integer_zerop (TREE_OPERAND (decl
, 1)))
9944 || INDIRECT_REF_P (decl
))
9945 && DECL_P (TREE_OPERAND (decl
, 0))
9946 && !splay_tree_lookup (ctx
->variables
,
9948 TREE_OPERAND (decl
, 0))))
9950 decl
= TREE_OPERAND (decl
, 0);
9951 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9953 enum gomp_map_kind mkind
9954 = GOMP_MAP_FIRSTPRIVATE_POINTER
;
9955 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
9956 OMP_CLAUSE_DECL (c2
) = decl
;
9957 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
9958 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (c
);
9959 OMP_CLAUSE_CHAIN (c
) = c2
;
9965 else if (struct_map_to_clause
)
9967 tree
*osc
= struct_map_to_clause
->get (decl
);
9968 tree
*sc
= NULL
, *scp
= NULL
;
9970 && (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
9973 n
->value
|= GOVD_SEEN
;
9974 sc
= &OMP_CLAUSE_CHAIN (*osc
);
9976 && (OMP_CLAUSE_MAP_KIND (*sc
)
9977 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9978 sc
= &OMP_CLAUSE_CHAIN (*sc
);
9979 /* Here "prev_list_p" is the end of the inserted
9980 alloc/release nodes after the struct node, OSC. */
9981 for (; *sc
!= c
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
9982 if ((ptr
|| attach_detach
) && sc
== prev_list_p
)
9984 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9986 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9988 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9993 tree sc_decl
= OMP_CLAUSE_DECL (*sc
);
9994 poly_offset_int offsetn
;
9998 = extract_base_bit_offset (sc_decl
, NULL
,
10005 if ((region_type
& ORT_ACC
) != 0)
10007 /* This duplicate checking code is currently only
10008 enabled for OpenACC. */
10009 tree d1
= OMP_CLAUSE_DECL (*sc
);
10010 tree d2
= OMP_CLAUSE_DECL (c
);
10011 while (TREE_CODE (d1
) == ARRAY_REF
)
10012 d1
= TREE_OPERAND (d1
, 0);
10013 while (TREE_CODE (d2
) == ARRAY_REF
)
10014 d2
= TREE_OPERAND (d2
, 0);
10015 if (TREE_CODE (d1
) == INDIRECT_REF
)
10016 d1
= TREE_OPERAND (d1
, 0);
10017 if (TREE_CODE (d2
) == INDIRECT_REF
)
10018 d2
= TREE_OPERAND (d2
, 0);
10019 while (TREE_CODE (d1
) == COMPONENT_REF
)
10020 if (TREE_CODE (d2
) == COMPONENT_REF
10021 && TREE_OPERAND (d1
, 1)
10022 == TREE_OPERAND (d2
, 1))
10024 d1
= TREE_OPERAND (d1
, 0);
10025 d2
= TREE_OPERAND (d2
, 0);
10031 error_at (OMP_CLAUSE_LOCATION (c
),
10032 "%qE appears more than once in map "
10033 "clauses", OMP_CLAUSE_DECL (c
));
10038 if (maybe_lt (offset1
, offsetn
)
10039 || (known_eq (offset1
, offsetn
)
10040 && maybe_lt (bitpos1
, bitposn
)))
10042 if (ptr
|| attach_detach
)
10051 OMP_CLAUSE_SIZE (*osc
)
10052 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
),
10054 if (ptr
|| attach_detach
)
10056 tree cl
= insert_struct_comp_map (code
, c
, NULL
,
10057 *prev_list_p
, scp
);
10058 if (sc
== prev_list_p
)
10061 prev_list_p
= NULL
;
10065 *prev_list_p
= OMP_CLAUSE_CHAIN (c
);
10066 list_p
= prev_list_p
;
10067 prev_list_p
= NULL
;
10068 OMP_CLAUSE_CHAIN (c
) = *sc
;
10075 if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
,
10082 *list_p
= OMP_CLAUSE_CHAIN (c
);
10083 OMP_CLAUSE_CHAIN (c
) = *sc
;
10091 else if ((code
== OACC_ENTER_DATA
10092 || code
== OACC_EXIT_DATA
10093 || code
== OACC_DATA
10094 || code
== OACC_PARALLEL
10095 || code
== OACC_KERNELS
10096 || code
== OACC_SERIAL
10097 || code
== OMP_TARGET_ENTER_DATA
10098 || code
== OMP_TARGET_EXIT_DATA
)
10099 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
10101 gomp_map_kind k
= ((code
== OACC_EXIT_DATA
10102 || code
== OMP_TARGET_EXIT_DATA
)
10103 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
10104 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
10107 if (code
== OMP_TARGET
&& OMP_CLAUSE_MAP_IN_REDUCTION (c
))
10109 /* Don't gimplify *pd fully at this point, as the base
10110 will need to be adjusted during omp lowering. */
10111 auto_vec
<tree
, 10> expr_stack
;
10113 while (handled_component_p (*p
)
10114 || TREE_CODE (*p
) == INDIRECT_REF
10115 || TREE_CODE (*p
) == ADDR_EXPR
10116 || TREE_CODE (*p
) == MEM_REF
10117 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
10119 expr_stack
.safe_push (*p
);
10120 p
= &TREE_OPERAND (*p
, 0);
10122 for (int i
= expr_stack
.length () - 1; i
>= 0; i
--)
10124 tree t
= expr_stack
[i
];
10125 if (TREE_CODE (t
) == ARRAY_REF
10126 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
10128 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
10130 tree low
= unshare_expr (array_ref_low_bound (t
));
10131 if (!is_gimple_min_invariant (low
))
10133 TREE_OPERAND (t
, 2) = low
;
10134 if (gimplify_expr (&TREE_OPERAND (t
, 2),
10137 fb_rvalue
) == GS_ERROR
)
10141 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
10142 NULL
, is_gimple_reg
,
10143 fb_rvalue
) == GS_ERROR
)
10145 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
10147 tree elmt_size
= array_ref_element_size (t
);
10148 if (!is_gimple_min_invariant (elmt_size
))
10150 elmt_size
= unshare_expr (elmt_size
);
10152 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
,
10155 = size_int (TYPE_ALIGN_UNIT (elmt_type
));
10157 = size_binop (EXACT_DIV_EXPR
, elmt_size
,
10159 TREE_OPERAND (t
, 3) = elmt_size
;
10160 if (gimplify_expr (&TREE_OPERAND (t
, 3),
10163 fb_rvalue
) == GS_ERROR
)
10167 else if (gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
10168 NULL
, is_gimple_reg
,
10169 fb_rvalue
) == GS_ERROR
)
10172 else if (TREE_CODE (t
) == COMPONENT_REF
)
10174 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
10176 tree offset
= component_ref_field_offset (t
);
10177 if (!is_gimple_min_invariant (offset
))
10179 offset
= unshare_expr (offset
);
10180 tree field
= TREE_OPERAND (t
, 1);
10182 = size_int (DECL_OFFSET_ALIGN (field
)
10184 offset
= size_binop (EXACT_DIV_EXPR
, offset
,
10186 TREE_OPERAND (t
, 2) = offset
;
10187 if (gimplify_expr (&TREE_OPERAND (t
, 2),
10190 fb_rvalue
) == GS_ERROR
)
10194 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
10195 NULL
, is_gimple_reg
,
10196 fb_rvalue
) == GS_ERROR
)
10200 for (; expr_stack
.length () > 0; )
10202 tree t
= expr_stack
.pop ();
10204 if (TREE_CODE (t
) == ARRAY_REF
10205 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
10207 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1))
10208 && gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
,
10209 NULL
, is_gimple_val
,
10210 fb_rvalue
) == GS_ERROR
)
10215 else if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
,
10216 fb_lvalue
) == GS_ERROR
)
10222 /* If this was of the form map(*pointer_to_struct), then the
10223 'pointer_to_struct' DECL should be considered deref'ed. */
10224 if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALLOC
10225 || GOMP_MAP_COPY_TO_P (OMP_CLAUSE_MAP_KIND (c
))
10226 || GOMP_MAP_COPY_FROM_P (OMP_CLAUSE_MAP_KIND (c
)))
10227 && INDIRECT_REF_P (orig_decl
)
10228 && DECL_P (TREE_OPERAND (orig_decl
, 0))
10229 && TREE_CODE (TREE_TYPE (orig_decl
)) == RECORD_TYPE
)
10231 tree ptr
= TREE_OPERAND (orig_decl
, 0);
10232 if (!struct_deref_set
|| !struct_deref_set
->contains (ptr
))
10234 if (!struct_deref_set
)
10235 struct_deref_set
= new hash_set
<tree
> ();
10236 struct_deref_set
->add (ptr
);
10241 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
10242 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH_DETACH
10243 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
10244 && OMP_CLAUSE_CHAIN (c
)
10245 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
10246 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
10247 == GOMP_MAP_ALWAYS_POINTER
)
10248 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
10249 == GOMP_MAP_ATTACH_DETACH
)
10250 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
10251 == GOMP_MAP_TO_PSET
)))
10252 prev_list_p
= list_p
;
10258 /* DECL_P (decl) == true */
10260 if (struct_map_to_clause
10261 && (sc
= struct_map_to_clause
->get (decl
)) != NULL
10262 && OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_STRUCT
10263 && decl
== OMP_CLAUSE_DECL (*sc
))
10265 /* We have found a map of the whole structure after a
10266 leading GOMP_MAP_STRUCT has been created, so refill the
10267 leading clause into a map of the whole structure
10268 variable, and remove the current one.
10269 TODO: we should be able to remove some maps of the
10270 following structure element maps if they are of
10271 compatible TO/FROM/ALLOC type. */
10272 OMP_CLAUSE_SET_MAP_KIND (*sc
, OMP_CLAUSE_MAP_KIND (c
));
10273 OMP_CLAUSE_SIZE (*sc
) = unshare_expr (OMP_CLAUSE_SIZE (c
));
10278 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
10279 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
10280 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
10281 flags
|= GOVD_MAP_ALWAYS_TO
;
10283 if ((code
== OMP_TARGET
10284 || code
== OMP_TARGET_DATA
10285 || code
== OMP_TARGET_ENTER_DATA
10286 || code
== OMP_TARGET_EXIT_DATA
)
10287 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
10289 for (struct gimplify_omp_ctx
*octx
= outer_ctx
; octx
;
10290 octx
= octx
->outer_context
)
10293 = splay_tree_lookup (octx
->variables
,
10294 (splay_tree_key
) OMP_CLAUSE_DECL (c
));
10295 /* If this is contained in an outer OpenMP region as a
10296 firstprivate value, remove the attach/detach. */
10297 if (n
&& (n
->value
& GOVD_FIRSTPRIVATE
))
10299 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
10304 enum gomp_map_kind map_kind
= (code
== OMP_TARGET_EXIT_DATA
10306 : GOMP_MAP_ATTACH
);
10307 OMP_CLAUSE_SET_MAP_KIND (c
, map_kind
);
10312 case OMP_CLAUSE_AFFINITY
:
10313 gimplify_omp_affinity (list_p
, pre_p
);
10316 case OMP_CLAUSE_DEPEND
:
10317 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
10319 tree deps
= OMP_CLAUSE_DECL (c
);
10320 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
10322 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
10323 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
10324 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
10325 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
10326 deps
= TREE_CHAIN (deps
);
10330 else if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
10332 if (handled_depend_iterators
== -1)
10333 handled_depend_iterators
= gimplify_omp_depend (list_p
, pre_p
);
10334 if (handled_depend_iterators
)
10336 if (handled_depend_iterators
== 2)
10340 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
10342 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
10343 NULL
, is_gimple_val
, fb_rvalue
);
10344 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
10346 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
10351 if (OMP_CLAUSE_DECL (c
) != null_pointer_node
)
10353 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
10354 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
10355 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10361 if (code
== OMP_TASK
)
10362 ctx
->has_depend
= true;
10365 case OMP_CLAUSE_TO
:
10366 case OMP_CLAUSE_FROM
:
10367 case OMP_CLAUSE__CACHE_
:
10368 decl
= OMP_CLAUSE_DECL (c
);
10369 if (error_operand_p (decl
))
10374 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
10375 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
10376 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10377 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
10378 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10383 if (!DECL_P (decl
))
10385 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
10386 NULL
, is_gimple_lvalue
, fb_lvalue
)
10396 case OMP_CLAUSE_USE_DEVICE_PTR
:
10397 case OMP_CLAUSE_USE_DEVICE_ADDR
:
10398 flags
= GOVD_EXPLICIT
;
10401 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
10402 decl
= OMP_CLAUSE_DECL (c
);
10403 while (TREE_CODE (decl
) == INDIRECT_REF
10404 || TREE_CODE (decl
) == ARRAY_REF
)
10405 decl
= TREE_OPERAND (decl
, 0);
10406 flags
= GOVD_EXPLICIT
;
10409 case OMP_CLAUSE_IS_DEVICE_PTR
:
10410 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
10414 decl
= OMP_CLAUSE_DECL (c
);
10416 if (error_operand_p (decl
))
10421 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
10423 tree t
= omp_member_access_dummy_var (decl
);
10426 tree v
= DECL_VALUE_EXPR (decl
);
10427 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
10429 omp_notice_variable (outer_ctx
, t
, true);
10432 if (code
== OACC_DATA
10433 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
10434 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
10435 flags
|= GOVD_MAP_0LEN_ARRAY
;
10436 omp_add_variable (ctx
, decl
, flags
);
10437 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10438 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
10439 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
10440 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
10442 struct gimplify_omp_ctx
*pctx
10443 = code
== OMP_TARGET
? outer_ctx
: ctx
;
10445 omp_add_variable (pctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
10446 GOVD_LOCAL
| GOVD_SEEN
);
10448 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
10449 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
10451 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
10452 NULL
) == NULL_TREE
)
10453 omp_add_variable (pctx
,
10454 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
10455 GOVD_LOCAL
| GOVD_SEEN
);
10456 gimplify_omp_ctxp
= pctx
;
10457 push_gimplify_context ();
10459 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
10460 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
10462 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
10463 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
10464 pop_gimplify_context
10465 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
10466 push_gimplify_context ();
10467 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
10468 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
10469 pop_gimplify_context
10470 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
10471 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
10472 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
10474 gimplify_omp_ctxp
= outer_ctx
;
10476 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
10477 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
10479 gimplify_omp_ctxp
= ctx
;
10480 push_gimplify_context ();
10481 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
10483 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
10485 TREE_SIDE_EFFECTS (bind
) = 1;
10486 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
10487 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
10489 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
10490 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
10491 pop_gimplify_context
10492 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
10493 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
10495 gimplify_omp_ctxp
= outer_ctx
;
10497 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10498 && OMP_CLAUSE_LINEAR_STMT (c
))
10500 gimplify_omp_ctxp
= ctx
;
10501 push_gimplify_context ();
10502 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
10504 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
10506 TREE_SIDE_EFFECTS (bind
) = 1;
10507 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
10508 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
10510 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
10511 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
10512 pop_gimplify_context
10513 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
10514 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
10516 gimplify_omp_ctxp
= outer_ctx
;
10522 case OMP_CLAUSE_COPYIN
:
10523 case OMP_CLAUSE_COPYPRIVATE
:
10524 decl
= OMP_CLAUSE_DECL (c
);
10525 if (error_operand_p (decl
))
10530 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
10532 && !omp_check_private (ctx
, decl
, true))
10535 if (is_global_var (decl
))
10537 if (DECL_THREAD_LOCAL_P (decl
))
10539 else if (DECL_HAS_VALUE_EXPR_P (decl
))
10541 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
10545 && DECL_THREAD_LOCAL_P (value
))
10550 error_at (OMP_CLAUSE_LOCATION (c
),
10551 "copyprivate variable %qE is not threadprivate"
10552 " or private in outer context", DECL_NAME (decl
));
10555 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10556 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
10557 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
10559 && ((region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
10560 || (region_type
== ORT_WORKSHARE
10561 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10562 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
10563 || code
== OMP_LOOP
)))
10564 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
10565 || (code
== OMP_LOOP
10566 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10567 && ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
10568 == ORT_COMBINED_TEAMS
))))
10571 = splay_tree_lookup (outer_ctx
->variables
,
10572 (splay_tree_key
)decl
);
10573 if (on
== NULL
|| (on
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
10575 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10576 && TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
10577 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
10578 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10579 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
10580 == POINTER_TYPE
))))
10581 omp_firstprivatize_variable (outer_ctx
, decl
);
10584 omp_add_variable (outer_ctx
, decl
,
10585 GOVD_SEEN
| GOVD_SHARED
);
10586 if (outer_ctx
->outer_context
)
10587 omp_notice_variable (outer_ctx
->outer_context
, decl
,
10593 omp_notice_variable (outer_ctx
, decl
, true);
10594 if (check_non_private
10595 && (region_type
== ORT_WORKSHARE
|| code
== OMP_SCOPE
)
10596 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
10597 || decl
== OMP_CLAUSE_DECL (c
)
10598 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
10599 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
10601 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
10602 == POINTER_PLUS_EXPR
10603 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
10604 (OMP_CLAUSE_DECL (c
), 0), 0))
10606 && omp_check_private (ctx
, decl
, false))
10608 error ("%s variable %qE is private in outer context",
10609 check_non_private
, DECL_NAME (decl
));
10614 case OMP_CLAUSE_DETACH
:
10615 flags
= GOVD_FIRSTPRIVATE
| GOVD_SEEN
;
10618 case OMP_CLAUSE_IF
:
10619 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
10620 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
10623 for (int i
= 0; i
< 2; i
++)
10624 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
10626 case VOID_CST
: p
[i
] = "cancel"; break;
10627 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
10628 case OMP_SIMD
: p
[i
] = "simd"; break;
10629 case OMP_TASK
: p
[i
] = "task"; break;
10630 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
10631 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
10632 case OMP_TARGET
: p
[i
] = "target"; break;
10633 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
10634 case OMP_TARGET_ENTER_DATA
:
10635 p
[i
] = "target enter data"; break;
10636 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
10637 default: gcc_unreachable ();
10639 error_at (OMP_CLAUSE_LOCATION (c
),
10640 "expected %qs %<if%> clause modifier rather than %qs",
10644 /* Fall through. */
10646 case OMP_CLAUSE_FINAL
:
10647 OMP_CLAUSE_OPERAND (c
, 0)
10648 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
10649 /* Fall through. */
10651 case OMP_CLAUSE_NUM_TEAMS
:
10652 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
10653 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
10654 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)))
10656 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)))
10661 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
10662 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
),
10663 pre_p
, NULL
, true);
10665 /* Fall through. */
10667 case OMP_CLAUSE_SCHEDULE
:
10668 case OMP_CLAUSE_NUM_THREADS
:
10669 case OMP_CLAUSE_THREAD_LIMIT
:
10670 case OMP_CLAUSE_DIST_SCHEDULE
:
10671 case OMP_CLAUSE_DEVICE
:
10672 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEVICE
10673 && OMP_CLAUSE_DEVICE_ANCESTOR (c
))
10675 if (code
!= OMP_TARGET
)
10677 error_at (OMP_CLAUSE_LOCATION (c
),
10678 "%<device%> clause with %<ancestor%> is only "
10679 "allowed on %<target%> construct");
10684 tree clauses
= *orig_list_p
;
10685 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
10686 if (OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_DEVICE
10687 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_FIRSTPRIVATE
10688 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_PRIVATE
10689 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_DEFAULTMAP
10690 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_MAP
10693 error_at (OMP_CLAUSE_LOCATION (c
),
10694 "with %<ancestor%>, only the %<device%>, "
10695 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
10696 "and %<map%> clauses may appear on the "
10702 /* Fall through. */
10704 case OMP_CLAUSE_PRIORITY
:
10705 case OMP_CLAUSE_GRAINSIZE
:
10706 case OMP_CLAUSE_NUM_TASKS
:
10707 case OMP_CLAUSE_FILTER
:
10708 case OMP_CLAUSE_HINT
:
10709 case OMP_CLAUSE_ASYNC
:
10710 case OMP_CLAUSE_WAIT
:
10711 case OMP_CLAUSE_NUM_GANGS
:
10712 case OMP_CLAUSE_NUM_WORKERS
:
10713 case OMP_CLAUSE_VECTOR_LENGTH
:
10714 case OMP_CLAUSE_WORKER
:
10715 case OMP_CLAUSE_VECTOR
:
10716 if (OMP_CLAUSE_OPERAND (c
, 0)
10717 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c
, 0)))
10719 if (error_operand_p (OMP_CLAUSE_OPERAND (c
, 0)))
10724 /* All these clauses care about value, not a particular decl,
10725 so try to force it into a SSA_NAME or fresh temporary. */
10726 OMP_CLAUSE_OPERAND (c
, 0)
10727 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c
, 0),
10728 pre_p
, NULL
, true);
10732 case OMP_CLAUSE_GANG
:
10733 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
10734 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10736 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
10737 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10741 case OMP_CLAUSE_NOWAIT
:
10745 case OMP_CLAUSE_ORDERED
:
10746 case OMP_CLAUSE_UNTIED
:
10747 case OMP_CLAUSE_COLLAPSE
:
10748 case OMP_CLAUSE_TILE
:
10749 case OMP_CLAUSE_AUTO
:
10750 case OMP_CLAUSE_SEQ
:
10751 case OMP_CLAUSE_INDEPENDENT
:
10752 case OMP_CLAUSE_MERGEABLE
:
10753 case OMP_CLAUSE_PROC_BIND
:
10754 case OMP_CLAUSE_SAFELEN
:
10755 case OMP_CLAUSE_SIMDLEN
:
10756 case OMP_CLAUSE_NOGROUP
:
10757 case OMP_CLAUSE_THREADS
:
10758 case OMP_CLAUSE_SIMD
:
10759 case OMP_CLAUSE_BIND
:
10760 case OMP_CLAUSE_IF_PRESENT
:
10761 case OMP_CLAUSE_FINALIZE
:
10764 case OMP_CLAUSE_ORDER
:
10765 ctx
->order_concurrent
= true;
10768 case OMP_CLAUSE_DEFAULTMAP
:
10769 enum gimplify_defaultmap_kind gdmkmin
, gdmkmax
;
10770 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c
))
10772 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
:
10773 gdmkmin
= GDMK_SCALAR
;
10774 gdmkmax
= GDMK_POINTER
;
10776 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
:
10777 gdmkmin
= GDMK_SCALAR
;
10778 gdmkmax
= GDMK_SCALAR_TARGET
;
10780 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
:
10781 gdmkmin
= gdmkmax
= GDMK_AGGREGATE
;
10783 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
:
10784 gdmkmin
= gdmkmax
= GDMK_ALLOCATABLE
;
10786 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
:
10787 gdmkmin
= gdmkmax
= GDMK_POINTER
;
10790 gcc_unreachable ();
10792 for (int gdmk
= gdmkmin
; gdmk
<= gdmkmax
; gdmk
++)
10793 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c
))
10795 case OMP_CLAUSE_DEFAULTMAP_ALLOC
:
10796 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_ALLOC_ONLY
;
10798 case OMP_CLAUSE_DEFAULTMAP_TO
:
10799 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_TO_ONLY
;
10801 case OMP_CLAUSE_DEFAULTMAP_FROM
:
10802 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FROM_ONLY
;
10804 case OMP_CLAUSE_DEFAULTMAP_TOFROM
:
10805 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
10807 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
:
10808 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
10810 case OMP_CLAUSE_DEFAULTMAP_NONE
:
10811 ctx
->defaultmap
[gdmk
] = 0;
10813 case OMP_CLAUSE_DEFAULTMAP_DEFAULT
:
10817 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
10819 case GDMK_SCALAR_TARGET
:
10820 ctx
->defaultmap
[gdmk
] = (lang_GNU_Fortran ()
10821 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
10823 case GDMK_AGGREGATE
:
10824 case GDMK_ALLOCATABLE
:
10825 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
10828 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
10829 if (!lang_GNU_Fortran ())
10830 ctx
->defaultmap
[gdmk
] |= GOVD_MAP_0LEN_ARRAY
;
10833 gcc_unreachable ();
10837 gcc_unreachable ();
10841 case OMP_CLAUSE_ALIGNED
:
10842 decl
= OMP_CLAUSE_DECL (c
);
10843 if (error_operand_p (decl
))
10848 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
10849 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10854 if (!is_global_var (decl
)
10855 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
10856 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
10859 case OMP_CLAUSE_NONTEMPORAL
:
10860 decl
= OMP_CLAUSE_DECL (c
);
10861 if (error_operand_p (decl
))
10866 omp_add_variable (ctx
, decl
, GOVD_NONTEMPORAL
);
10869 case OMP_CLAUSE_ALLOCATE
:
10870 decl
= OMP_CLAUSE_DECL (c
);
10871 if (error_operand_p (decl
))
10876 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
), pre_p
, NULL
,
10877 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10882 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
10883 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
10886 else if (code
== OMP_TASKLOOP
10887 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
10888 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
10889 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
10890 pre_p
, NULL
, false);
10893 case OMP_CLAUSE_DEFAULT
:
10894 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
10897 case OMP_CLAUSE_INCLUSIVE
:
10898 case OMP_CLAUSE_EXCLUSIVE
:
10899 decl
= OMP_CLAUSE_DECL (c
);
10901 splay_tree_node n
= splay_tree_lookup (outer_ctx
->variables
,
10902 (splay_tree_key
) decl
);
10903 if (n
== NULL
|| (n
->value
& GOVD_REDUCTION
) == 0)
10905 error_at (OMP_CLAUSE_LOCATION (c
),
10906 "%qD specified in %qs clause but not in %<inscan%> "
10907 "%<reduction%> clause on the containing construct",
10908 decl
, omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
10913 n
->value
|= GOVD_REDUCTION_INSCAN
;
10914 if (outer_ctx
->region_type
== ORT_SIMD
10915 && outer_ctx
->outer_context
10916 && outer_ctx
->outer_context
->region_type
== ORT_WORKSHARE
)
10918 n
= splay_tree_lookup (outer_ctx
->outer_context
->variables
,
10919 (splay_tree_key
) decl
);
10920 if (n
&& (n
->value
& GOVD_REDUCTION
) != 0)
10921 n
->value
|= GOVD_REDUCTION_INSCAN
;
10927 case OMP_CLAUSE_NOHOST
:
10929 gcc_unreachable ();
10932 if (code
== OACC_DATA
10933 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
10934 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
10935 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10938 *list_p
= OMP_CLAUSE_CHAIN (c
);
10940 list_p
= &OMP_CLAUSE_CHAIN (c
);
10943 ctx
->clauses
= *orig_list_p
;
10944 gimplify_omp_ctxp
= ctx
;
10945 if (struct_seen_clause
)
10946 delete struct_seen_clause
;
10947 if (struct_map_to_clause
)
10948 delete struct_map_to_clause
;
10949 if (struct_deref_set
)
10950 delete struct_deref_set
;
10953 /* Return true if DECL is a candidate for shared to firstprivate
10954 optimization. We only consider non-addressable scalars, not
10955 too big, and not references. */
10958 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
10960 if (TREE_ADDRESSABLE (decl
))
10962 tree type
= TREE_TYPE (decl
);
10963 if (!is_gimple_reg_type (type
)
10964 || TREE_CODE (type
) == REFERENCE_TYPE
10965 || TREE_ADDRESSABLE (type
))
10967 /* Don't optimize too large decls, as each thread/task will have
10969 HOST_WIDE_INT len
= int_size_in_bytes (type
);
10970 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
10972 if (omp_privatize_by_reference (decl
))
10977 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
10978 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
10979 GOVD_WRITTEN in outer contexts. */
10982 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
10984 for (; ctx
; ctx
= ctx
->outer_context
)
10986 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
10987 (splay_tree_key
) decl
);
10990 else if (n
->value
& GOVD_SHARED
)
10992 n
->value
|= GOVD_WRITTEN
;
10995 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
11000 /* Helper callback for walk_gimple_seq to discover possible stores
11001 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
11002 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
11006 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
11008 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
11010 *walk_subtrees
= 0;
11017 if (handled_component_p (op
))
11018 op
= TREE_OPERAND (op
, 0);
11019 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
11020 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
11021 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
11026 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
11029 omp_mark_stores (gimplify_omp_ctxp
, op
);
11033 /* Helper callback for walk_gimple_seq to discover possible stores
11034 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
11035 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
11039 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
11040 bool *handled_ops_p
,
11041 struct walk_stmt_info
*wi
)
11043 gimple
*stmt
= gsi_stmt (*gsi_p
);
11044 switch (gimple_code (stmt
))
11046 /* Don't recurse on OpenMP constructs for which
11047 gimplify_adjust_omp_clauses already handled the bodies,
11048 except handle gimple_omp_for_pre_body. */
11049 case GIMPLE_OMP_FOR
:
11050 *handled_ops_p
= true;
11051 if (gimple_omp_for_pre_body (stmt
))
11052 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
11053 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
11055 case GIMPLE_OMP_PARALLEL
:
11056 case GIMPLE_OMP_TASK
:
11057 case GIMPLE_OMP_SECTIONS
:
11058 case GIMPLE_OMP_SINGLE
:
11059 case GIMPLE_OMP_SCOPE
:
11060 case GIMPLE_OMP_TARGET
:
11061 case GIMPLE_OMP_TEAMS
:
11062 case GIMPLE_OMP_CRITICAL
:
11063 *handled_ops_p
= true;
11071 struct gimplify_adjust_omp_clauses_data
11077 /* For all variables that were not actually used within the context,
11078 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
11081 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
11083 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
11085 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
11086 tree decl
= (tree
) n
->key
;
11087 unsigned flags
= n
->value
;
11088 enum omp_clause_code code
;
11090 bool private_debug
;
11092 if (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
11093 && (flags
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0)
11094 flags
= GOVD_SHARED
| GOVD_SEEN
| GOVD_WRITTEN
;
11095 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
11097 if ((flags
& GOVD_SEEN
) == 0)
11099 if ((flags
& GOVD_MAP_HAS_ATTACHMENTS
) != 0)
11101 if (flags
& GOVD_DEBUG_PRIVATE
)
11103 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
11104 private_debug
= true;
11106 else if (flags
& GOVD_MAP
)
11107 private_debug
= false;
11110 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
11111 !!(flags
& GOVD_SHARED
));
11113 code
= OMP_CLAUSE_PRIVATE
;
11114 else if (flags
& GOVD_MAP
)
11116 code
= OMP_CLAUSE_MAP
;
11117 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
11118 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
11120 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
11124 && DECL_IN_CONSTANT_POOL (decl
)
11125 && !lookup_attribute ("omp declare target",
11126 DECL_ATTRIBUTES (decl
)))
11128 tree id
= get_identifier ("omp declare target");
11129 DECL_ATTRIBUTES (decl
)
11130 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
11131 varpool_node
*node
= varpool_node::get (decl
);
11134 node
->offloadable
= 1;
11135 if (ENABLE_OFFLOADING
)
11136 g
->have_offload
= true;
11140 else if (flags
& GOVD_SHARED
)
11142 if (is_global_var (decl
))
11144 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
11145 while (ctx
!= NULL
)
11148 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11149 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
11150 | GOVD_PRIVATE
| GOVD_REDUCTION
11151 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
11153 ctx
= ctx
->outer_context
;
11158 code
= OMP_CLAUSE_SHARED
;
11159 /* Don't optimize shared into firstprivate for read-only vars
11160 on tasks with depend clause, we shouldn't try to copy them
11161 until the dependencies are satisfied. */
11162 if (gimplify_omp_ctxp
->has_depend
)
11163 flags
|= GOVD_WRITTEN
;
11165 else if (flags
& GOVD_PRIVATE
)
11166 code
= OMP_CLAUSE_PRIVATE
;
11167 else if (flags
& GOVD_FIRSTPRIVATE
)
11169 code
= OMP_CLAUSE_FIRSTPRIVATE
;
11170 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
11171 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
11172 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
11174 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
11175 "%<target%> construct", decl
);
11179 else if (flags
& GOVD_LASTPRIVATE
)
11180 code
= OMP_CLAUSE_LASTPRIVATE
;
11181 else if (flags
& (GOVD_ALIGNED
| GOVD_NONTEMPORAL
))
11183 else if (flags
& GOVD_CONDTEMP
)
11185 code
= OMP_CLAUSE__CONDTEMP_
;
11186 gimple_add_tmp_var (decl
);
11189 gcc_unreachable ();
11191 if (((flags
& GOVD_LASTPRIVATE
)
11192 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
11193 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
11194 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
11196 tree chain
= *list_p
;
11197 clause
= build_omp_clause (input_location
, code
);
11198 OMP_CLAUSE_DECL (clause
) = decl
;
11199 OMP_CLAUSE_CHAIN (clause
) = chain
;
11201 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
11202 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
11203 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
11204 else if (code
== OMP_CLAUSE_SHARED
11205 && (flags
& GOVD_WRITTEN
) == 0
11206 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
11207 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
11208 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
11209 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
11210 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
11212 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
11213 OMP_CLAUSE_DECL (nc
) = decl
;
11214 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
11215 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
11216 OMP_CLAUSE_DECL (clause
)
11217 = build_simple_mem_ref_loc (input_location
, decl
);
11218 OMP_CLAUSE_DECL (clause
)
11219 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
11220 build_int_cst (build_pointer_type (char_type_node
), 0));
11221 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
11222 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
11223 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
11224 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
11225 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
11226 OMP_CLAUSE_CHAIN (nc
) = chain
;
11227 OMP_CLAUSE_CHAIN (clause
) = nc
;
11228 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11229 gimplify_omp_ctxp
= ctx
->outer_context
;
11230 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
11231 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
11232 gimplify_omp_ctxp
= ctx
;
11234 else if (code
== OMP_CLAUSE_MAP
)
11237 /* Not all combinations of these GOVD_MAP flags are actually valid. */
11238 switch (flags
& (GOVD_MAP_TO_ONLY
11240 | GOVD_MAP_FORCE_PRESENT
11241 | GOVD_MAP_ALLOC_ONLY
11242 | GOVD_MAP_FROM_ONLY
))
11245 kind
= GOMP_MAP_TOFROM
;
11247 case GOVD_MAP_FORCE
:
11248 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
11250 case GOVD_MAP_TO_ONLY
:
11251 kind
= GOMP_MAP_TO
;
11253 case GOVD_MAP_FROM_ONLY
:
11254 kind
= GOMP_MAP_FROM
;
11256 case GOVD_MAP_ALLOC_ONLY
:
11257 kind
= GOMP_MAP_ALLOC
;
11259 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
11260 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
11262 case GOVD_MAP_FORCE_PRESENT
:
11263 kind
= GOMP_MAP_FORCE_PRESENT
;
11266 gcc_unreachable ();
11268 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
11269 /* Setting of the implicit flag for the runtime is currently disabled for
11271 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0)
11272 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause
) = 1;
11273 if (DECL_SIZE (decl
)
11274 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
11276 tree decl2
= DECL_VALUE_EXPR (decl
);
11277 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
11278 decl2
= TREE_OPERAND (decl2
, 0);
11279 gcc_assert (DECL_P (decl2
));
11280 tree mem
= build_simple_mem_ref (decl2
);
11281 OMP_CLAUSE_DECL (clause
) = mem
;
11282 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
11283 if (gimplify_omp_ctxp
->outer_context
)
11285 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
11286 omp_notice_variable (ctx
, decl2
, true);
11287 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
11289 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
11291 OMP_CLAUSE_DECL (nc
) = decl
;
11292 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
11293 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
11294 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
11296 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
11297 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
11298 OMP_CLAUSE_CHAIN (clause
) = nc
;
11300 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
11301 && omp_privatize_by_reference (decl
))
11303 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
11304 OMP_CLAUSE_SIZE (clause
)
11305 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
11306 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11307 gimplify_omp_ctxp
= ctx
->outer_context
;
11308 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
11309 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
11310 gimplify_omp_ctxp
= ctx
;
11311 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
11313 OMP_CLAUSE_DECL (nc
) = decl
;
11314 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
11315 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
11316 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
11317 OMP_CLAUSE_CHAIN (clause
) = nc
;
11320 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
11322 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
11324 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
11325 OMP_CLAUSE_DECL (nc
) = decl
;
11326 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
11327 OMP_CLAUSE_CHAIN (nc
) = chain
;
11328 OMP_CLAUSE_CHAIN (clause
) = nc
;
11329 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11330 gimplify_omp_ctxp
= ctx
->outer_context
;
11331 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
11332 (ctx
->region_type
& ORT_ACC
) != 0);
11333 gimplify_omp_ctxp
= ctx
;
11336 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11337 gimplify_omp_ctxp
= ctx
->outer_context
;
11338 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
11339 in simd. Those are only added for the local vars inside of simd body
11340 and they don't need to be e.g. default constructible. */
11341 if (code
!= OMP_CLAUSE_PRIVATE
|| ctx
->region_type
!= ORT_SIMD
)
11342 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
,
11343 (ctx
->region_type
& ORT_ACC
) != 0);
11344 if (gimplify_omp_ctxp
)
11345 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
11346 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
11347 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
11348 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
11350 gimplify_omp_ctxp
= ctx
;
11355 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
11356 enum tree_code code
)
11358 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11359 tree
*orig_list_p
= list_p
;
11361 bool has_inscan_reductions
= false;
11365 struct gimplify_omp_ctx
*octx
;
11366 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
11367 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
11371 struct walk_stmt_info wi
;
11372 memset (&wi
, 0, sizeof (wi
));
11373 walk_gimple_seq (body
, omp_find_stores_stmt
,
11374 omp_find_stores_op
, &wi
);
11378 if (ctx
->add_safelen1
)
11380 /* If there are VLAs in the body of simd loop, prevent
11382 gcc_assert (ctx
->region_type
== ORT_SIMD
);
11383 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
11384 OMP_CLAUSE_SAFELEN_EXPR (c
) = integer_one_node
;
11385 OMP_CLAUSE_CHAIN (c
) = *list_p
;
11387 list_p
= &OMP_CLAUSE_CHAIN (c
);
11390 if (ctx
->region_type
== ORT_WORKSHARE
11391 && ctx
->outer_context
11392 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
)
11394 for (c
= ctx
->outer_context
->clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11395 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11396 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
11398 decl
= OMP_CLAUSE_DECL (c
);
11400 = splay_tree_lookup (ctx
->outer_context
->variables
,
11401 (splay_tree_key
) decl
);
11402 gcc_checking_assert (!splay_tree_lookup (ctx
->variables
,
11403 (splay_tree_key
) decl
));
11404 omp_add_variable (ctx
, decl
, n
->value
);
11405 tree c2
= copy_node (c
);
11406 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
11408 if ((n
->value
& GOVD_FIRSTPRIVATE
) == 0)
11410 c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
11411 OMP_CLAUSE_FIRSTPRIVATE
);
11412 OMP_CLAUSE_DECL (c2
) = decl
;
11413 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
11417 while ((c
= *list_p
) != NULL
)
11420 bool remove
= false;
11422 switch (OMP_CLAUSE_CODE (c
))
11424 case OMP_CLAUSE_FIRSTPRIVATE
:
11425 if ((ctx
->region_type
& ORT_TARGET
)
11426 && (ctx
->region_type
& ORT_ACC
) == 0
11427 && TYPE_ATOMIC (strip_array_types
11428 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
11430 error_at (OMP_CLAUSE_LOCATION (c
),
11431 "%<_Atomic%> %qD in %<firstprivate%> clause on "
11432 "%<target%> construct", OMP_CLAUSE_DECL (c
));
11436 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
11438 decl
= OMP_CLAUSE_DECL (c
);
11439 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11440 if ((n
->value
& GOVD_MAP
) != 0)
11445 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c
) = 0;
11446 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
) = 0;
11449 case OMP_CLAUSE_PRIVATE
:
11450 case OMP_CLAUSE_SHARED
:
11451 case OMP_CLAUSE_LINEAR
:
11452 decl
= OMP_CLAUSE_DECL (c
);
11453 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11454 remove
= !(n
->value
& GOVD_SEEN
);
11455 if ((n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0
11456 && code
== OMP_PARALLEL
11457 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
11461 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
11462 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
11463 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
11465 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
11466 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
11468 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
11469 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
11471 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
11474 n
->value
|= GOVD_WRITTEN
;
11475 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
11476 && (n
->value
& GOVD_WRITTEN
) == 0
11478 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
11479 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
11480 else if (DECL_P (decl
)
11481 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
11482 && (n
->value
& GOVD_WRITTEN
) != 0)
11483 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11484 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
11485 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
11486 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
11489 n
->value
&= ~GOVD_EXPLICIT
;
11492 case OMP_CLAUSE_LASTPRIVATE
:
11493 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
11494 accurately reflect the presence of a FIRSTPRIVATE clause. */
11495 decl
= OMP_CLAUSE_DECL (c
);
11496 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11497 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
11498 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
11499 if (code
== OMP_DISTRIBUTE
11500 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
11503 error_at (OMP_CLAUSE_LOCATION (c
),
11504 "same variable used in %<firstprivate%> and "
11505 "%<lastprivate%> clauses on %<distribute%> "
11509 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11511 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
11512 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
11513 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) && code
== OMP_PARALLEL
)
11517 case OMP_CLAUSE_ALIGNED
:
11518 decl
= OMP_CLAUSE_DECL (c
);
11519 if (!is_global_var (decl
))
11521 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11522 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
11523 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
11525 struct gimplify_omp_ctx
*octx
;
11527 && (n
->value
& (GOVD_DATA_SHARE_CLASS
11528 & ~GOVD_FIRSTPRIVATE
)))
11531 for (octx
= ctx
->outer_context
; octx
;
11532 octx
= octx
->outer_context
)
11534 n
= splay_tree_lookup (octx
->variables
,
11535 (splay_tree_key
) decl
);
11538 if (n
->value
& GOVD_LOCAL
)
11540 /* We have to avoid assigning a shared variable
11541 to itself when trying to add
11542 __builtin_assume_aligned. */
11543 if (n
->value
& GOVD_SHARED
)
11551 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
11553 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11554 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
11559 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
11560 decl
= OMP_CLAUSE_DECL (c
);
11561 while (TREE_CODE (decl
) == INDIRECT_REF
11562 || TREE_CODE (decl
) == ARRAY_REF
)
11563 decl
= TREE_OPERAND (decl
, 0);
11564 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11565 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
11568 case OMP_CLAUSE_IS_DEVICE_PTR
:
11569 case OMP_CLAUSE_NONTEMPORAL
:
11570 decl
= OMP_CLAUSE_DECL (c
);
11571 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11572 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
11575 case OMP_CLAUSE_MAP
:
11576 if (code
== OMP_TARGET_EXIT_DATA
11577 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
11582 decl
= OMP_CLAUSE_DECL (c
);
11583 /* Data clauses associated with reductions must be
11584 compatible with present_or_copy. Warn and adjust the clause
11585 if that is not the case. */
11586 if (ctx
->region_type
== ORT_ACC_PARALLEL
11587 || ctx
->region_type
== ORT_ACC_SERIAL
)
11589 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
11593 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
11595 if (n
&& (n
->value
& GOVD_REDUCTION
))
11597 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
11599 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
11600 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
11601 && kind
!= GOMP_MAP_FORCE_PRESENT
11602 && kind
!= GOMP_MAP_POINTER
)
11604 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
11605 "incompatible data clause with reduction "
11606 "on %qE; promoting to %<present_or_copy%>",
11608 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
11612 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
11613 && (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
))
11618 if (!DECL_P (decl
))
11620 if ((ctx
->region_type
& ORT_TARGET
) != 0
11621 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
11623 if (TREE_CODE (decl
) == INDIRECT_REF
11624 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
11625 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
11626 == REFERENCE_TYPE
))
11627 decl
= TREE_OPERAND (decl
, 0);
11628 if (TREE_CODE (decl
) == COMPONENT_REF
)
11630 while (TREE_CODE (decl
) == COMPONENT_REF
)
11631 decl
= TREE_OPERAND (decl
, 0);
11634 n
= splay_tree_lookup (ctx
->variables
,
11635 (splay_tree_key
) decl
);
11636 if (!(n
->value
& GOVD_SEEN
))
11643 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11644 if ((ctx
->region_type
& ORT_TARGET
) != 0
11645 && !(n
->value
& GOVD_SEEN
)
11646 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
11647 && (!is_global_var (decl
)
11648 || !lookup_attribute ("omp declare target link",
11649 DECL_ATTRIBUTES (decl
))))
11652 /* For struct element mapping, if struct is never referenced
11653 in target block and none of the mapping has always modifier,
11654 remove all the struct element mappings, which immediately
11655 follow the GOMP_MAP_STRUCT map clause. */
11656 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
11658 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
11660 OMP_CLAUSE_CHAIN (c
)
11661 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
11664 else if (DECL_SIZE (decl
)
11665 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
11666 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
11667 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
11668 && (OMP_CLAUSE_MAP_KIND (c
)
11669 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11671 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
11672 for these, TREE_CODE (DECL_SIZE (decl)) will always be
11674 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
11676 tree decl2
= DECL_VALUE_EXPR (decl
);
11677 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
11678 decl2
= TREE_OPERAND (decl2
, 0);
11679 gcc_assert (DECL_P (decl2
));
11680 tree mem
= build_simple_mem_ref (decl2
);
11681 OMP_CLAUSE_DECL (c
) = mem
;
11682 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
11683 if (ctx
->outer_context
)
11685 omp_notice_variable (ctx
->outer_context
, decl2
, true);
11686 omp_notice_variable (ctx
->outer_context
,
11687 OMP_CLAUSE_SIZE (c
), true);
11689 if (((ctx
->region_type
& ORT_TARGET
) != 0
11690 || !ctx
->target_firstprivatize_array_bases
)
11691 && ((n
->value
& GOVD_SEEN
) == 0
11692 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
11694 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
11696 OMP_CLAUSE_DECL (nc
) = decl
;
11697 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
11698 if (ctx
->target_firstprivatize_array_bases
)
11699 OMP_CLAUSE_SET_MAP_KIND (nc
,
11700 GOMP_MAP_FIRSTPRIVATE_POINTER
);
11702 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
11703 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
11704 OMP_CLAUSE_CHAIN (c
) = nc
;
11710 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
11711 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
11712 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
11713 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
11718 case OMP_CLAUSE_TO
:
11719 case OMP_CLAUSE_FROM
:
11720 case OMP_CLAUSE__CACHE_
:
11721 decl
= OMP_CLAUSE_DECL (c
);
11722 if (!DECL_P (decl
))
11724 if (DECL_SIZE (decl
)
11725 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
11727 tree decl2
= DECL_VALUE_EXPR (decl
);
11728 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
11729 decl2
= TREE_OPERAND (decl2
, 0);
11730 gcc_assert (DECL_P (decl2
));
11731 tree mem
= build_simple_mem_ref (decl2
);
11732 OMP_CLAUSE_DECL (c
) = mem
;
11733 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
11734 if (ctx
->outer_context
)
11736 omp_notice_variable (ctx
->outer_context
, decl2
, true);
11737 omp_notice_variable (ctx
->outer_context
,
11738 OMP_CLAUSE_SIZE (c
), true);
11741 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
11742 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
11745 case OMP_CLAUSE_REDUCTION
:
11746 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
11748 decl
= OMP_CLAUSE_DECL (c
);
11749 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11750 if ((n
->value
& GOVD_REDUCTION_INSCAN
) == 0)
11753 error_at (OMP_CLAUSE_LOCATION (c
),
11754 "%qD specified in %<inscan%> %<reduction%> clause "
11755 "but not in %<scan%> directive clause", decl
);
11758 has_inscan_reductions
= true;
11761 case OMP_CLAUSE_IN_REDUCTION
:
11762 case OMP_CLAUSE_TASK_REDUCTION
:
11763 decl
= OMP_CLAUSE_DECL (c
);
11764 /* OpenACC reductions need a present_or_copy data clause.
11765 Add one if necessary. Emit error when the reduction is private. */
11766 if (ctx
->region_type
== ORT_ACC_PARALLEL
11767 || ctx
->region_type
== ORT_ACC_SERIAL
)
11769 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11770 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
11773 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
11774 "reduction on %qE", DECL_NAME (decl
));
11776 else if ((n
->value
& GOVD_MAP
) == 0)
11778 tree next
= OMP_CLAUSE_CHAIN (c
);
11779 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
11780 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
11781 OMP_CLAUSE_DECL (nc
) = decl
;
11782 OMP_CLAUSE_CHAIN (c
) = nc
;
11783 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
11788 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
11789 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
11791 nc
= OMP_CLAUSE_CHAIN (nc
);
11793 OMP_CLAUSE_CHAIN (nc
) = next
;
11794 n
->value
|= GOVD_MAP
;
11798 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
11799 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
11802 case OMP_CLAUSE_ALLOCATE
:
11803 decl
= OMP_CLAUSE_DECL (c
);
11804 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11805 if (n
!= NULL
&& !(n
->value
& GOVD_SEEN
))
11807 if ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
| GOVD_LINEAR
))
11809 && (n
->value
& (GOVD_REDUCTION
| GOVD_LASTPRIVATE
)) == 0)
11813 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
11814 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)) != INTEGER_CST
11815 && ((ctx
->region_type
& (ORT_PARALLEL
| ORT_TARGET
)) != 0
11816 || (ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASK
11817 || (ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
))
11819 tree allocator
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
11820 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) allocator
);
11823 enum omp_clause_default_kind default_kind
11824 = ctx
->default_kind
;
11825 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
11826 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
11828 ctx
->default_kind
= default_kind
;
11831 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
11836 case OMP_CLAUSE_COPYIN
:
11837 case OMP_CLAUSE_COPYPRIVATE
:
11838 case OMP_CLAUSE_IF
:
11839 case OMP_CLAUSE_NUM_THREADS
:
11840 case OMP_CLAUSE_NUM_TEAMS
:
11841 case OMP_CLAUSE_THREAD_LIMIT
:
11842 case OMP_CLAUSE_DIST_SCHEDULE
:
11843 case OMP_CLAUSE_DEVICE
:
11844 case OMP_CLAUSE_SCHEDULE
:
11845 case OMP_CLAUSE_NOWAIT
:
11846 case OMP_CLAUSE_ORDERED
:
11847 case OMP_CLAUSE_DEFAULT
:
11848 case OMP_CLAUSE_UNTIED
:
11849 case OMP_CLAUSE_COLLAPSE
:
11850 case OMP_CLAUSE_FINAL
:
11851 case OMP_CLAUSE_MERGEABLE
:
11852 case OMP_CLAUSE_PROC_BIND
:
11853 case OMP_CLAUSE_SAFELEN
:
11854 case OMP_CLAUSE_SIMDLEN
:
11855 case OMP_CLAUSE_DEPEND
:
11856 case OMP_CLAUSE_PRIORITY
:
11857 case OMP_CLAUSE_GRAINSIZE
:
11858 case OMP_CLAUSE_NUM_TASKS
:
11859 case OMP_CLAUSE_NOGROUP
:
11860 case OMP_CLAUSE_THREADS
:
11861 case OMP_CLAUSE_SIMD
:
11862 case OMP_CLAUSE_FILTER
:
11863 case OMP_CLAUSE_HINT
:
11864 case OMP_CLAUSE_DEFAULTMAP
:
11865 case OMP_CLAUSE_ORDER
:
11866 case OMP_CLAUSE_BIND
:
11867 case OMP_CLAUSE_DETACH
:
11868 case OMP_CLAUSE_USE_DEVICE_PTR
:
11869 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11870 case OMP_CLAUSE_ASYNC
:
11871 case OMP_CLAUSE_WAIT
:
11872 case OMP_CLAUSE_INDEPENDENT
:
11873 case OMP_CLAUSE_NUM_GANGS
:
11874 case OMP_CLAUSE_NUM_WORKERS
:
11875 case OMP_CLAUSE_VECTOR_LENGTH
:
11876 case OMP_CLAUSE_GANG
:
11877 case OMP_CLAUSE_WORKER
:
11878 case OMP_CLAUSE_VECTOR
:
11879 case OMP_CLAUSE_AUTO
:
11880 case OMP_CLAUSE_SEQ
:
11881 case OMP_CLAUSE_TILE
:
11882 case OMP_CLAUSE_IF_PRESENT
:
11883 case OMP_CLAUSE_FINALIZE
:
11884 case OMP_CLAUSE_INCLUSIVE
:
11885 case OMP_CLAUSE_EXCLUSIVE
:
11888 case OMP_CLAUSE_NOHOST
:
11890 gcc_unreachable ();
11894 *list_p
= OMP_CLAUSE_CHAIN (c
);
11896 list_p
= &OMP_CLAUSE_CHAIN (c
);
11899 /* Add in any implicit data sharing. */
11900 struct gimplify_adjust_omp_clauses_data data
;
11901 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0)
11903 /* OpenMP. Implicit clauses are added at the start of the clause list,
11904 but after any non-map clauses. */
11905 tree
*implicit_add_list_p
= orig_list_p
;
11906 while (*implicit_add_list_p
11907 && OMP_CLAUSE_CODE (*implicit_add_list_p
) != OMP_CLAUSE_MAP
)
11908 implicit_add_list_p
= &OMP_CLAUSE_CHAIN (*implicit_add_list_p
);
11909 data
.list_p
= implicit_add_list_p
;
11913 data
.list_p
= list_p
;
11914 data
.pre_p
= pre_p
;
11915 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
11917 if (has_inscan_reductions
)
11918 for (c
= *orig_list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11919 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11920 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11922 error_at (OMP_CLAUSE_LOCATION (c
),
11923 "%<inscan%> %<reduction%> clause used together with "
11924 "%<linear%> clause for a variable other than loop "
11929 gimplify_omp_ctxp
= ctx
->outer_context
;
11930 delete_omp_context (ctx
);
11933 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
11934 -1 if unknown yet (simd is involved, won't be known until vectorization)
11935 and 1 if they do. If SCORES is non-NULL, it should point to an array
11936 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
11937 of the CONSTRUCTS (position -1 if it will never match) followed by
11938 number of constructs in the OpenMP context construct trait. If the
11939 score depends on whether it will be in a declare simd clone or not,
11940 the function returns 2 and there will be two sets of the scores, the first
11941 one for the case that it is not in a declare simd clone, the other
11942 that it is in a declare simd clone. */
11945 omp_construct_selector_matches (enum tree_code
*constructs
, int nconstructs
,
11948 int matched
= 0, cnt
= 0;
11949 bool simd_seen
= false;
11950 bool target_seen
= false;
11951 int declare_simd_cnt
= -1;
11952 auto_vec
<enum tree_code
, 16> codes
;
11953 for (struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
; ctx
;)
11955 if (((ctx
->region_type
& ORT_PARALLEL
) && ctx
->code
== OMP_PARALLEL
)
11956 || ((ctx
->region_type
& (ORT_TARGET
| ORT_IMPLICIT_TARGET
| ORT_ACC
))
11957 == ORT_TARGET
&& ctx
->code
== OMP_TARGET
)
11958 || ((ctx
->region_type
& ORT_TEAMS
) && ctx
->code
== OMP_TEAMS
)
11959 || (ctx
->region_type
== ORT_WORKSHARE
&& ctx
->code
== OMP_FOR
)
11960 || (ctx
->region_type
== ORT_SIMD
11961 && ctx
->code
== OMP_SIMD
11962 && !omp_find_clause (ctx
->clauses
, OMP_CLAUSE_BIND
)))
11966 codes
.safe_push (ctx
->code
);
11967 else if (matched
< nconstructs
&& ctx
->code
== constructs
[matched
])
11969 if (ctx
->code
== OMP_SIMD
)
11977 if (ctx
->code
== OMP_TARGET
)
11979 if (scores
== NULL
)
11980 return matched
< nconstructs
? 0 : simd_seen
? -1 : 1;
11981 target_seen
= true;
11985 else if (ctx
->region_type
== ORT_WORKSHARE
11986 && ctx
->code
== OMP_LOOP
11987 && ctx
->outer_context
11988 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
11989 && ctx
->outer_context
->outer_context
11990 && ctx
->outer_context
->outer_context
->code
== OMP_LOOP
11991 && ctx
->outer_context
->outer_context
->distribute
)
11992 ctx
= ctx
->outer_context
->outer_context
;
11993 ctx
= ctx
->outer_context
;
11996 && lookup_attribute ("omp declare simd",
11997 DECL_ATTRIBUTES (current_function_decl
)))
11999 /* Declare simd is a maybe case, it is supposed to be added only to the
12000 omp-simd-clone.cc added clones and not to the base function. */
12001 declare_simd_cnt
= cnt
++;
12003 codes
.safe_push (OMP_SIMD
);
12005 && constructs
[0] == OMP_SIMD
)
12007 gcc_assert (matched
== 0);
12009 if (++matched
== nconstructs
)
12013 if (tree attr
= lookup_attribute ("omp declare variant variant",
12014 DECL_ATTRIBUTES (current_function_decl
)))
12016 enum tree_code variant_constructs
[5];
12017 int variant_nconstructs
= 0;
12019 variant_nconstructs
12020 = omp_constructor_traits_to_codes (TREE_VALUE (attr
),
12021 variant_constructs
);
12022 for (int i
= 0; i
< variant_nconstructs
; i
++)
12026 codes
.safe_push (variant_constructs
[i
]);
12027 else if (matched
< nconstructs
12028 && variant_constructs
[i
] == constructs
[matched
])
12030 if (variant_constructs
[i
] == OMP_SIMD
)
12041 && lookup_attribute ("omp declare target block",
12042 DECL_ATTRIBUTES (current_function_decl
)))
12045 codes
.safe_push (OMP_TARGET
);
12046 else if (matched
< nconstructs
&& constructs
[matched
] == OMP_TARGET
)
12051 for (int pass
= 0; pass
< (declare_simd_cnt
== -1 ? 1 : 2); pass
++)
12053 int j
= codes
.length () - 1;
12054 for (int i
= nconstructs
- 1; i
>= 0; i
--)
12057 && (pass
!= 0 || declare_simd_cnt
!= j
)
12058 && constructs
[i
] != codes
[j
])
12060 if (pass
== 0 && declare_simd_cnt
!= -1 && j
> declare_simd_cnt
)
12065 *scores
++ = ((pass
== 0 && declare_simd_cnt
!= -1)
12066 ? codes
.length () - 1 : codes
.length ());
12068 return declare_simd_cnt
== -1 ? 1 : 2;
12070 if (matched
== nconstructs
)
12071 return simd_seen
? -1 : 1;
12075 /* Gimplify OACC_CACHE. */
12078 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
12080 tree expr
= *expr_p
;
12082 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
12084 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
12087 /* TODO: Do something sensible with this information. */
12089 *expr_p
= NULL_TREE
;
12092 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
12093 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
12094 kind. The entry kind will replace the one in CLAUSE, while the exit
12095 kind will be used in a new omp_clause and returned to the caller. */
12098 gimplify_oacc_declare_1 (tree clause
)
12100 HOST_WIDE_INT kind
, new_op
;
12104 kind
= OMP_CLAUSE_MAP_KIND (clause
);
12108 case GOMP_MAP_ALLOC
:
12109 new_op
= GOMP_MAP_RELEASE
;
12113 case GOMP_MAP_FROM
:
12114 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
12115 new_op
= GOMP_MAP_FROM
;
12119 case GOMP_MAP_TOFROM
:
12120 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
12121 new_op
= GOMP_MAP_FROM
;
12125 case GOMP_MAP_DEVICE_RESIDENT
:
12126 case GOMP_MAP_FORCE_DEVICEPTR
:
12127 case GOMP_MAP_FORCE_PRESENT
:
12128 case GOMP_MAP_LINK
:
12129 case GOMP_MAP_POINTER
:
12134 gcc_unreachable ();
12140 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
12141 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
12142 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
12148 /* Gimplify OACC_DECLARE. */
12151 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
12153 tree expr
= *expr_p
;
12155 tree clauses
, t
, decl
;
12157 clauses
= OACC_DECLARE_CLAUSES (expr
);
12159 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
12160 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
12162 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
12164 decl
= OMP_CLAUSE_DECL (t
);
12166 if (TREE_CODE (decl
) == MEM_REF
)
12167 decl
= TREE_OPERAND (decl
, 0);
12169 if (VAR_P (decl
) && !is_oacc_declared (decl
))
12171 tree attr
= get_identifier ("oacc declare target");
12172 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
12173 DECL_ATTRIBUTES (decl
));
12177 && !is_global_var (decl
)
12178 && DECL_CONTEXT (decl
) == current_function_decl
)
12180 tree c
= gimplify_oacc_declare_1 (t
);
12183 if (oacc_declare_returns
== NULL
)
12184 oacc_declare_returns
= new hash_map
<tree
, tree
>;
12186 oacc_declare_returns
->put (decl
, c
);
12190 if (gimplify_omp_ctxp
)
12191 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
12194 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
12197 gimplify_seq_add_stmt (pre_p
, stmt
);
12199 *expr_p
= NULL_TREE
;
12202 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
12203 gimplification of the body, as well as scanning the body for used
12204 variables. We need to do this scan now, because variable-sized
12205 decls will be decomposed during gimplification. */
12208 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
12210 tree expr
= *expr_p
;
12212 gimple_seq body
= NULL
;
12214 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
12215 OMP_PARALLEL_COMBINED (expr
)
12216 ? ORT_COMBINED_PARALLEL
12217 : ORT_PARALLEL
, OMP_PARALLEL
);
12219 push_gimplify_context ();
12221 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
12222 if (gimple_code (g
) == GIMPLE_BIND
)
12223 pop_gimplify_context (g
);
12225 pop_gimplify_context (NULL
);
12227 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
12230 g
= gimple_build_omp_parallel (body
,
12231 OMP_PARALLEL_CLAUSES (expr
),
12232 NULL_TREE
, NULL_TREE
);
12233 if (OMP_PARALLEL_COMBINED (expr
))
12234 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
12235 gimplify_seq_add_stmt (pre_p
, g
);
12236 *expr_p
= NULL_TREE
;
12239 /* Gimplify the contents of an OMP_TASK statement. This involves
12240 gimplification of the body, as well as scanning the body for used
12241 variables. We need to do this scan now, because variable-sized
12242 decls will be decomposed during gimplification. */
12245 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
12247 tree expr
= *expr_p
;
12249 gimple_seq body
= NULL
;
12251 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
12252 for (tree c
= OMP_TASK_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12253 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
12254 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET
)
12256 error_at (OMP_CLAUSE_LOCATION (c
),
12257 "%<mutexinoutset%> kind in %<depend%> clause on a "
12258 "%<taskwait%> construct");
12262 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
12263 omp_find_clause (OMP_TASK_CLAUSES (expr
),
12265 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
12267 if (OMP_TASK_BODY (expr
))
12269 push_gimplify_context ();
12271 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
12272 if (gimple_code (g
) == GIMPLE_BIND
)
12273 pop_gimplify_context (g
);
12275 pop_gimplify_context (NULL
);
12278 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
12281 g
= gimple_build_omp_task (body
,
12282 OMP_TASK_CLAUSES (expr
),
12283 NULL_TREE
, NULL_TREE
,
12284 NULL_TREE
, NULL_TREE
, NULL_TREE
);
12285 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
12286 gimple_omp_task_set_taskwait_p (g
, true);
12287 gimplify_seq_add_stmt (pre_p
, g
);
12288 *expr_p
= NULL_TREE
;
12291 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
12292 force it into a temporary initialized in PRE_P and add firstprivate clause
12293 to ORIG_FOR_STMT. */
12296 gimplify_omp_taskloop_expr (tree type
, tree
*tp
, gimple_seq
*pre_p
,
12297 tree orig_for_stmt
)
12299 if (*tp
== NULL
|| is_gimple_constant (*tp
))
12302 *tp
= get_initialized_tmp_var (*tp
, pre_p
, NULL
, false);
12303 /* Reference to pointer conversion is considered useless,
12304 but is significant for firstprivate clause. Force it
12307 && TREE_CODE (type
) == POINTER_TYPE
12308 && TREE_CODE (TREE_TYPE (*tp
)) == REFERENCE_TYPE
)
12310 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
12311 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
, *tp
);
12312 gimplify_and_add (m
, pre_p
);
12316 tree c
= build_omp_clause (input_location
, OMP_CLAUSE_FIRSTPRIVATE
);
12317 OMP_CLAUSE_DECL (c
) = *tp
;
12318 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
12319 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
12322 /* Gimplify the gross structure of an OMP_FOR statement. */
12324 static enum gimplify_status
12325 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
12327 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
12328 enum gimplify_status ret
= GS_ALL_DONE
;
12329 enum gimplify_status tret
;
12331 gimple_seq for_body
, for_pre_body
;
12333 bitmap has_decl_expr
= NULL
;
12334 enum omp_region_type ort
= ORT_WORKSHARE
;
12335 bool openacc
= TREE_CODE (*expr_p
) == OACC_LOOP
;
12337 orig_for_stmt
= for_stmt
= *expr_p
;
12339 bool loop_p
= (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_BIND
)
12341 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
12343 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
12344 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
12345 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
12346 find_combined_omp_for
, data
, NULL
);
12347 if (inner_for_stmt
== NULL_TREE
)
12349 gcc_assert (seen_error ());
12350 *expr_p
= NULL_TREE
;
12353 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
12355 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
12356 &OMP_FOR_PRE_BODY (for_stmt
));
12357 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
12359 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
12361 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
12362 &OMP_FOR_PRE_BODY (for_stmt
));
12363 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
12368 /* We have some statements or variable declarations in between
12369 the composite construct directives. Move them around the
12372 for (i
= 0; i
< 3; i
++)
12376 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
12377 data
[i
+ 1] = data
[i
];
12378 *data
[i
] = OMP_BODY (t
);
12379 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
12380 NULL_TREE
, make_node (BLOCK
));
12381 OMP_BODY (t
) = body
;
12382 append_to_statement_list_force (inner_for_stmt
,
12383 &BIND_EXPR_BODY (body
));
12385 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
12386 gcc_assert (*data
[3] == inner_for_stmt
);
12391 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
12393 && OMP_FOR_ORIG_DECLS (inner_for_stmt
)
12394 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
12396 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
12399 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
12400 /* Class iterators aren't allowed on OMP_SIMD, so the only
12401 case we need to solve is distribute parallel for. They are
12402 allowed on the loop construct, but that is already handled
12403 in gimplify_omp_loop. */
12404 gcc_assert (TREE_CODE (inner_for_stmt
) == OMP_FOR
12405 && TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
12407 tree orig_decl
= TREE_PURPOSE (orig
);
12408 tree last
= TREE_VALUE (orig
);
12410 for (pc
= &OMP_FOR_CLAUSES (inner_for_stmt
);
12411 *pc
; pc
= &OMP_CLAUSE_CHAIN (*pc
))
12412 if ((OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
12413 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LASTPRIVATE
)
12414 && OMP_CLAUSE_DECL (*pc
) == orig_decl
)
12416 if (*pc
== NULL_TREE
)
12419 for (spc
= &OMP_PARALLEL_CLAUSES (*data
[1]);
12420 *spc
; spc
= &OMP_CLAUSE_CHAIN (*spc
))
12421 if (OMP_CLAUSE_CODE (*spc
) == OMP_CLAUSE_PRIVATE
12422 && OMP_CLAUSE_DECL (*spc
) == orig_decl
)
12427 *spc
= OMP_CLAUSE_CHAIN (c
);
12428 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
12432 if (*pc
== NULL_TREE
)
12434 else if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
)
12436 /* private clause will appear only on inner_for_stmt.
12437 Change it into firstprivate, and add private clause
12439 tree c
= copy_node (*pc
);
12440 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
12441 OMP_FOR_CLAUSES (for_stmt
) = c
;
12442 OMP_CLAUSE_CODE (*pc
) = OMP_CLAUSE_FIRSTPRIVATE
;
12443 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
12447 /* lastprivate clause will appear on both inner_for_stmt
12448 and for_stmt. Add firstprivate clause to
12450 tree c
= build_omp_clause (OMP_CLAUSE_LOCATION (*pc
),
12451 OMP_CLAUSE_FIRSTPRIVATE
);
12452 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (*pc
);
12453 OMP_CLAUSE_CHAIN (c
) = *pc
;
12455 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
12457 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
12458 OMP_CLAUSE_FIRSTPRIVATE
);
12459 OMP_CLAUSE_DECL (c
) = last
;
12460 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
12461 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
12462 c
= build_omp_clause (UNKNOWN_LOCATION
,
12463 *pc
? OMP_CLAUSE_SHARED
12464 : OMP_CLAUSE_FIRSTPRIVATE
);
12465 OMP_CLAUSE_DECL (c
) = orig_decl
;
12466 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
12467 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
12469 /* Similarly, take care of C++ range for temporaries, those should
12470 be firstprivate on OMP_PARALLEL if any. */
12472 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
12473 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
12474 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
12476 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
12480 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
12481 tree v
= TREE_CHAIN (orig
);
12482 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
12483 OMP_CLAUSE_FIRSTPRIVATE
);
12484 /* First add firstprivate clause for the __for_end artificial
12486 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 1);
12487 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
12489 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
12490 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
12491 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
12492 if (TREE_VEC_ELT (v
, 0))
12494 /* And now the same for __for_range artificial decl if it
12496 c
= build_omp_clause (UNKNOWN_LOCATION
,
12497 OMP_CLAUSE_FIRSTPRIVATE
);
12498 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 0);
12499 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
12501 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
12502 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
12503 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
12508 switch (TREE_CODE (for_stmt
))
12511 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
))
12513 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
12514 OMP_CLAUSE_SCHEDULE
))
12515 error_at (EXPR_LOCATION (for_stmt
),
12516 "%qs clause may not appear on non-rectangular %qs",
12517 "schedule", lang_GNU_Fortran () ? "do" : "for");
12518 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
))
12519 error_at (EXPR_LOCATION (for_stmt
),
12520 "%qs clause may not appear on non-rectangular %qs",
12521 "ordered", lang_GNU_Fortran () ? "do" : "for");
12524 case OMP_DISTRIBUTE
:
12525 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
)
12526 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
12527 OMP_CLAUSE_DIST_SCHEDULE
))
12528 error_at (EXPR_LOCATION (for_stmt
),
12529 "%qs clause may not appear on non-rectangular %qs",
12530 "dist_schedule", "distribute");
12536 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
))
12538 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
12539 OMP_CLAUSE_GRAINSIZE
))
12540 error_at (EXPR_LOCATION (for_stmt
),
12541 "%qs clause may not appear on non-rectangular %qs",
12542 "grainsize", "taskloop");
12543 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
12544 OMP_CLAUSE_NUM_TASKS
))
12545 error_at (EXPR_LOCATION (for_stmt
),
12546 "%qs clause may not appear on non-rectangular %qs",
12547 "num_tasks", "taskloop");
12549 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
12550 ort
= ORT_UNTIED_TASKLOOP
;
12552 ort
= ORT_TASKLOOP
;
12558 gcc_unreachable ();
12561 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
12562 clause for the IV. */
12563 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
12565 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
12566 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12567 decl
= TREE_OPERAND (t
, 0);
12568 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12569 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
12570 && OMP_CLAUSE_DECL (c
) == decl
)
12572 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
12577 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
12578 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
12579 loop_p
&& TREE_CODE (for_stmt
) != OMP_SIMD
12580 ? OMP_LOOP
: TREE_CODE (for_stmt
));
12582 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
12583 gimplify_omp_ctxp
->distribute
= true;
12585 /* Handle OMP_FOR_INIT. */
12586 for_pre_body
= NULL
;
12587 if ((ort
== ORT_SIMD
12588 || (inner_for_stmt
&& TREE_CODE (inner_for_stmt
) == OMP_SIMD
))
12589 && OMP_FOR_PRE_BODY (for_stmt
))
12591 has_decl_expr
= BITMAP_ALLOC (NULL
);
12592 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
12593 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
12596 t
= OMP_FOR_PRE_BODY (for_stmt
);
12597 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
12599 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
12601 tree_stmt_iterator si
;
12602 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
12606 if (TREE_CODE (t
) == DECL_EXPR
12607 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
12608 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
12612 if (OMP_FOR_PRE_BODY (for_stmt
))
12614 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
12615 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
12618 struct gimplify_omp_ctx ctx
;
12619 memset (&ctx
, 0, sizeof (ctx
));
12620 ctx
.region_type
= ORT_NONE
;
12621 gimplify_omp_ctxp
= &ctx
;
12622 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
12623 gimplify_omp_ctxp
= NULL
;
12626 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
12628 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
12629 for_stmt
= inner_for_stmt
;
12631 /* For taskloop, need to gimplify the start, end and step before the
12632 taskloop, outside of the taskloop omp context. */
12633 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
12635 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12637 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12638 gimple_seq
*for_pre_p
= (gimple_seq_empty_p (for_pre_body
)
12639 ? pre_p
: &for_pre_body
);
12640 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
12641 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
12643 tree v
= TREE_OPERAND (t
, 1);
12644 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
12645 for_pre_p
, orig_for_stmt
);
12646 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
12647 for_pre_p
, orig_for_stmt
);
12650 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
12653 /* Handle OMP_FOR_COND. */
12654 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
12655 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
12657 tree v
= TREE_OPERAND (t
, 1);
12658 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
12659 for_pre_p
, orig_for_stmt
);
12660 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
12661 for_pre_p
, orig_for_stmt
);
12664 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
12667 /* Handle OMP_FOR_INCR. */
12668 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12669 if (TREE_CODE (t
) == MODIFY_EXPR
)
12671 decl
= TREE_OPERAND (t
, 0);
12672 t
= TREE_OPERAND (t
, 1);
12673 tree
*tp
= &TREE_OPERAND (t
, 1);
12674 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
12675 tp
= &TREE_OPERAND (t
, 0);
12677 gimplify_omp_taskloop_expr (NULL_TREE
, tp
, for_pre_p
,
12682 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
12686 if (orig_for_stmt
!= for_stmt
)
12687 gimplify_omp_ctxp
->combined_loop
= true;
12690 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
12691 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
12692 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
12693 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
12695 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
12696 bool is_doacross
= false;
12697 if (c
&& OMP_CLAUSE_ORDERED_EXPR (c
))
12699 is_doacross
= true;
12700 gimplify_omp_ctxp
->loop_iter_var
.create (TREE_VEC_LENGTH
12701 (OMP_FOR_INIT (for_stmt
))
12704 int collapse
= 1, tile
= 0;
12705 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
12707 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
12708 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
12710 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
12711 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ALLOCATE
);
12712 hash_set
<tree
> *allocate_uids
= NULL
;
12715 allocate_uids
= new hash_set
<tree
>;
12716 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
12717 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
)
12718 allocate_uids
->add (OMP_CLAUSE_DECL (c
));
12720 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12722 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12723 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12724 decl
= TREE_OPERAND (t
, 0);
12725 gcc_assert (DECL_P (decl
));
12726 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
12727 || POINTER_TYPE_P (TREE_TYPE (decl
)));
12730 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
12732 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12733 if (TREE_CODE (orig_decl
) == TREE_LIST
)
12735 orig_decl
= TREE_PURPOSE (orig_decl
);
12739 gimplify_omp_ctxp
->loop_iter_var
.quick_push (orig_decl
);
12742 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
12743 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
12746 if (for_stmt
== orig_for_stmt
)
12748 tree orig_decl
= decl
;
12749 if (OMP_FOR_ORIG_DECLS (for_stmt
))
12751 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12752 if (TREE_CODE (orig_decl
) == TREE_LIST
)
12754 orig_decl
= TREE_PURPOSE (orig_decl
);
12759 if (is_global_var (orig_decl
) && DECL_THREAD_LOCAL_P (orig_decl
))
12760 error_at (EXPR_LOCATION (for_stmt
),
12761 "threadprivate iteration variable %qD", orig_decl
);
12764 /* Make sure the iteration variable is private. */
12765 tree c
= NULL_TREE
;
12766 tree c2
= NULL_TREE
;
12767 if (orig_for_stmt
!= for_stmt
)
12769 /* Preserve this information until we gimplify the inner simd. */
12771 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
12772 TREE_PRIVATE (t
) = 1;
12774 else if (ort
== ORT_SIMD
)
12776 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
12777 (splay_tree_key
) decl
);
12778 omp_is_private (gimplify_omp_ctxp
, decl
,
12779 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
12781 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
12783 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
12784 if (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
)
12785 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
12786 OMP_CLAUSE_LASTPRIVATE
);
12787 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
12788 OMP_CLAUSE_LASTPRIVATE
))
12789 if (OMP_CLAUSE_DECL (c3
) == decl
)
12791 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
12792 "conditional %<lastprivate%> on loop "
12793 "iterator %qD ignored", decl
);
12794 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
12795 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
12798 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1 && !loop_p
)
12800 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
12801 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
12802 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
12804 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
12805 || TREE_PRIVATE (t
))
12807 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
12808 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
12810 struct gimplify_omp_ctx
*outer
12811 = gimplify_omp_ctxp
->outer_context
;
12812 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
12814 if (outer
->region_type
== ORT_WORKSHARE
12815 && outer
->combined_loop
)
12817 n
= splay_tree_lookup (outer
->variables
,
12818 (splay_tree_key
)decl
);
12819 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
12821 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
12822 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
12826 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
12828 && octx
->region_type
== ORT_COMBINED_PARALLEL
12829 && octx
->outer_context
12830 && (octx
->outer_context
->region_type
12832 && octx
->outer_context
->combined_loop
)
12834 octx
= octx
->outer_context
;
12835 n
= splay_tree_lookup (octx
->variables
,
12836 (splay_tree_key
)decl
);
12837 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
12839 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
12840 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
12847 OMP_CLAUSE_DECL (c
) = decl
;
12848 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
12849 OMP_FOR_CLAUSES (for_stmt
) = c
;
12850 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
12851 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
12852 omp_lastprivate_for_combined_outer_constructs (outer
, decl
,
12859 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
12860 if (TREE_PRIVATE (t
))
12861 lastprivate
= false;
12862 if (loop_p
&& OMP_FOR_ORIG_DECLS (for_stmt
))
12864 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12865 if (TREE_CODE (elt
) == TREE_LIST
&& TREE_PURPOSE (elt
))
12866 lastprivate
= false;
12869 struct gimplify_omp_ctx
*outer
12870 = gimplify_omp_ctxp
->outer_context
;
12871 if (outer
&& lastprivate
)
12872 omp_lastprivate_for_combined_outer_constructs (outer
, decl
,
12875 c
= build_omp_clause (input_location
,
12876 lastprivate
? OMP_CLAUSE_LASTPRIVATE
12877 : OMP_CLAUSE_PRIVATE
);
12878 OMP_CLAUSE_DECL (c
) = decl
;
12879 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
12880 OMP_FOR_CLAUSES (for_stmt
) = c
;
12881 omp_add_variable (gimplify_omp_ctxp
, decl
,
12882 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
12883 | GOVD_EXPLICIT
| GOVD_SEEN
);
12887 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
12889 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
12890 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
12891 (splay_tree_key
) decl
);
12892 if (n
&& (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
))
12893 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
12894 OMP_CLAUSE_LASTPRIVATE
);
12895 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
12896 OMP_CLAUSE_LASTPRIVATE
))
12897 if (OMP_CLAUSE_DECL (c3
) == decl
)
12899 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
12900 "conditional %<lastprivate%> on loop "
12901 "iterator %qD ignored", decl
);
12902 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
12903 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
12907 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
12909 /* If DECL is not a gimple register, create a temporary variable to act
12910 as an iteration counter. This is valid, since DECL cannot be
12911 modified in the body of the loop. Similarly for any iteration vars
12912 in simd with collapse > 1 where the iterator vars must be
12913 lastprivate. And similarly for vars mentioned in allocate clauses. */
12914 if (orig_for_stmt
!= for_stmt
)
12916 else if (!is_gimple_reg (decl
)
12917 || (ort
== ORT_SIMD
12918 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1)
12919 || (allocate_uids
&& allocate_uids
->contains (decl
)))
12921 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12922 /* Make sure omp_add_variable is not called on it prematurely.
12923 We call it ourselves a few lines later. */
12924 gimplify_omp_ctxp
= NULL
;
12925 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
12926 gimplify_omp_ctxp
= ctx
;
12927 TREE_OPERAND (t
, 0) = var
;
12929 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
12931 if (ort
== ORT_SIMD
12932 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
12934 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
12935 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
12936 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
12937 OMP_CLAUSE_DECL (c2
) = var
;
12938 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
12939 OMP_FOR_CLAUSES (for_stmt
) = c2
;
12940 omp_add_variable (gimplify_omp_ctxp
, var
,
12941 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
12942 if (c
== NULL_TREE
)
12949 omp_add_variable (gimplify_omp_ctxp
, var
,
12950 GOVD_PRIVATE
| GOVD_SEEN
);
12955 gimplify_omp_ctxp
->in_for_exprs
= true;
12956 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
12958 tree lb
= TREE_OPERAND (t
, 1);
12959 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 1), &for_pre_body
, NULL
,
12960 is_gimple_val
, fb_rvalue
, false);
12961 ret
= MIN (ret
, tret
);
12962 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 2), &for_pre_body
, NULL
,
12963 is_gimple_val
, fb_rvalue
, false);
12966 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
12967 is_gimple_val
, fb_rvalue
, false);
12968 gimplify_omp_ctxp
->in_for_exprs
= false;
12969 ret
= MIN (ret
, tret
);
12970 if (ret
== GS_ERROR
)
12973 /* Handle OMP_FOR_COND. */
12974 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
12975 gcc_assert (COMPARISON_CLASS_P (t
));
12976 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
12978 gimplify_omp_ctxp
->in_for_exprs
= true;
12979 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
12981 tree ub
= TREE_OPERAND (t
, 1);
12982 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 1), &for_pre_body
, NULL
,
12983 is_gimple_val
, fb_rvalue
, false);
12984 ret
= MIN (ret
, tret
);
12985 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 2), &for_pre_body
, NULL
,
12986 is_gimple_val
, fb_rvalue
, false);
12989 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
12990 is_gimple_val
, fb_rvalue
, false);
12991 gimplify_omp_ctxp
->in_for_exprs
= false;
12992 ret
= MIN (ret
, tret
);
12994 /* Handle OMP_FOR_INCR. */
12995 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12996 switch (TREE_CODE (t
))
12998 case PREINCREMENT_EXPR
:
12999 case POSTINCREMENT_EXPR
:
13001 tree decl
= TREE_OPERAND (t
, 0);
13002 /* c_omp_for_incr_canonicalize_ptr() should have been
13003 called to massage things appropriately. */
13004 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
13006 if (orig_for_stmt
!= for_stmt
)
13008 t
= build_int_cst (TREE_TYPE (decl
), 1);
13010 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
13011 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
13012 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
13013 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
13017 case PREDECREMENT_EXPR
:
13018 case POSTDECREMENT_EXPR
:
13019 /* c_omp_for_incr_canonicalize_ptr() should have been
13020 called to massage things appropriately. */
13021 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
13022 if (orig_for_stmt
!= for_stmt
)
13024 t
= build_int_cst (TREE_TYPE (decl
), -1);
13026 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
13027 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
13028 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
13029 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
13033 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
13034 TREE_OPERAND (t
, 0) = var
;
13036 t
= TREE_OPERAND (t
, 1);
13037 switch (TREE_CODE (t
))
13040 if (TREE_OPERAND (t
, 1) == decl
)
13042 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
13043 TREE_OPERAND (t
, 0) = var
;
13049 case POINTER_PLUS_EXPR
:
13050 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
13051 TREE_OPERAND (t
, 0) = var
;
13054 gcc_unreachable ();
13057 gimplify_omp_ctxp
->in_for_exprs
= true;
13058 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
13059 is_gimple_val
, fb_rvalue
, false);
13060 ret
= MIN (ret
, tret
);
13063 tree step
= TREE_OPERAND (t
, 1);
13064 tree stept
= TREE_TYPE (decl
);
13065 if (POINTER_TYPE_P (stept
))
13067 step
= fold_convert (stept
, step
);
13068 if (TREE_CODE (t
) == MINUS_EXPR
)
13069 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
13070 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
13071 if (step
!= TREE_OPERAND (t
, 1))
13073 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
13074 &for_pre_body
, NULL
,
13075 is_gimple_val
, fb_rvalue
, false);
13076 ret
= MIN (ret
, tret
);
13079 gimplify_omp_ctxp
->in_for_exprs
= false;
13083 gcc_unreachable ();
13089 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
13092 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
13094 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13095 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
13096 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
13097 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
13098 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
13099 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
13100 && OMP_CLAUSE_DECL (c
) == decl
)
13102 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
13106 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
13107 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
13108 gcc_assert (TREE_OPERAND (t
, 0) == var
);
13109 t
= TREE_OPERAND (t
, 1);
13110 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
13111 || TREE_CODE (t
) == MINUS_EXPR
13112 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
13113 gcc_assert (TREE_OPERAND (t
, 0) == var
);
13114 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
13115 is_doacross
? var
: decl
,
13116 TREE_OPERAND (t
, 1));
13119 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
13120 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
13122 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
13123 push_gimplify_context ();
13124 gimplify_assign (decl
, t
, seq
);
13125 gimple
*bind
= NULL
;
13126 if (gimplify_ctxp
->temps
)
13128 bind
= gimple_build_bind (NULL_TREE
, *seq
, NULL_TREE
);
13130 gimplify_seq_add_stmt (seq
, bind
);
13132 pop_gimplify_context (bind
);
13135 if (OMP_FOR_NON_RECTANGULAR (for_stmt
) && var
!= decl
)
13136 for (int j
= i
+ 1; j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
13138 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
13139 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
13140 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
13141 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
13142 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
13143 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
13144 gcc_assert (COMPARISON_CLASS_P (t
));
13145 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
13146 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
13147 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
13151 BITMAP_FREE (has_decl_expr
);
13152 delete allocate_uids
;
13154 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
13155 || (loop_p
&& orig_for_stmt
== for_stmt
))
13157 push_gimplify_context ();
13158 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
13160 OMP_FOR_BODY (orig_for_stmt
)
13161 = build3 (BIND_EXPR
, void_type_node
, NULL
,
13162 OMP_FOR_BODY (orig_for_stmt
), NULL
);
13163 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
13167 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
13170 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
13171 || (loop_p
&& orig_for_stmt
== for_stmt
))
13173 if (gimple_code (g
) == GIMPLE_BIND
)
13174 pop_gimplify_context (g
);
13176 pop_gimplify_context (NULL
);
13179 if (orig_for_stmt
!= for_stmt
)
13180 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
13182 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
13183 decl
= TREE_OPERAND (t
, 0);
13184 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
13185 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
13186 gimplify_omp_ctxp
= ctx
->outer_context
;
13187 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
13188 gimplify_omp_ctxp
= ctx
;
13189 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
13190 TREE_OPERAND (t
, 0) = var
;
13191 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
13192 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
13193 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
13194 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
13195 for (int j
= i
+ 1;
13196 j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
13198 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
13199 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
13200 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
13201 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
13203 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
13204 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
13206 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
13207 gcc_assert (COMPARISON_CLASS_P (t
));
13208 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
13209 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
13211 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
13212 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
13217 gimplify_adjust_omp_clauses (pre_p
, for_body
,
13218 &OMP_FOR_CLAUSES (orig_for_stmt
),
13219 TREE_CODE (orig_for_stmt
));
13222 switch (TREE_CODE (orig_for_stmt
))
13224 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
13225 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
13226 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
13227 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
13228 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
13230 gcc_unreachable ();
13232 if (loop_p
&& kind
== GF_OMP_FOR_KIND_SIMD
)
13234 gimplify_seq_add_seq (pre_p
, for_pre_body
);
13235 for_pre_body
= NULL
;
13237 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
13238 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
13240 if (orig_for_stmt
!= for_stmt
)
13241 gimple_omp_for_set_combined_p (gfor
, true);
13242 if (gimplify_omp_ctxp
13243 && (gimplify_omp_ctxp
->combined_loop
13244 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
13245 && gimplify_omp_ctxp
->outer_context
13246 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
13248 gimple_omp_for_set_combined_into_p (gfor
, true);
13249 if (gimplify_omp_ctxp
->combined_loop
)
13250 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
13252 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
13255 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
13257 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
13258 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
13259 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
13260 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
13261 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
13262 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
13263 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
13264 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
13267 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
13268 constructs with GIMPLE_OMP_TASK sandwiched in between them.
13269 The outer taskloop stands for computing the number of iterations,
13270 counts for collapsed loops and holding taskloop specific clauses.
13271 The task construct stands for the effect of data sharing on the
13272 explicit task it creates and the inner taskloop stands for expansion
13273 of the static loop inside of the explicit task construct. */
13274 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
13276 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
13277 tree task_clauses
= NULL_TREE
;
13278 tree c
= *gfor_clauses_ptr
;
13279 tree
*gtask_clauses_ptr
= &task_clauses
;
13280 tree outer_for_clauses
= NULL_TREE
;
13281 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
13282 bitmap lastprivate_uids
= NULL
;
13283 if (omp_find_clause (c
, OMP_CLAUSE_ALLOCATE
))
13285 c
= omp_find_clause (c
, OMP_CLAUSE_LASTPRIVATE
);
13288 lastprivate_uids
= BITMAP_ALLOC (NULL
);
13289 for (; c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
13290 OMP_CLAUSE_LASTPRIVATE
))
13291 bitmap_set_bit (lastprivate_uids
,
13292 DECL_UID (OMP_CLAUSE_DECL (c
)));
13294 c
= *gfor_clauses_ptr
;
13296 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
13297 switch (OMP_CLAUSE_CODE (c
))
13299 /* These clauses are allowed on task, move them there. */
13300 case OMP_CLAUSE_SHARED
:
13301 case OMP_CLAUSE_FIRSTPRIVATE
:
13302 case OMP_CLAUSE_DEFAULT
:
13303 case OMP_CLAUSE_IF
:
13304 case OMP_CLAUSE_UNTIED
:
13305 case OMP_CLAUSE_FINAL
:
13306 case OMP_CLAUSE_MERGEABLE
:
13307 case OMP_CLAUSE_PRIORITY
:
13308 case OMP_CLAUSE_REDUCTION
:
13309 case OMP_CLAUSE_IN_REDUCTION
:
13310 *gtask_clauses_ptr
= c
;
13311 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
13313 case OMP_CLAUSE_PRIVATE
:
13314 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
13316 /* We want private on outer for and firstprivate
13319 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
13320 OMP_CLAUSE_FIRSTPRIVATE
);
13321 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
13322 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
13324 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
13325 *gforo_clauses_ptr
= c
;
13326 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
13330 *gtask_clauses_ptr
= c
;
13331 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
13334 /* These clauses go into outer taskloop clauses. */
13335 case OMP_CLAUSE_GRAINSIZE
:
13336 case OMP_CLAUSE_NUM_TASKS
:
13337 case OMP_CLAUSE_NOGROUP
:
13338 *gforo_clauses_ptr
= c
;
13339 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
13341 /* Collapse clause we duplicate on both taskloops. */
13342 case OMP_CLAUSE_COLLAPSE
:
13343 *gfor_clauses_ptr
= c
;
13344 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
13345 *gforo_clauses_ptr
= copy_node (c
);
13346 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
13348 /* For lastprivate, keep the clause on inner taskloop, and add
13349 a shared clause on task. If the same decl is also firstprivate,
13350 add also firstprivate clause on the inner taskloop. */
13351 case OMP_CLAUSE_LASTPRIVATE
:
13352 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
13354 /* For taskloop C++ lastprivate IVs, we want:
13355 1) private on outer taskloop
13356 2) firstprivate and shared on task
13357 3) lastprivate on inner taskloop */
13359 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
13360 OMP_CLAUSE_FIRSTPRIVATE
);
13361 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
13362 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
13364 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
13365 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
13366 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
13367 OMP_CLAUSE_PRIVATE
);
13368 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
13369 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
13370 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
13371 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
13373 *gfor_clauses_ptr
= c
;
13374 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
13376 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
13377 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
13378 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
13379 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
13381 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
13383 /* Allocate clause we duplicate on task and inner taskloop
13384 if the decl is lastprivate, otherwise just put on task. */
13385 case OMP_CLAUSE_ALLOCATE
:
13386 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
13387 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
13389 /* Additionally, put firstprivate clause on task
13390 for the allocator if it is not constant. */
13392 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
13393 OMP_CLAUSE_FIRSTPRIVATE
);
13394 OMP_CLAUSE_DECL (*gtask_clauses_ptr
)
13395 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
13396 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
13398 if (lastprivate_uids
13399 && bitmap_bit_p (lastprivate_uids
,
13400 DECL_UID (OMP_CLAUSE_DECL (c
))))
13402 *gfor_clauses_ptr
= c
;
13403 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
13404 *gtask_clauses_ptr
= copy_node (c
);
13405 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
13409 *gtask_clauses_ptr
= c
;
13410 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
13414 gcc_unreachable ();
13416 *gfor_clauses_ptr
= NULL_TREE
;
13417 *gtask_clauses_ptr
= NULL_TREE
;
13418 *gforo_clauses_ptr
= NULL_TREE
;
13419 BITMAP_FREE (lastprivate_uids
);
13420 gimple_set_location (gfor
, input_location
);
13421 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
13422 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
13423 NULL_TREE
, NULL_TREE
, NULL_TREE
);
13424 gimple_set_location (g
, input_location
);
13425 gimple_omp_task_set_taskloop_p (g
, true);
13426 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
13428 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
13429 gimple_omp_for_collapse (gfor
),
13430 gimple_omp_for_pre_body (gfor
));
13431 gimple_omp_for_set_pre_body (gfor
, NULL
);
13432 gimple_omp_for_set_combined_p (gforo
, true);
13433 gimple_omp_for_set_combined_into_p (gfor
, true);
13434 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
13436 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
13437 tree v
= create_tmp_var (type
);
13438 gimple_omp_for_set_index (gforo
, i
, v
);
13439 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
13440 gimple_omp_for_set_initial (gforo
, i
, t
);
13441 gimple_omp_for_set_cond (gforo
, i
,
13442 gimple_omp_for_cond (gfor
, i
));
13443 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
13444 gimple_omp_for_set_final (gforo
, i
, t
);
13445 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
13446 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
13447 TREE_OPERAND (t
, 0) = v
;
13448 gimple_omp_for_set_incr (gforo
, i
, t
);
13449 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
13450 OMP_CLAUSE_DECL (t
) = v
;
13451 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
13452 gimple_omp_for_set_clauses (gforo
, t
);
13453 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
13455 tree
*p1
= NULL
, *p2
= NULL
;
13456 t
= gimple_omp_for_initial (gforo
, i
);
13457 if (TREE_CODE (t
) == TREE_VEC
)
13458 p1
= &TREE_VEC_ELT (t
, 0);
13459 t
= gimple_omp_for_final (gforo
, i
);
13460 if (TREE_CODE (t
) == TREE_VEC
)
13463 p2
= &TREE_VEC_ELT (t
, 0);
13465 p1
= &TREE_VEC_ELT (t
, 0);
13470 for (j
= 0; j
< i
; j
++)
13471 if (*p1
== gimple_omp_for_index (gfor
, j
))
13473 *p1
= gimple_omp_for_index (gforo
, j
);
13478 gcc_assert (j
< i
);
13482 gimplify_seq_add_stmt (pre_p
, gforo
);
13485 gimplify_seq_add_stmt (pre_p
, gfor
);
13487 if (TREE_CODE (orig_for_stmt
) == OMP_FOR
)
13489 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
13490 unsigned lastprivate_conditional
= 0;
13492 && (ctx
->region_type
== ORT_TARGET_DATA
13493 || ctx
->region_type
== ORT_TASKGROUP
))
13494 ctx
= ctx
->outer_context
;
13495 if (ctx
&& (ctx
->region_type
& ORT_PARALLEL
) != 0)
13496 for (tree c
= gimple_omp_for_clauses (gfor
);
13497 c
; c
= OMP_CLAUSE_CHAIN (c
))
13498 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
13499 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
13500 ++lastprivate_conditional
;
13501 if (lastprivate_conditional
)
13503 struct omp_for_data fd
;
13504 omp_extract_for_data (gfor
, &fd
, NULL
);
13505 tree type
= build_array_type_nelts (unsigned_type_for (fd
.iter_type
),
13506 lastprivate_conditional
);
13507 tree var
= create_tmp_var_raw (type
);
13508 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
13509 OMP_CLAUSE_DECL (c
) = var
;
13510 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
13511 gimple_omp_for_set_clauses (gfor
, c
);
13512 omp_add_variable (ctx
, var
, GOVD_CONDTEMP
| GOVD_SEEN
);
13515 else if (TREE_CODE (orig_for_stmt
) == OMP_SIMD
)
13517 unsigned lastprivate_conditional
= 0;
13518 for (tree c
= gimple_omp_for_clauses (gfor
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13519 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
13520 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
13521 ++lastprivate_conditional
;
13522 if (lastprivate_conditional
)
13524 struct omp_for_data fd
;
13525 omp_extract_for_data (gfor
, &fd
, NULL
);
13526 tree type
= unsigned_type_for (fd
.iter_type
);
13527 while (lastprivate_conditional
--)
13529 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
13530 OMP_CLAUSE__CONDTEMP_
);
13531 OMP_CLAUSE_DECL (c
) = create_tmp_var (type
);
13532 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
13533 gimple_omp_for_set_clauses (gfor
, c
);
13538 if (ret
!= GS_ALL_DONE
)
13540 *expr_p
= NULL_TREE
;
13541 return GS_ALL_DONE
;
13544 /* Helper for gimplify_omp_loop, called through walk_tree. */
13547 note_no_context_vars (tree
*tp
, int *, void *data
)
13550 && DECL_CONTEXT (*tp
) == NULL_TREE
13551 && !is_global_var (*tp
))
13553 vec
<tree
> *d
= (vec
<tree
> *) data
;
13554 d
->safe_push (*tp
);
13555 DECL_CONTEXT (*tp
) = current_function_decl
;
13560 /* Gimplify the gross structure of an OMP_LOOP statement. */
13562 static enum gimplify_status
13563 gimplify_omp_loop (tree
*expr_p
, gimple_seq
*pre_p
)
13565 tree for_stmt
= *expr_p
;
13566 tree clauses
= OMP_FOR_CLAUSES (for_stmt
);
13567 struct gimplify_omp_ctx
*octx
= gimplify_omp_ctxp
;
13568 enum omp_clause_bind_kind kind
= OMP_CLAUSE_BIND_THREAD
;
13571 /* If order is not present, the behavior is as if order(concurrent)
13573 tree order
= omp_find_clause (clauses
, OMP_CLAUSE_ORDER
);
13574 if (order
== NULL_TREE
)
13576 order
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_ORDER
);
13577 OMP_CLAUSE_CHAIN (order
) = clauses
;
13578 OMP_FOR_CLAUSES (for_stmt
) = clauses
= order
;
13581 tree bind
= omp_find_clause (clauses
, OMP_CLAUSE_BIND
);
13582 if (bind
== NULL_TREE
)
13584 if (!flag_openmp
) /* flag_openmp_simd */
13586 else if (octx
&& (octx
->region_type
& ORT_TEAMS
) != 0)
13587 kind
= OMP_CLAUSE_BIND_TEAMS
;
13588 else if (octx
&& (octx
->region_type
& ORT_PARALLEL
) != 0)
13589 kind
= OMP_CLAUSE_BIND_PARALLEL
;
13592 for (; octx
; octx
= octx
->outer_context
)
13594 if ((octx
->region_type
& ORT_ACC
) != 0
13595 || octx
->region_type
== ORT_NONE
13596 || octx
->region_type
== ORT_IMPLICIT_TARGET
)
13600 if (octx
== NULL
&& !in_omp_construct
)
13601 error_at (EXPR_LOCATION (for_stmt
),
13602 "%<bind%> clause not specified on a %<loop%> "
13603 "construct not nested inside another OpenMP construct");
13605 bind
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_BIND
);
13606 OMP_CLAUSE_CHAIN (bind
) = clauses
;
13607 OMP_CLAUSE_BIND_KIND (bind
) = kind
;
13608 OMP_FOR_CLAUSES (for_stmt
) = bind
;
13611 switch (OMP_CLAUSE_BIND_KIND (bind
))
13613 case OMP_CLAUSE_BIND_THREAD
:
13615 case OMP_CLAUSE_BIND_PARALLEL
:
13616 if (!flag_openmp
) /* flag_openmp_simd */
13618 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
13621 for (; octx
; octx
= octx
->outer_context
)
13622 if (octx
->region_type
== ORT_SIMD
13623 && omp_find_clause (octx
->clauses
, OMP_CLAUSE_BIND
) == NULL_TREE
)
13625 error_at (EXPR_LOCATION (for_stmt
),
13626 "%<bind(parallel)%> on a %<loop%> construct nested "
13627 "inside %<simd%> construct");
13628 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
13631 kind
= OMP_CLAUSE_BIND_PARALLEL
;
13633 case OMP_CLAUSE_BIND_TEAMS
:
13634 if (!flag_openmp
) /* flag_openmp_simd */
13636 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
13640 && octx
->region_type
!= ORT_IMPLICIT_TARGET
13641 && octx
->region_type
!= ORT_NONE
13642 && (octx
->region_type
& ORT_TEAMS
) == 0)
13643 || in_omp_construct
)
13645 error_at (EXPR_LOCATION (for_stmt
),
13646 "%<bind(teams)%> on a %<loop%> region not strictly "
13647 "nested inside of a %<teams%> region");
13648 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
13651 kind
= OMP_CLAUSE_BIND_TEAMS
;
13654 gcc_unreachable ();
13657 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
13658 switch (OMP_CLAUSE_CODE (*pc
))
13660 case OMP_CLAUSE_REDUCTION
:
13661 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc
))
13663 error_at (OMP_CLAUSE_LOCATION (*pc
),
13664 "%<inscan%> %<reduction%> clause on "
13665 "%qs construct", "loop");
13666 OMP_CLAUSE_REDUCTION_INSCAN (*pc
) = 0;
13668 if (OMP_CLAUSE_REDUCTION_TASK (*pc
))
13670 error_at (OMP_CLAUSE_LOCATION (*pc
),
13671 "invalid %<task%> reduction modifier on construct "
13672 "other than %<parallel%>, %qs or %<sections%>",
13673 lang_GNU_Fortran () ? "do" : "for");
13674 OMP_CLAUSE_REDUCTION_TASK (*pc
) = 0;
13676 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13678 case OMP_CLAUSE_LASTPRIVATE
:
13679 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
13681 tree t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
13682 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
13683 if (OMP_CLAUSE_DECL (*pc
) == TREE_OPERAND (t
, 0))
13685 if (OMP_FOR_ORIG_DECLS (for_stmt
)
13686 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
13688 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
13691 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
13692 if (OMP_CLAUSE_DECL (*pc
) == TREE_PURPOSE (orig
))
13696 if (i
== TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)))
13698 error_at (OMP_CLAUSE_LOCATION (*pc
),
13699 "%<lastprivate%> clause on a %<loop%> construct refers "
13700 "to a variable %qD which is not the loop iterator",
13701 OMP_CLAUSE_DECL (*pc
));
13702 *pc
= OMP_CLAUSE_CHAIN (*pc
);
13705 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13708 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13712 TREE_SET_CODE (for_stmt
, OMP_SIMD
);
13717 case OMP_CLAUSE_BIND_THREAD
: last
= 0; break;
13718 case OMP_CLAUSE_BIND_PARALLEL
: last
= 1; break;
13719 case OMP_CLAUSE_BIND_TEAMS
: last
= 2; break;
13721 for (int pass
= 1; pass
<= last
; pass
++)
13725 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
,
13726 make_node (BLOCK
));
13727 append_to_statement_list (*expr_p
, &BIND_EXPR_BODY (bind
));
13728 *expr_p
= make_node (OMP_PARALLEL
);
13729 TREE_TYPE (*expr_p
) = void_type_node
;
13730 OMP_PARALLEL_BODY (*expr_p
) = bind
;
13731 OMP_PARALLEL_COMBINED (*expr_p
) = 1;
13732 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (for_stmt
));
13733 tree
*pc
= &OMP_PARALLEL_CLAUSES (*expr_p
);
13734 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
13735 if (OMP_FOR_ORIG_DECLS (for_stmt
)
13736 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
))
13739 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
13740 if (TREE_PURPOSE (elt
) && TREE_VALUE (elt
))
13742 *pc
= build_omp_clause (UNKNOWN_LOCATION
,
13743 OMP_CLAUSE_FIRSTPRIVATE
);
13744 OMP_CLAUSE_DECL (*pc
) = TREE_VALUE (elt
);
13745 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13749 tree t
= make_node (pass
== 2 ? OMP_DISTRIBUTE
: OMP_FOR
);
13750 tree
*pc
= &OMP_FOR_CLAUSES (t
);
13751 TREE_TYPE (t
) = void_type_node
;
13752 OMP_FOR_BODY (t
) = *expr_p
;
13753 SET_EXPR_LOCATION (t
, EXPR_LOCATION (for_stmt
));
13754 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13755 switch (OMP_CLAUSE_CODE (c
))
13757 case OMP_CLAUSE_BIND
:
13758 case OMP_CLAUSE_ORDER
:
13759 case OMP_CLAUSE_COLLAPSE
:
13760 *pc
= copy_node (c
);
13761 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13763 case OMP_CLAUSE_PRIVATE
:
13764 case OMP_CLAUSE_FIRSTPRIVATE
:
13765 /* Only needed on innermost. */
13767 case OMP_CLAUSE_LASTPRIVATE
:
13768 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
) && pass
!= last
)
13770 *pc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
13771 OMP_CLAUSE_FIRSTPRIVATE
);
13772 OMP_CLAUSE_DECL (*pc
) = OMP_CLAUSE_DECL (c
);
13773 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
13774 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13776 *pc
= copy_node (c
);
13777 OMP_CLAUSE_LASTPRIVATE_STMT (*pc
) = NULL_TREE
;
13778 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
13779 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
13782 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc
) = 1;
13784 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
13785 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc
) = 0;
13787 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13789 case OMP_CLAUSE_REDUCTION
:
13790 *pc
= copy_node (c
);
13791 OMP_CLAUSE_DECL (*pc
) = unshare_expr (OMP_CLAUSE_DECL (c
));
13792 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
13793 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
))
13795 auto_vec
<tree
> no_context_vars
;
13796 int walk_subtrees
= 0;
13797 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
13798 &walk_subtrees
, &no_context_vars
);
13799 if (tree p
= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
))
13800 note_no_context_vars (&p
, &walk_subtrees
, &no_context_vars
);
13801 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c
),
13802 note_no_context_vars
,
13804 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c
),
13805 note_no_context_vars
,
13808 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
)
13809 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
));
13810 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
13811 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
)
13812 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
));
13814 hash_map
<tree
, tree
> decl_map
;
13815 decl_map
.put (OMP_CLAUSE_DECL (c
), OMP_CLAUSE_DECL (c
));
13816 decl_map
.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
13817 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
));
13818 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
13819 decl_map
.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
13820 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
));
13823 memset (&id
, 0, sizeof (id
));
13824 id
.src_fn
= current_function_decl
;
13825 id
.dst_fn
= current_function_decl
;
13826 id
.src_cfun
= cfun
;
13827 id
.decl_map
= &decl_map
;
13828 id
.copy_decl
= copy_decl_no_change
;
13829 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
13830 id
.transform_new_cfg
= true;
13831 id
.transform_return_to_modify
= false;
13833 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc
), copy_tree_body_r
,
13835 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc
), copy_tree_body_r
,
13838 for (tree d
: no_context_vars
)
13840 DECL_CONTEXT (d
) = NULL_TREE
;
13841 DECL_CONTEXT (*decl_map
.get (d
)) = NULL_TREE
;
13846 OMP_CLAUSE_REDUCTION_INIT (*pc
)
13847 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c
));
13848 OMP_CLAUSE_REDUCTION_MERGE (*pc
)
13849 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c
));
13851 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13854 gcc_unreachable ();
13859 return gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_stmt
, fb_none
);
13863 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
13864 of OMP_TARGET's body. */
13867 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
13869 *walk_subtrees
= 0;
13870 switch (TREE_CODE (*tp
))
13875 case STATEMENT_LIST
:
13876 *walk_subtrees
= 1;
13884 /* Helper function of optimize_target_teams, determine if the expression
13885 can be computed safely before the target construct on the host. */
13888 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
13894 *walk_subtrees
= 0;
13897 switch (TREE_CODE (*tp
))
13902 *walk_subtrees
= 0;
13903 if (error_operand_p (*tp
)
13904 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
13905 || DECL_HAS_VALUE_EXPR_P (*tp
)
13906 || DECL_THREAD_LOCAL_P (*tp
)
13907 || TREE_SIDE_EFFECTS (*tp
)
13908 || TREE_THIS_VOLATILE (*tp
))
13910 if (is_global_var (*tp
)
13911 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
13912 || lookup_attribute ("omp declare target link",
13913 DECL_ATTRIBUTES (*tp
))))
13916 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
13917 && !is_global_var (*tp
)
13918 && decl_function_context (*tp
) == current_function_decl
)
13920 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
13921 (splay_tree_key
) *tp
);
13924 if (gimplify_omp_ctxp
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
13928 else if (n
->value
& GOVD_LOCAL
)
13930 else if (n
->value
& GOVD_FIRSTPRIVATE
)
13932 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
13933 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
13937 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
13941 if (TARGET_EXPR_INITIAL (*tp
)
13942 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
13944 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
13945 walk_subtrees
, NULL
);
13946 /* Allow some reasonable subset of integral arithmetics. */
13950 case TRUNC_DIV_EXPR
:
13951 case CEIL_DIV_EXPR
:
13952 case FLOOR_DIV_EXPR
:
13953 case ROUND_DIV_EXPR
:
13954 case TRUNC_MOD_EXPR
:
13955 case CEIL_MOD_EXPR
:
13956 case FLOOR_MOD_EXPR
:
13957 case ROUND_MOD_EXPR
:
13959 case EXACT_DIV_EXPR
:
13970 case NON_LVALUE_EXPR
:
13972 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
13975 /* And disallow anything else, except for comparisons. */
13977 if (COMPARISON_CLASS_P (*tp
))
13983 /* Try to determine if the num_teams and/or thread_limit expressions
13984 can have their values determined already before entering the
13986 INTEGER_CSTs trivially are,
13987 integral decls that are firstprivate (explicitly or implicitly)
13988 or explicitly map(always, to:) or map(always, tofrom:) on the target
13989 region too, and expressions involving simple arithmetics on those
13990 too, function calls are not ok, dereferencing something neither etc.
13991 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
13992 EXPR based on what we find:
13993 0 stands for clause not specified at all, use implementation default
13994 -1 stands for value that can't be determined easily before entering
13995 the target construct.
13996 If teams construct is not present at all, use 1 for num_teams
13997 and 0 for thread_limit (only one team is involved, and the thread
13998 limit is implementation defined. */
14001 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
14003 tree body
= OMP_BODY (target
);
14004 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
14005 tree num_teams_lower
= NULL_TREE
;
14006 tree num_teams_upper
= integer_zero_node
;
14007 tree thread_limit
= integer_zero_node
;
14008 location_t num_teams_loc
= EXPR_LOCATION (target
);
14009 location_t thread_limit_loc
= EXPR_LOCATION (target
);
14011 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
14013 if (teams
== NULL_TREE
)
14014 num_teams_upper
= integer_one_node
;
14016 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
14018 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
14020 p
= &num_teams_upper
;
14021 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
14022 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
))
14024 expr
= OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
);
14025 if (TREE_CODE (expr
) == INTEGER_CST
)
14026 num_teams_lower
= expr
;
14027 else if (walk_tree (&expr
, computable_teams_clause
,
14029 num_teams_lower
= integer_minus_one_node
;
14032 num_teams_lower
= expr
;
14033 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
14034 if (gimplify_expr (&num_teams_lower
, pre_p
, NULL
,
14035 is_gimple_val
, fb_rvalue
, false)
14038 gimplify_omp_ctxp
= target_ctx
;
14039 num_teams_lower
= integer_minus_one_node
;
14043 gimplify_omp_ctxp
= target_ctx
;
14044 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
14045 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
14051 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
14054 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
14058 expr
= OMP_CLAUSE_OPERAND (c
, 0);
14059 if (TREE_CODE (expr
) == INTEGER_CST
)
14064 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
14066 *p
= integer_minus_one_node
;
14070 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
14071 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
14074 gimplify_omp_ctxp
= target_ctx
;
14075 *p
= integer_minus_one_node
;
14078 gimplify_omp_ctxp
= target_ctx
;
14079 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
14080 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
14082 if (!omp_find_clause (OMP_TARGET_CLAUSES (target
), OMP_CLAUSE_THREAD_LIMIT
))
14084 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
14085 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
14086 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
14087 OMP_TARGET_CLAUSES (target
) = c
;
14089 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
14090 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c
) = num_teams_upper
;
14091 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
) = num_teams_lower
;
14092 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
14093 OMP_TARGET_CLAUSES (target
) = c
;
14096 /* Gimplify the gross structure of several OMP constructs. */
14099 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
14101 tree expr
= *expr_p
;
14103 gimple_seq body
= NULL
;
14104 enum omp_region_type ort
;
14106 switch (TREE_CODE (expr
))
14110 ort
= ORT_WORKSHARE
;
14113 ort
= ORT_TASKGROUP
;
14116 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
14119 ort
= ORT_ACC_KERNELS
;
14121 case OACC_PARALLEL
:
14122 ort
= ORT_ACC_PARALLEL
;
14125 ort
= ORT_ACC_SERIAL
;
14128 ort
= ORT_ACC_DATA
;
14130 case OMP_TARGET_DATA
:
14131 ort
= ORT_TARGET_DATA
;
14134 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
14135 if (gimplify_omp_ctxp
== NULL
14136 || gimplify_omp_ctxp
->region_type
== ORT_IMPLICIT_TARGET
)
14137 ort
= (enum omp_region_type
) (ort
| ORT_HOST_TEAMS
);
14139 case OACC_HOST_DATA
:
14140 ort
= ORT_ACC_HOST_DATA
;
14143 gcc_unreachable ();
14146 bool save_in_omp_construct
= in_omp_construct
;
14147 if ((ort
& ORT_ACC
) == 0)
14148 in_omp_construct
= false;
14149 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
14151 if (TREE_CODE (expr
) == OMP_TARGET
)
14152 optimize_target_teams (expr
, pre_p
);
14153 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
14154 || (ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
14156 push_gimplify_context ();
14157 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
14158 if (gimple_code (g
) == GIMPLE_BIND
)
14159 pop_gimplify_context (g
);
14161 pop_gimplify_context (NULL
);
14162 if ((ort
& ORT_TARGET_DATA
) != 0)
14164 enum built_in_function end_ix
;
14165 switch (TREE_CODE (expr
))
14168 case OACC_HOST_DATA
:
14169 end_ix
= BUILT_IN_GOACC_DATA_END
;
14171 case OMP_TARGET_DATA
:
14172 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
14175 gcc_unreachable ();
14177 tree fn
= builtin_decl_explicit (end_ix
);
14178 g
= gimple_build_call (fn
, 0);
14179 gimple_seq cleanup
= NULL
;
14180 gimple_seq_add_stmt (&cleanup
, g
);
14181 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
14183 gimple_seq_add_stmt (&body
, g
);
14187 gimplify_and_add (OMP_BODY (expr
), &body
);
14188 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
14190 in_omp_construct
= save_in_omp_construct
;
14192 switch (TREE_CODE (expr
))
14195 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
14196 OMP_CLAUSES (expr
));
14198 case OACC_HOST_DATA
:
14199 if (omp_find_clause (OMP_CLAUSES (expr
), OMP_CLAUSE_IF_PRESENT
))
14201 for (tree c
= OMP_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
14202 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
14203 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
) = 1;
14206 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
14207 OMP_CLAUSES (expr
));
14210 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
14211 OMP_CLAUSES (expr
));
14213 case OACC_PARALLEL
:
14214 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
14215 OMP_CLAUSES (expr
));
14218 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_SERIAL
,
14219 OMP_CLAUSES (expr
));
14222 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
14225 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
14228 stmt
= gimple_build_omp_scope (body
, OMP_CLAUSES (expr
));
14231 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
14232 OMP_CLAUSES (expr
));
14234 case OMP_TARGET_DATA
:
14235 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
14236 to be evaluated before the use_device_{ptr,addr} clauses if they
14237 refer to the same variables. */
14239 tree use_device_clauses
;
14240 tree
*pc
, *uc
= &use_device_clauses
;
14241 for (pc
= &OMP_CLAUSES (expr
); *pc
; )
14242 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_PTR
14243 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
14246 *pc
= OMP_CLAUSE_CHAIN (*pc
);
14247 uc
= &OMP_CLAUSE_CHAIN (*uc
);
14250 pc
= &OMP_CLAUSE_CHAIN (*pc
);
14252 *pc
= use_device_clauses
;
14253 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
14254 OMP_CLAUSES (expr
));
14258 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
14259 if ((ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
14260 gimple_omp_teams_set_host (as_a
<gomp_teams
*> (stmt
), true);
14263 gcc_unreachable ();
14266 gimplify_seq_add_stmt (pre_p
, stmt
);
14267 *expr_p
= NULL_TREE
;
14270 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
14271 target update constructs. */
14274 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
14276 tree expr
= *expr_p
;
14279 enum omp_region_type ort
= ORT_WORKSHARE
;
14281 switch (TREE_CODE (expr
))
14283 case OACC_ENTER_DATA
:
14284 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_DATA
;
14287 case OACC_EXIT_DATA
:
14288 kind
= GF_OMP_TARGET_KIND_OACC_EXIT_DATA
;
14292 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
14295 case OMP_TARGET_UPDATE
:
14296 kind
= GF_OMP_TARGET_KIND_UPDATE
;
14298 case OMP_TARGET_ENTER_DATA
:
14299 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
14301 case OMP_TARGET_EXIT_DATA
:
14302 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
14305 gcc_unreachable ();
14307 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
14308 ort
, TREE_CODE (expr
));
14309 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
14311 if (TREE_CODE (expr
) == OACC_UPDATE
14312 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
14313 OMP_CLAUSE_IF_PRESENT
))
14315 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
14317 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
14318 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
14319 switch (OMP_CLAUSE_MAP_KIND (c
))
14321 case GOMP_MAP_FORCE_TO
:
14322 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
14324 case GOMP_MAP_FORCE_FROM
:
14325 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FROM
);
14331 else if (TREE_CODE (expr
) == OACC_EXIT_DATA
14332 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
14333 OMP_CLAUSE_FINALIZE
))
14335 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
14337 bool have_clause
= false;
14338 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
14339 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
14340 switch (OMP_CLAUSE_MAP_KIND (c
))
14342 case GOMP_MAP_FROM
:
14343 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_FROM
);
14344 have_clause
= true;
14346 case GOMP_MAP_RELEASE
:
14347 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_DELETE
);
14348 have_clause
= true;
14350 case GOMP_MAP_TO_PSET
:
14351 /* Fortran arrays with descriptors must map that descriptor when
14352 doing standalone "attach" operations (in OpenACC). In that
14353 case GOMP_MAP_TO_PSET appears by itself with no preceding
14354 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
14356 case GOMP_MAP_POINTER
:
14357 /* TODO PR92929: we may see these here, but they'll always follow
14358 one of the clauses above, and will be handled by libgomp as
14359 one group, so no handling required here. */
14360 gcc_assert (have_clause
);
14362 case GOMP_MAP_DETACH
:
14363 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_DETACH
);
14364 have_clause
= false;
14366 case GOMP_MAP_STRUCT
:
14367 have_clause
= false;
14370 gcc_unreachable ();
14373 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
14375 gimplify_seq_add_stmt (pre_p
, stmt
);
14376 *expr_p
= NULL_TREE
;
14379 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
14380 stabilized the lhs of the atomic operation as *ADDR. Return true if
14381 EXPR is this stabilized form. */
14384 goa_lhs_expr_p (tree expr
, tree addr
)
14386 /* Also include casts to other type variants. The C front end is fond
14387 of adding these for e.g. volatile variables. This is like
14388 STRIP_TYPE_NOPS but includes the main variant lookup. */
14389 STRIP_USELESS_TYPE_CONVERSION (expr
);
14391 if (TREE_CODE (expr
) == INDIRECT_REF
)
14393 expr
= TREE_OPERAND (expr
, 0);
14394 while (expr
!= addr
14395 && (CONVERT_EXPR_P (expr
)
14396 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
14397 && TREE_CODE (expr
) == TREE_CODE (addr
)
14398 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
14400 expr
= TREE_OPERAND (expr
, 0);
14401 addr
= TREE_OPERAND (addr
, 0);
14405 return (TREE_CODE (addr
) == ADDR_EXPR
14406 && TREE_CODE (expr
) == ADDR_EXPR
14407 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
14409 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
14414 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
14415 expression does not involve the lhs, evaluate it into a temporary.
14416 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
14417 or -1 if an error was encountered. */
14420 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
14421 tree lhs_var
, tree
&target_expr
, bool rhs
, int depth
)
14423 tree expr
= *expr_p
;
14426 if (goa_lhs_expr_p (expr
, lhs_addr
))
14432 if (is_gimple_val (expr
))
14435 /* Maximum depth of lhs in expression is for the
14436 __builtin_clear_padding (...), __builtin_clear_padding (...),
14437 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
14441 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
14444 case tcc_comparison
:
14445 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
14446 lhs_var
, target_expr
, true, depth
);
14449 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
14450 lhs_var
, target_expr
, true, depth
);
14452 case tcc_expression
:
14453 switch (TREE_CODE (expr
))
14455 case TRUTH_ANDIF_EXPR
:
14456 case TRUTH_ORIF_EXPR
:
14457 case TRUTH_AND_EXPR
:
14458 case TRUTH_OR_EXPR
:
14459 case TRUTH_XOR_EXPR
:
14460 case BIT_INSERT_EXPR
:
14461 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
14462 lhs_addr
, lhs_var
, target_expr
, true,
14465 case TRUTH_NOT_EXPR
:
14466 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
14467 lhs_addr
, lhs_var
, target_expr
, true,
14471 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
, lhs_var
,
14472 target_expr
, true, depth
))
14474 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
14475 lhs_addr
, lhs_var
, target_expr
, true,
14477 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
14478 lhs_addr
, lhs_var
, target_expr
, false,
14483 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
, lhs_var
,
14484 target_expr
, true, depth
))
14486 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
14487 lhs_addr
, lhs_var
, target_expr
, false,
14490 case COMPOUND_EXPR
:
14491 /* Break out any preevaluations from cp_build_modify_expr. */
14492 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
14493 expr
= TREE_OPERAND (expr
, 1))
14495 /* Special-case __builtin_clear_padding call before
14496 __builtin_memcmp. */
14497 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
)
14499 tree fndecl
= get_callee_fndecl (TREE_OPERAND (expr
, 0));
14501 && fndecl_built_in_p (fndecl
, BUILT_IN_CLEAR_PADDING
)
14502 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
14504 || goa_stabilize_expr (&TREE_OPERAND (expr
, 0), NULL
,
14506 target_expr
, true, depth
)))
14510 saw_lhs
= goa_stabilize_expr (&TREE_OPERAND (expr
, 0),
14511 pre_p
, lhs_addr
, lhs_var
,
14512 target_expr
, true, depth
);
14513 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1),
14514 pre_p
, lhs_addr
, lhs_var
,
14515 target_expr
, rhs
, depth
);
14521 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
14524 return goa_stabilize_expr (&expr
, pre_p
, lhs_addr
, lhs_var
,
14525 target_expr
, rhs
, depth
);
14527 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
,
14528 target_expr
, rhs
, depth
);
14530 if (!goa_stabilize_expr (&TREE_OPERAND (expr
, 0), NULL
, lhs_addr
,
14531 lhs_var
, target_expr
, true, depth
))
14533 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
14534 lhs_addr
, lhs_var
, target_expr
, true,
14536 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
14537 lhs_addr
, lhs_var
, target_expr
, true,
14539 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 2), pre_p
,
14540 lhs_addr
, lhs_var
, target_expr
, true,
14544 if (TARGET_EXPR_INITIAL (expr
))
14546 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
,
14547 lhs_var
, target_expr
, true,
14550 if (expr
== target_expr
)
14554 saw_lhs
= goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr
),
14555 pre_p
, lhs_addr
, lhs_var
,
14556 target_expr
, true, depth
);
14557 if (saw_lhs
&& target_expr
== NULL_TREE
&& pre_p
)
14558 target_expr
= expr
;
14566 case tcc_reference
:
14567 if (TREE_CODE (expr
) == BIT_FIELD_REF
14568 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
14569 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
14570 lhs_addr
, lhs_var
, target_expr
, true,
14574 if (TREE_CODE (expr
) == CALL_EXPR
)
14576 if (tree fndecl
= get_callee_fndecl (expr
))
14577 if (fndecl_built_in_p (fndecl
, BUILT_IN_CLEAR_PADDING
)
14578 || fndecl_built_in_p (fndecl
, BUILT_IN_MEMCMP
))
14580 int nargs
= call_expr_nargs (expr
);
14581 for (int i
= 0; i
< nargs
; i
++)
14582 saw_lhs
|= goa_stabilize_expr (&CALL_EXPR_ARG (expr
, i
),
14583 pre_p
, lhs_addr
, lhs_var
,
14584 target_expr
, true, depth
);
14593 if (saw_lhs
== 0 && pre_p
)
14595 enum gimplify_status gs
;
14596 if (TREE_CODE (expr
) == CALL_EXPR
&& VOID_TYPE_P (TREE_TYPE (expr
)))
14598 gimplify_stmt (&expr
, pre_p
);
14602 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
14604 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
);
14605 if (gs
!= GS_ALL_DONE
)
14612 /* Gimplify an OMP_ATOMIC statement. */
14614 static enum gimplify_status
14615 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
14617 tree addr
= TREE_OPERAND (*expr_p
, 0);
14618 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
14619 ? NULL
: TREE_OPERAND (*expr_p
, 1);
14620 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
14622 gomp_atomic_load
*loadstmt
;
14623 gomp_atomic_store
*storestmt
;
14624 tree target_expr
= NULL_TREE
;
14626 tmp_load
= create_tmp_reg (type
);
14628 && goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
, target_expr
,
14632 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
14636 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
,
14637 OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
14638 gimplify_seq_add_stmt (pre_p
, loadstmt
);
14641 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
14642 representatives. Use BIT_FIELD_REF on the lhs instead. */
14644 if (TREE_CODE (rhs
) == COND_EXPR
)
14645 rhsarg
= TREE_OPERAND (rhs
, 1);
14646 if (TREE_CODE (rhsarg
) == BIT_INSERT_EXPR
14647 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load
)))
14649 tree bitpos
= TREE_OPERAND (rhsarg
, 2);
14650 tree op1
= TREE_OPERAND (rhsarg
, 1);
14652 tree tmp_store
= tmp_load
;
14653 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_CAPTURE_OLD
)
14654 tmp_store
= get_initialized_tmp_var (tmp_load
, pre_p
);
14655 if (INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
14656 bitsize
= bitsize_int (TYPE_PRECISION (TREE_TYPE (op1
)));
14658 bitsize
= TYPE_SIZE (TREE_TYPE (op1
));
14659 gcc_assert (TREE_OPERAND (rhsarg
, 0) == tmp_load
);
14660 tree t
= build2_loc (EXPR_LOCATION (rhsarg
),
14661 MODIFY_EXPR
, void_type_node
,
14662 build3_loc (EXPR_LOCATION (rhsarg
),
14663 BIT_FIELD_REF
, TREE_TYPE (op1
),
14664 tmp_store
, bitsize
, bitpos
), op1
);
14665 if (TREE_CODE (rhs
) == COND_EXPR
)
14666 t
= build3_loc (EXPR_LOCATION (rhs
), COND_EXPR
, void_type_node
,
14667 TREE_OPERAND (rhs
, 0), t
, void_node
);
14668 gimplify_and_add (t
, pre_p
);
14671 bool save_allow_rhs_cond_expr
= gimplify_ctxp
->allow_rhs_cond_expr
;
14672 if (TREE_CODE (rhs
) == COND_EXPR
)
14673 gimplify_ctxp
->allow_rhs_cond_expr
= true;
14674 enum gimplify_status gs
= gimplify_expr (&rhs
, pre_p
, NULL
,
14675 is_gimple_val
, fb_rvalue
);
14676 gimplify_ctxp
->allow_rhs_cond_expr
= save_allow_rhs_cond_expr
;
14677 if (gs
!= GS_ALL_DONE
)
14681 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
14684 = gimple_build_omp_atomic_store (rhs
, OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
14685 if (TREE_CODE (*expr_p
) != OMP_ATOMIC_READ
&& OMP_ATOMIC_WEAK (*expr_p
))
14687 gimple_omp_atomic_set_weak (loadstmt
);
14688 gimple_omp_atomic_set_weak (storestmt
);
14690 gimplify_seq_add_stmt (pre_p
, storestmt
);
14691 switch (TREE_CODE (*expr_p
))
14693 case OMP_ATOMIC_READ
:
14694 case OMP_ATOMIC_CAPTURE_OLD
:
14695 *expr_p
= tmp_load
;
14696 gimple_omp_atomic_set_need_value (loadstmt
);
14698 case OMP_ATOMIC_CAPTURE_NEW
:
14700 gimple_omp_atomic_set_need_value (storestmt
);
14707 return GS_ALL_DONE
;
14710 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
14711 body, and adding some EH bits. */
14713 static enum gimplify_status
14714 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
14716 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
14718 gtransaction
*trans_stmt
;
14719 gimple_seq body
= NULL
;
14722 /* Wrap the transaction body in a BIND_EXPR so we have a context
14723 where to put decls for OMP. */
14724 if (TREE_CODE (tbody
) != BIND_EXPR
)
14726 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
14727 TREE_SIDE_EFFECTS (bind
) = 1;
14728 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
14729 TRANSACTION_EXPR_BODY (expr
) = bind
;
14732 push_gimplify_context ();
14733 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
14735 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
14736 pop_gimplify_context (body_stmt
);
14738 trans_stmt
= gimple_build_transaction (body
);
14739 if (TRANSACTION_EXPR_OUTER (expr
))
14740 subcode
= GTMA_IS_OUTER
;
14741 else if (TRANSACTION_EXPR_RELAXED (expr
))
14742 subcode
= GTMA_IS_RELAXED
;
14743 gimple_transaction_set_subcode (trans_stmt
, subcode
);
14745 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
14753 *expr_p
= NULL_TREE
;
14754 return GS_ALL_DONE
;
14757 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
14758 is the OMP_BODY of the original EXPR (which has already been
14759 gimplified so it's not present in the EXPR).
14761 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
14764 gimplify_omp_ordered (tree expr
, gimple_seq body
)
14769 tree source_c
= NULL_TREE
;
14770 tree sink_c
= NULL_TREE
;
14772 if (gimplify_omp_ctxp
)
14774 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
14775 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
14776 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ()
14777 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
14778 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
))
14780 error_at (OMP_CLAUSE_LOCATION (c
),
14781 "%<ordered%> construct with %<depend%> clause must be "
14782 "closely nested inside a loop with %<ordered%> clause "
14783 "with a parameter");
14786 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
14787 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
14790 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
14791 decls
&& TREE_CODE (decls
) == TREE_LIST
;
14792 decls
= TREE_CHAIN (decls
), ++i
)
14793 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
14795 else if (TREE_VALUE (decls
)
14796 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
14798 error_at (OMP_CLAUSE_LOCATION (c
),
14799 "variable %qE is not an iteration "
14800 "of outermost loop %d, expected %qE",
14801 TREE_VALUE (decls
), i
+ 1,
14802 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
14808 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
14809 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
14811 error_at (OMP_CLAUSE_LOCATION (c
),
14812 "number of variables in %<depend%> clause with "
14813 "%<sink%> modifier does not match number of "
14814 "iteration variables");
14819 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
14820 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
14824 error_at (OMP_CLAUSE_LOCATION (c
),
14825 "more than one %<depend%> clause with %<source%> "
14826 "modifier on an %<ordered%> construct");
14833 if (source_c
&& sink_c
)
14835 error_at (OMP_CLAUSE_LOCATION (source_c
),
14836 "%<depend%> clause with %<source%> modifier specified "
14837 "together with %<depend%> clauses with %<sink%> modifier "
14838 "on the same construct");
14843 return gimple_build_nop ();
14844 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
14847 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
14848 expression produces a value to be used as an operand inside a GIMPLE
14849 statement, the value will be stored back in *EXPR_P. This value will
14850 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
14851 an SSA_NAME. The corresponding sequence of GIMPLE statements is
14852 emitted in PRE_P and POST_P.
14854 Additionally, this process may overwrite parts of the input
14855 expression during gimplification. Ideally, it should be
14856 possible to do non-destructive gimplification.
14858 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
14859 the expression needs to evaluate to a value to be used as
14860 an operand in a GIMPLE statement, this value will be stored in
14861 *EXPR_P on exit. This happens when the caller specifies one
14862 of fb_lvalue or fb_rvalue fallback flags.
14864 PRE_P will contain the sequence of GIMPLE statements corresponding
14865 to the evaluation of EXPR and all the side-effects that must
14866 be executed before the main expression. On exit, the last
14867 statement of PRE_P is the core statement being gimplified. For
14868 instance, when gimplifying 'if (++a)' the last statement in
14869 PRE_P will be 'if (t.1)' where t.1 is the result of
14870 pre-incrementing 'a'.
14872 POST_P will contain the sequence of GIMPLE statements corresponding
14873 to the evaluation of all the side-effects that must be executed
14874 after the main expression. If this is NULL, the post
14875 side-effects are stored at the end of PRE_P.
14877 The reason why the output is split in two is to handle post
14878 side-effects explicitly. In some cases, an expression may have
14879 inner and outer post side-effects which need to be emitted in
14880 an order different from the one given by the recursive
14881 traversal. For instance, for the expression (*p--)++ the post
14882 side-effects of '--' must actually occur *after* the post
14883 side-effects of '++'. However, gimplification will first visit
14884 the inner expression, so if a separate POST sequence was not
14885 used, the resulting sequence would be:
14892 However, the post-decrement operation in line #2 must not be
14893 evaluated until after the store to *p at line #4, so the
14894 correct sequence should be:
14901 So, by specifying a separate post queue, it is possible
14902 to emit the post side-effects in the correct order.
14903 If POST_P is NULL, an internal queue will be used. Before
14904 returning to the caller, the sequence POST_P is appended to
14905 the main output sequence PRE_P.
14907 GIMPLE_TEST_F points to a function that takes a tree T and
14908 returns nonzero if T is in the GIMPLE form requested by the
14909 caller. The GIMPLE predicates are in gimple.cc.
14911 FALLBACK tells the function what sort of a temporary we want if
14912 gimplification cannot produce an expression that complies with
14915 fb_none means that no temporary should be generated
14916 fb_rvalue means that an rvalue is OK to generate
14917 fb_lvalue means that an lvalue is OK to generate
14918 fb_either means that either is OK, but an lvalue is preferable.
14919 fb_mayfail means that gimplification may fail (in which case
14920 GS_ERROR will be returned)
14922 The return value is either GS_ERROR or GS_ALL_DONE, since this
14923 function iterates until EXPR is completely gimplified or an error
14926 enum gimplify_status
14927 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
14928 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
14931 gimple_seq internal_pre
= NULL
;
14932 gimple_seq internal_post
= NULL
;
14935 location_t saved_location
;
14936 enum gimplify_status ret
;
14937 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
14940 save_expr
= *expr_p
;
14941 if (save_expr
== NULL_TREE
)
14942 return GS_ALL_DONE
;
14944 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
14945 is_statement
= gimple_test_f
== is_gimple_stmt
;
14947 gcc_assert (pre_p
);
14949 /* Consistency checks. */
14950 if (gimple_test_f
== is_gimple_reg
)
14951 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
14952 else if (gimple_test_f
== is_gimple_val
14953 || gimple_test_f
== is_gimple_call_addr
14954 || gimple_test_f
== is_gimple_condexpr
14955 || gimple_test_f
== is_gimple_condexpr_for_cond
14956 || gimple_test_f
== is_gimple_mem_rhs
14957 || gimple_test_f
== is_gimple_mem_rhs_or_call
14958 || gimple_test_f
== is_gimple_reg_rhs
14959 || gimple_test_f
== is_gimple_reg_rhs_or_call
14960 || gimple_test_f
== is_gimple_asm_val
14961 || gimple_test_f
== is_gimple_mem_ref_addr
)
14962 gcc_assert (fallback
& fb_rvalue
);
14963 else if (gimple_test_f
== is_gimple_min_lval
14964 || gimple_test_f
== is_gimple_lvalue
)
14965 gcc_assert (fallback
& fb_lvalue
);
14966 else if (gimple_test_f
== is_gimple_addressable
)
14967 gcc_assert (fallback
& fb_either
);
14968 else if (gimple_test_f
== is_gimple_stmt
)
14969 gcc_assert (fallback
== fb_none
);
14972 /* We should have recognized the GIMPLE_TEST_F predicate to
14973 know what kind of fallback to use in case a temporary is
14974 needed to hold the value or address of *EXPR_P. */
14975 gcc_unreachable ();
14978 /* We used to check the predicate here and return immediately if it
14979 succeeds. This is wrong; the design is for gimplification to be
14980 idempotent, and for the predicates to only test for valid forms, not
14981 whether they are fully simplified. */
14983 pre_p
= &internal_pre
;
14985 if (post_p
== NULL
)
14986 post_p
= &internal_post
;
14988 /* Remember the last statements added to PRE_P and POST_P. Every
14989 new statement added by the gimplification helpers needs to be
14990 annotated with location information. To centralize the
14991 responsibility, we remember the last statement that had been
14992 added to both queues before gimplifying *EXPR_P. If
14993 gimplification produces new statements in PRE_P and POST_P, those
14994 statements will be annotated with the same location information
14996 pre_last_gsi
= gsi_last (*pre_p
);
14997 post_last_gsi
= gsi_last (*post_p
);
14999 saved_location
= input_location
;
15000 if (save_expr
!= error_mark_node
15001 && EXPR_HAS_LOCATION (*expr_p
))
15002 input_location
= EXPR_LOCATION (*expr_p
);
15004 /* Loop over the specific gimplifiers until the toplevel node
15005 remains the same. */
15008 /* Strip away as many useless type conversions as possible
15009 at the toplevel. */
15010 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
15012 /* Remember the expr. */
15013 save_expr
= *expr_p
;
15015 /* Die, die, die, my darling. */
15016 if (error_operand_p (save_expr
))
15022 /* Do any language-specific gimplification. */
15023 ret
= ((enum gimplify_status
)
15024 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
15027 if (*expr_p
== NULL_TREE
)
15029 if (*expr_p
!= save_expr
)
15032 else if (ret
!= GS_UNHANDLED
)
15035 /* Make sure that all the cases set 'ret' appropriately. */
15036 ret
= GS_UNHANDLED
;
15037 switch (TREE_CODE (*expr_p
))
15039 /* First deal with the special cases. */
15041 case POSTINCREMENT_EXPR
:
15042 case POSTDECREMENT_EXPR
:
15043 case PREINCREMENT_EXPR
:
15044 case PREDECREMENT_EXPR
:
15045 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
15046 fallback
!= fb_none
,
15047 TREE_TYPE (*expr_p
));
15050 case VIEW_CONVERT_EXPR
:
15051 if ((fallback
& fb_rvalue
)
15052 && is_gimple_reg_type (TREE_TYPE (*expr_p
))
15053 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
15055 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
15056 post_p
, is_gimple_val
, fb_rvalue
);
15057 recalculate_side_effects (*expr_p
);
15063 case ARRAY_RANGE_REF
:
15064 case REALPART_EXPR
:
15065 case IMAGPART_EXPR
:
15066 case COMPONENT_REF
:
15067 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
15068 fallback
? fallback
: fb_rvalue
);
15072 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
15074 /* C99 code may assign to an array in a structure value of a
15075 conditional expression, and this has undefined behavior
15076 only on execution, so create a temporary if an lvalue is
15078 if (fallback
== fb_lvalue
)
15080 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
15081 mark_addressable (*expr_p
);
15087 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
15089 /* C99 code may assign to an array in a structure returned
15090 from a function, and this has undefined behavior only on
15091 execution, so create a temporary if an lvalue is
15093 if (fallback
== fb_lvalue
)
15095 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
15096 mark_addressable (*expr_p
);
15102 gcc_unreachable ();
15104 case COMPOUND_EXPR
:
15105 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
15108 case COMPOUND_LITERAL_EXPR
:
15109 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
15110 gimple_test_f
, fallback
);
15115 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
15116 fallback
!= fb_none
);
15119 case TRUTH_ANDIF_EXPR
:
15120 case TRUTH_ORIF_EXPR
:
15122 /* Preserve the original type of the expression and the
15123 source location of the outer expression. */
15124 tree org_type
= TREE_TYPE (*expr_p
);
15125 *expr_p
= gimple_boolify (*expr_p
);
15126 *expr_p
= build3_loc (input_location
, COND_EXPR
,
15130 org_type
, boolean_true_node
),
15133 org_type
, boolean_false_node
));
15138 case TRUTH_NOT_EXPR
:
15140 tree type
= TREE_TYPE (*expr_p
);
15141 /* The parsers are careful to generate TRUTH_NOT_EXPR
15142 only with operands that are always zero or one.
15143 We do not fold here but handle the only interesting case
15144 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
15145 *expr_p
= gimple_boolify (*expr_p
);
15146 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
15147 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
15148 TREE_TYPE (*expr_p
),
15149 TREE_OPERAND (*expr_p
, 0));
15151 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
15152 TREE_TYPE (*expr_p
),
15153 TREE_OPERAND (*expr_p
, 0),
15154 build_int_cst (TREE_TYPE (*expr_p
), 1));
15155 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
15156 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
15162 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
15165 case ANNOTATE_EXPR
:
15167 tree cond
= TREE_OPERAND (*expr_p
, 0);
15168 tree kind
= TREE_OPERAND (*expr_p
, 1);
15169 tree data
= TREE_OPERAND (*expr_p
, 2);
15170 tree type
= TREE_TYPE (cond
);
15171 if (!INTEGRAL_TYPE_P (type
))
15177 tree tmp
= create_tmp_var (type
);
15178 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
15180 = gimple_build_call_internal (IFN_ANNOTATE
, 3, cond
, kind
, data
);
15181 gimple_call_set_lhs (call
, tmp
);
15182 gimplify_seq_add_stmt (pre_p
, call
);
15189 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
15193 if (IS_EMPTY_STMT (*expr_p
))
15199 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
15200 || fallback
== fb_none
)
15202 /* Just strip a conversion to void (or in void context) and
15204 *expr_p
= TREE_OPERAND (*expr_p
, 0);
15209 ret
= gimplify_conversion (expr_p
);
15210 if (ret
== GS_ERROR
)
15212 if (*expr_p
!= save_expr
)
15216 case FIX_TRUNC_EXPR
:
15217 /* unary_expr: ... | '(' cast ')' val | ... */
15218 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
15219 is_gimple_val
, fb_rvalue
);
15220 recalculate_side_effects (*expr_p
);
15225 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
15226 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
15227 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
15229 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
15230 if (*expr_p
!= save_expr
)
15236 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
15237 is_gimple_reg
, fb_rvalue
);
15238 if (ret
== GS_ERROR
)
15241 recalculate_side_effects (*expr_p
);
15242 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
15243 TREE_TYPE (*expr_p
),
15244 TREE_OPERAND (*expr_p
, 0),
15245 build_int_cst (saved_ptr_type
, 0));
15246 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
15247 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
15252 /* We arrive here through the various re-gimplifcation paths. */
15254 /* First try re-folding the whole thing. */
15255 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
15256 TREE_OPERAND (*expr_p
, 0),
15257 TREE_OPERAND (*expr_p
, 1));
15260 REF_REVERSE_STORAGE_ORDER (tmp
)
15261 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
15263 recalculate_side_effects (*expr_p
);
15267 /* Avoid re-gimplifying the address operand if it is already
15268 in suitable form. Re-gimplifying would mark the address
15269 operand addressable. Always gimplify when not in SSA form
15270 as we still may have to gimplify decls with value-exprs. */
15271 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
15272 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
15274 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
15275 is_gimple_mem_ref_addr
, fb_rvalue
);
15276 if (ret
== GS_ERROR
)
15279 recalculate_side_effects (*expr_p
);
15283 /* Constants need not be gimplified. */
15290 /* Drop the overflow flag on constants, we do not want
15291 that in the GIMPLE IL. */
15292 if (TREE_OVERFLOW_P (*expr_p
))
15293 *expr_p
= drop_tree_overflow (*expr_p
);
15298 /* If we require an lvalue, such as for ADDR_EXPR, retain the
15299 CONST_DECL node. Otherwise the decl is replaceable by its
15301 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
15302 if (fallback
& fb_lvalue
)
15306 *expr_p
= DECL_INITIAL (*expr_p
);
15312 ret
= gimplify_decl_expr (expr_p
, pre_p
);
15316 ret
= gimplify_bind_expr (expr_p
, pre_p
);
15320 ret
= gimplify_loop_expr (expr_p
, pre_p
);
15324 ret
= gimplify_switch_expr (expr_p
, pre_p
);
15328 ret
= gimplify_exit_expr (expr_p
);
15332 /* If the target is not LABEL, then it is a computed jump
15333 and the target needs to be gimplified. */
15334 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
15336 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
15337 NULL
, is_gimple_val
, fb_rvalue
);
15338 if (ret
== GS_ERROR
)
15341 gimplify_seq_add_stmt (pre_p
,
15342 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
15347 gimplify_seq_add_stmt (pre_p
,
15348 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
15349 PREDICT_EXPR_OUTCOME (*expr_p
)));
15354 ret
= gimplify_label_expr (expr_p
, pre_p
);
15355 label
= LABEL_EXPR_LABEL (*expr_p
);
15356 gcc_assert (decl_function_context (label
) == current_function_decl
);
15358 /* If the label is used in a goto statement, or address of the label
15359 is taken, we need to unpoison all variables that were seen so far.
15360 Doing so would prevent us from reporting a false positives. */
15361 if (asan_poisoned_variables
15362 && asan_used_labels
!= NULL
15363 && asan_used_labels
->contains (label
)
15364 && !gimplify_omp_ctxp
)
15365 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
15368 case CASE_LABEL_EXPR
:
15369 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
15371 if (gimplify_ctxp
->live_switch_vars
)
15372 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
15377 ret
= gimplify_return_expr (*expr_p
, pre_p
);
15381 /* Don't reduce this in place; let gimplify_init_constructor work its
15382 magic. Buf if we're just elaborating this for side effects, just
15383 gimplify any element that has side-effects. */
15384 if (fallback
== fb_none
)
15386 unsigned HOST_WIDE_INT ix
;
15388 tree temp
= NULL_TREE
;
15389 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
15390 if (TREE_SIDE_EFFECTS (val
))
15391 append_to_statement_list (val
, &temp
);
15394 ret
= temp
? GS_OK
: GS_ALL_DONE
;
15396 /* C99 code may assign to an array in a constructed
15397 structure or union, and this has undefined behavior only
15398 on execution, so create a temporary if an lvalue is
15400 else if (fallback
== fb_lvalue
)
15402 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
15403 mark_addressable (*expr_p
);
15410 /* The following are special cases that are not handled by the
15411 original GIMPLE grammar. */
15413 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
15416 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
15419 case BIT_FIELD_REF
:
15420 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
15421 post_p
, is_gimple_lvalue
, fb_either
);
15422 recalculate_side_effects (*expr_p
);
15425 case TARGET_MEM_REF
:
15427 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
15429 if (TMR_BASE (*expr_p
))
15430 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
15431 post_p
, is_gimple_mem_ref_addr
, fb_either
);
15432 if (TMR_INDEX (*expr_p
))
15433 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
15434 post_p
, is_gimple_val
, fb_rvalue
);
15435 if (TMR_INDEX2 (*expr_p
))
15436 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
15437 post_p
, is_gimple_val
, fb_rvalue
);
15438 /* TMR_STEP and TMR_OFFSET are always integer constants. */
15439 ret
= MIN (r0
, r1
);
15443 case NON_LVALUE_EXPR
:
15444 /* This should have been stripped above. */
15445 gcc_unreachable ();
15448 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
15451 case TRY_FINALLY_EXPR
:
15452 case TRY_CATCH_EXPR
:
15454 gimple_seq eval
, cleanup
;
15457 /* Calls to destructors are generated automatically in FINALLY/CATCH
15458 block. They should have location as UNKNOWN_LOCATION. However,
15459 gimplify_call_expr will reset these call stmts to input_location
15460 if it finds stmt's location is unknown. To prevent resetting for
15461 destructors, we set the input_location to unknown.
15462 Note that this only affects the destructor calls in FINALLY/CATCH
15463 block, and will automatically reset to its original value by the
15464 end of gimplify_expr. */
15465 input_location
= UNKNOWN_LOCATION
;
15466 eval
= cleanup
= NULL
;
15467 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
15468 if (TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
15469 && TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == EH_ELSE_EXPR
)
15471 gimple_seq n
= NULL
, e
= NULL
;
15472 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
15474 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
15476 if (!gimple_seq_empty_p (n
) && !gimple_seq_empty_p (e
))
15478 geh_else
*stmt
= gimple_build_eh_else (n
, e
);
15479 gimple_seq_add_stmt (&cleanup
, stmt
);
15483 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
15484 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
15485 if (gimple_seq_empty_p (cleanup
))
15487 gimple_seq_add_seq (pre_p
, eval
);
15491 try_
= gimple_build_try (eval
, cleanup
,
15492 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
15493 ? GIMPLE_TRY_FINALLY
15494 : GIMPLE_TRY_CATCH
);
15495 if (EXPR_HAS_LOCATION (save_expr
))
15496 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
15497 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
15498 gimple_set_location (try_
, saved_location
);
15499 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
15500 gimple_try_set_catch_is_cleanup (try_
,
15501 TRY_CATCH_IS_CLEANUP (*expr_p
));
15502 gimplify_seq_add_stmt (pre_p
, try_
);
15507 case CLEANUP_POINT_EXPR
:
15508 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
15512 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
15518 gimple_seq handler
= NULL
;
15519 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
15520 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
15521 gimplify_seq_add_stmt (pre_p
, c
);
15526 case EH_FILTER_EXPR
:
15529 gimple_seq failure
= NULL
;
15531 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
15532 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
15533 copy_warning (ehf
, *expr_p
);
15534 gimplify_seq_add_stmt (pre_p
, ehf
);
15541 enum gimplify_status r0
, r1
;
15542 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
15543 post_p
, is_gimple_val
, fb_rvalue
);
15544 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
15545 post_p
, is_gimple_val
, fb_rvalue
);
15546 TREE_SIDE_EFFECTS (*expr_p
) = 0;
15547 ret
= MIN (r0
, r1
);
15552 /* We get here when taking the address of a label. We mark
15553 the label as "forced"; meaning it can never be removed and
15554 it is a potential target for any computed goto. */
15555 FORCED_LABEL (*expr_p
) = 1;
15559 case STATEMENT_LIST
:
15560 ret
= gimplify_statement_list (expr_p
, pre_p
);
15563 case WITH_SIZE_EXPR
:
15565 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
15566 post_p
== &internal_post
? NULL
: post_p
,
15567 gimple_test_f
, fallback
);
15568 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
15569 is_gimple_val
, fb_rvalue
);
15576 ret
= gimplify_var_or_parm_decl (expr_p
);
15580 /* When within an OMP context, notice uses of variables. */
15581 if (gimplify_omp_ctxp
)
15582 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
15586 case DEBUG_EXPR_DECL
:
15587 gcc_unreachable ();
15589 case DEBUG_BEGIN_STMT
:
15590 gimplify_seq_add_stmt (pre_p
,
15591 gimple_build_debug_begin_stmt
15592 (TREE_BLOCK (*expr_p
),
15593 EXPR_LOCATION (*expr_p
)));
15599 /* Allow callbacks into the gimplifier during optimization. */
15604 gimplify_omp_parallel (expr_p
, pre_p
);
15609 gimplify_omp_task (expr_p
, pre_p
);
15615 /* Temporarily disable into_ssa, as scan_omp_simd
15616 which calls copy_gimple_seq_and_replace_locals can't deal
15617 with SSA_NAMEs defined outside of the body properly. */
15618 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
15619 gimplify_ctxp
->into_ssa
= false;
15620 ret
= gimplify_omp_for (expr_p
, pre_p
);
15621 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
15626 case OMP_DISTRIBUTE
:
15629 ret
= gimplify_omp_for (expr_p
, pre_p
);
15633 ret
= gimplify_omp_loop (expr_p
, pre_p
);
15637 gimplify_oacc_cache (expr_p
, pre_p
);
15642 gimplify_oacc_declare (expr_p
, pre_p
);
15646 case OACC_HOST_DATA
:
15649 case OACC_PARALLEL
:
15655 case OMP_TARGET_DATA
:
15657 gimplify_omp_workshare (expr_p
, pre_p
);
15661 case OACC_ENTER_DATA
:
15662 case OACC_EXIT_DATA
:
15664 case OMP_TARGET_UPDATE
:
15665 case OMP_TARGET_ENTER_DATA
:
15666 case OMP_TARGET_EXIT_DATA
:
15667 gimplify_omp_target_update (expr_p
, pre_p
);
15678 gimple_seq body
= NULL
;
15680 bool saved_in_omp_construct
= in_omp_construct
;
15682 in_omp_construct
= true;
15683 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
15684 in_omp_construct
= saved_in_omp_construct
;
15685 switch (TREE_CODE (*expr_p
))
15688 g
= gimple_build_omp_section (body
);
15691 g
= gimple_build_omp_master (body
);
15694 g
= gimplify_omp_ordered (*expr_p
, body
);
15697 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p
),
15698 pre_p
, ORT_WORKSHARE
, OMP_MASKED
);
15699 gimplify_adjust_omp_clauses (pre_p
, body
,
15700 &OMP_MASKED_CLAUSES (*expr_p
),
15702 g
= gimple_build_omp_masked (body
,
15703 OMP_MASKED_CLAUSES (*expr_p
));
15706 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
15707 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
15708 gimplify_adjust_omp_clauses (pre_p
, body
,
15709 &OMP_CRITICAL_CLAUSES (*expr_p
),
15711 g
= gimple_build_omp_critical (body
,
15712 OMP_CRITICAL_NAME (*expr_p
),
15713 OMP_CRITICAL_CLAUSES (*expr_p
));
15716 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p
),
15717 pre_p
, ORT_WORKSHARE
, OMP_SCAN
);
15718 gimplify_adjust_omp_clauses (pre_p
, body
,
15719 &OMP_SCAN_CLAUSES (*expr_p
),
15721 g
= gimple_build_omp_scan (body
, OMP_SCAN_CLAUSES (*expr_p
));
15724 gcc_unreachable ();
15726 gimplify_seq_add_stmt (pre_p
, g
);
15731 case OMP_TASKGROUP
:
15733 gimple_seq body
= NULL
;
15735 tree
*pclauses
= &OMP_TASKGROUP_CLAUSES (*expr_p
);
15736 bool saved_in_omp_construct
= in_omp_construct
;
15737 gimplify_scan_omp_clauses (pclauses
, pre_p
, ORT_TASKGROUP
,
15739 gimplify_adjust_omp_clauses (pre_p
, NULL
, pclauses
, OMP_TASKGROUP
);
15741 in_omp_construct
= true;
15742 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
15743 in_omp_construct
= saved_in_omp_construct
;
15744 gimple_seq cleanup
= NULL
;
15745 tree fn
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
15746 gimple
*g
= gimple_build_call (fn
, 0);
15747 gimple_seq_add_stmt (&cleanup
, g
);
15748 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
15750 gimple_seq_add_stmt (&body
, g
);
15751 g
= gimple_build_omp_taskgroup (body
, *pclauses
);
15752 gimplify_seq_add_stmt (pre_p
, g
);
15758 case OMP_ATOMIC_READ
:
15759 case OMP_ATOMIC_CAPTURE_OLD
:
15760 case OMP_ATOMIC_CAPTURE_NEW
:
15761 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
15764 case TRANSACTION_EXPR
:
15765 ret
= gimplify_transaction (expr_p
, pre_p
);
15768 case TRUTH_AND_EXPR
:
15769 case TRUTH_OR_EXPR
:
15770 case TRUTH_XOR_EXPR
:
15772 tree orig_type
= TREE_TYPE (*expr_p
);
15773 tree new_type
, xop0
, xop1
;
15774 *expr_p
= gimple_boolify (*expr_p
);
15775 new_type
= TREE_TYPE (*expr_p
);
15776 if (!useless_type_conversion_p (orig_type
, new_type
))
15778 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
15783 /* Boolified binary truth expressions are semantically equivalent
15784 to bitwise binary expressions. Canonicalize them to the
15785 bitwise variant. */
15786 switch (TREE_CODE (*expr_p
))
15788 case TRUTH_AND_EXPR
:
15789 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
15791 case TRUTH_OR_EXPR
:
15792 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
15794 case TRUTH_XOR_EXPR
:
15795 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
15800 /* Now make sure that operands have compatible type to
15801 expression's new_type. */
15802 xop0
= TREE_OPERAND (*expr_p
, 0);
15803 xop1
= TREE_OPERAND (*expr_p
, 1);
15804 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
15805 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
15808 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
15809 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
15812 /* Continue classified as tcc_binary. */
15816 case VEC_COND_EXPR
:
15819 case VEC_PERM_EXPR
:
15820 /* Classified as tcc_expression. */
15823 case BIT_INSERT_EXPR
:
15824 /* Argument 3 is a constant. */
15827 case POINTER_PLUS_EXPR
:
15829 enum gimplify_status r0
, r1
;
15830 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
15831 post_p
, is_gimple_val
, fb_rvalue
);
15832 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
15833 post_p
, is_gimple_val
, fb_rvalue
);
15834 recalculate_side_effects (*expr_p
);
15835 ret
= MIN (r0
, r1
);
15840 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
15842 case tcc_comparison
:
15843 /* Handle comparison of objects of non scalar mode aggregates
15844 with a call to memcmp. It would be nice to only have to do
15845 this for variable-sized objects, but then we'd have to allow
15846 the same nest of reference nodes we allow for MODIFY_EXPR and
15847 that's too complex.
15849 Compare scalar mode aggregates as scalar mode values. Using
15850 memcmp for them would be very inefficient at best, and is
15851 plain wrong if bitfields are involved. */
15853 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
15855 /* Vector comparisons need no boolification. */
15856 if (TREE_CODE (type
) == VECTOR_TYPE
)
15858 else if (!AGGREGATE_TYPE_P (type
))
15860 tree org_type
= TREE_TYPE (*expr_p
);
15861 *expr_p
= gimple_boolify (*expr_p
);
15862 if (!useless_type_conversion_p (org_type
,
15863 TREE_TYPE (*expr_p
)))
15865 *expr_p
= fold_convert_loc (input_location
,
15866 org_type
, *expr_p
);
15872 else if (TYPE_MODE (type
) != BLKmode
)
15873 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
15875 ret
= gimplify_variable_sized_compare (expr_p
);
15880 /* If *EXPR_P does not need to be special-cased, handle it
15881 according to its class. */
15883 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
15884 post_p
, is_gimple_val
, fb_rvalue
);
15890 enum gimplify_status r0
, r1
;
15892 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
15893 post_p
, is_gimple_val
, fb_rvalue
);
15894 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
15895 post_p
, is_gimple_val
, fb_rvalue
);
15897 ret
= MIN (r0
, r1
);
15903 enum gimplify_status r0
, r1
, r2
;
15905 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
15906 post_p
, is_gimple_val
, fb_rvalue
);
15907 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
15908 post_p
, is_gimple_val
, fb_rvalue
);
15909 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
15910 post_p
, is_gimple_val
, fb_rvalue
);
15912 ret
= MIN (MIN (r0
, r1
), r2
);
15916 case tcc_declaration
:
15919 goto dont_recalculate
;
15922 gcc_unreachable ();
15925 recalculate_side_effects (*expr_p
);
15931 gcc_assert (*expr_p
|| ret
!= GS_OK
);
15933 while (ret
== GS_OK
);
15935 /* If we encountered an error_mark somewhere nested inside, either
15936 stub out the statement or propagate the error back out. */
15937 if (ret
== GS_ERROR
)
15944 /* This was only valid as a return value from the langhook, which
15945 we handled. Make sure it doesn't escape from any other context. */
15946 gcc_assert (ret
!= GS_UNHANDLED
);
15948 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
15950 /* We aren't looking for a value, and we don't have a valid
15951 statement. If it doesn't have side-effects, throw it away.
15952 We can also get here with code such as "*&&L;", where L is
15953 a LABEL_DECL that is marked as FORCED_LABEL. */
15954 if (TREE_CODE (*expr_p
) == LABEL_DECL
15955 || !TREE_SIDE_EFFECTS (*expr_p
))
15957 else if (!TREE_THIS_VOLATILE (*expr_p
))
15959 /* This is probably a _REF that contains something nested that
15960 has side effects. Recurse through the operands to find it. */
15961 enum tree_code code
= TREE_CODE (*expr_p
);
15965 case COMPONENT_REF
:
15966 case REALPART_EXPR
:
15967 case IMAGPART_EXPR
:
15968 case VIEW_CONVERT_EXPR
:
15969 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
15970 gimple_test_f
, fallback
);
15974 case ARRAY_RANGE_REF
:
15975 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
15976 gimple_test_f
, fallback
);
15977 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
15978 gimple_test_f
, fallback
);
15982 /* Anything else with side-effects must be converted to
15983 a valid statement before we get here. */
15984 gcc_unreachable ();
15989 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
15990 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
15991 && !is_empty_type (TREE_TYPE (*expr_p
)))
15993 /* Historically, the compiler has treated a bare reference
15994 to a non-BLKmode volatile lvalue as forcing a load. */
15995 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
15997 /* Normally, we do not want to create a temporary for a
15998 TREE_ADDRESSABLE type because such a type should not be
15999 copied by bitwise-assignment. However, we make an
16000 exception here, as all we are doing here is ensuring that
16001 we read the bytes that make up the type. We use
16002 create_tmp_var_raw because create_tmp_var will abort when
16003 given a TREE_ADDRESSABLE type. */
16004 tree tmp
= create_tmp_var_raw (type
, "vol");
16005 gimple_add_tmp_var (tmp
);
16006 gimplify_assign (tmp
, *expr_p
, pre_p
);
16010 /* We can't do anything useful with a volatile reference to
16011 an incomplete type, so just throw it away. Likewise for
16012 a BLKmode type, since any implicit inner load should
16013 already have been turned into an explicit one by the
16014 gimplification process. */
16018 /* If we are gimplifying at the statement level, we're done. Tack
16019 everything together and return. */
16020 if (fallback
== fb_none
|| is_statement
)
16022 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
16023 it out for GC to reclaim it. */
16024 *expr_p
= NULL_TREE
;
16026 if (!gimple_seq_empty_p (internal_pre
)
16027 || !gimple_seq_empty_p (internal_post
))
16029 gimplify_seq_add_seq (&internal_pre
, internal_post
);
16030 gimplify_seq_add_seq (pre_p
, internal_pre
);
16033 /* The result of gimplifying *EXPR_P is going to be the last few
16034 statements in *PRE_P and *POST_P. Add location information
16035 to all the statements that were added by the gimplification
16037 if (!gimple_seq_empty_p (*pre_p
))
16038 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
16040 if (!gimple_seq_empty_p (*post_p
))
16041 annotate_all_with_location_after (*post_p
, post_last_gsi
,
16047 #ifdef ENABLE_GIMPLE_CHECKING
16050 enum tree_code code
= TREE_CODE (*expr_p
);
16051 /* These expressions should already be in gimple IR form. */
16052 gcc_assert (code
!= MODIFY_EXPR
16053 && code
!= ASM_EXPR
16054 && code
!= BIND_EXPR
16055 && code
!= CATCH_EXPR
16056 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
16057 && code
!= EH_FILTER_EXPR
16058 && code
!= GOTO_EXPR
16059 && code
!= LABEL_EXPR
16060 && code
!= LOOP_EXPR
16061 && code
!= SWITCH_EXPR
16062 && code
!= TRY_FINALLY_EXPR
16063 && code
!= EH_ELSE_EXPR
16064 && code
!= OACC_PARALLEL
16065 && code
!= OACC_KERNELS
16066 && code
!= OACC_SERIAL
16067 && code
!= OACC_DATA
16068 && code
!= OACC_HOST_DATA
16069 && code
!= OACC_DECLARE
16070 && code
!= OACC_UPDATE
16071 && code
!= OACC_ENTER_DATA
16072 && code
!= OACC_EXIT_DATA
16073 && code
!= OACC_CACHE
16074 && code
!= OMP_CRITICAL
16076 && code
!= OACC_LOOP
16077 && code
!= OMP_MASTER
16078 && code
!= OMP_MASKED
16079 && code
!= OMP_TASKGROUP
16080 && code
!= OMP_ORDERED
16081 && code
!= OMP_PARALLEL
16082 && code
!= OMP_SCAN
16083 && code
!= OMP_SECTIONS
16084 && code
!= OMP_SECTION
16085 && code
!= OMP_SINGLE
16086 && code
!= OMP_SCOPE
);
16090 /* Otherwise we're gimplifying a subexpression, so the resulting
16091 value is interesting. If it's a valid operand that matches
16092 GIMPLE_TEST_F, we're done. Unless we are handling some
16093 post-effects internally; if that's the case, we need to copy into
16094 a temporary before adding the post-effects to POST_P. */
16095 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
16098 /* Otherwise, we need to create a new temporary for the gimplified
16101 /* We can't return an lvalue if we have an internal postqueue. The
16102 object the lvalue refers to would (probably) be modified by the
16103 postqueue; we need to copy the value out first, which means an
16105 if ((fallback
& fb_lvalue
)
16106 && gimple_seq_empty_p (internal_post
)
16107 && is_gimple_addressable (*expr_p
))
16109 /* An lvalue will do. Take the address of the expression, store it
16110 in a temporary, and replace the expression with an INDIRECT_REF of
16112 tree ref_alias_type
= reference_alias_ptr_type (*expr_p
);
16113 unsigned int ref_align
= get_object_alignment (*expr_p
);
16114 tree ref_type
= TREE_TYPE (*expr_p
);
16115 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
16116 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
16117 if (TYPE_ALIGN (ref_type
) != ref_align
)
16118 ref_type
= build_aligned_type (ref_type
, ref_align
);
16119 *expr_p
= build2 (MEM_REF
, ref_type
,
16120 tmp
, build_zero_cst (ref_alias_type
));
16122 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
16124 /* An rvalue will do. Assign the gimplified expression into a
16125 new temporary TMP and replace the original expression with
16126 TMP. First, make sure that the expression has a type so that
16127 it can be assigned into a temporary. */
16128 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
16129 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
16133 #ifdef ENABLE_GIMPLE_CHECKING
16134 if (!(fallback
& fb_mayfail
))
16136 fprintf (stderr
, "gimplification failed:\n");
16137 print_generic_expr (stderr
, *expr_p
);
16138 debug_tree (*expr_p
);
16139 internal_error ("gimplification failed");
16142 gcc_assert (fallback
& fb_mayfail
);
16144 /* If this is an asm statement, and the user asked for the
16145 impossible, don't die. Fail and let gimplify_asm_expr
16151 /* Make sure the temporary matches our predicate. */
16152 gcc_assert ((*gimple_test_f
) (*expr_p
));
16154 if (!gimple_seq_empty_p (internal_post
))
16156 annotate_all_with_location (internal_post
, input_location
);
16157 gimplify_seq_add_seq (pre_p
, internal_post
);
16161 input_location
= saved_location
;
16165 /* Like gimplify_expr but make sure the gimplified result is not itself
16166 a SSA name (but a decl if it were). Temporaries required by
16167 evaluating *EXPR_P may be still SSA names. */
16169 static enum gimplify_status
16170 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
16171 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
16174 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
16175 gimple_test_f
, fallback
);
16177 && TREE_CODE (*expr_p
) == SSA_NAME
)
16178 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
16182 /* Look through TYPE for variable-sized objects and gimplify each such
16183 size that we find. Add to LIST_P any statements generated. */
16186 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
16188 if (type
== NULL
|| type
== error_mark_node
)
16191 const bool ignored_p
16193 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
16194 && DECL_IGNORED_P (TYPE_NAME (type
));
16197 /* We first do the main variant, then copy into any other variants. */
16198 type
= TYPE_MAIN_VARIANT (type
);
16200 /* Avoid infinite recursion. */
16201 if (TYPE_SIZES_GIMPLIFIED (type
))
16204 TYPE_SIZES_GIMPLIFIED (type
) = 1;
16206 switch (TREE_CODE (type
))
16209 case ENUMERAL_TYPE
:
16212 case FIXED_POINT_TYPE
:
16213 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
16214 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
16216 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
16218 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
16219 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
16224 /* These types may not have declarations, so handle them here. */
16225 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
16226 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
16227 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
16228 with assigned stack slots, for -O1+ -g they should be tracked
16231 && TYPE_DOMAIN (type
)
16232 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
16234 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
16235 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
16236 DECL_IGNORED_P (t
) = 0;
16237 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
16238 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
16239 DECL_IGNORED_P (t
) = 0;
16245 case QUAL_UNION_TYPE
:
16246 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
16247 if (TREE_CODE (field
) == FIELD_DECL
)
16249 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
16250 /* Likewise, ensure variable offsets aren't removed. */
16252 && (t
= DECL_FIELD_OFFSET (field
))
16254 && DECL_ARTIFICIAL (t
))
16255 DECL_IGNORED_P (t
) = 0;
16256 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
16257 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
16258 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
16263 case REFERENCE_TYPE
:
16264 /* We used to recurse on the pointed-to type here, which turned out to
16265 be incorrect because its definition might refer to variables not
16266 yet initialized at this point if a forward declaration is involved.
16268 It was actually useful for anonymous pointed-to types to ensure
16269 that the sizes evaluation dominates every possible later use of the
16270 values. Restricting to such types here would be safe since there
16271 is no possible forward declaration around, but would introduce an
16272 undesirable middle-end semantic to anonymity. We then defer to
16273 front-ends the responsibility of ensuring that the sizes are
16274 evaluated both early and late enough, e.g. by attaching artificial
16275 type declarations to the tree. */
16282 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
16283 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
16285 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
16287 TYPE_SIZE (t
) = TYPE_SIZE (type
);
16288 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
16289 TYPE_SIZES_GIMPLIFIED (t
) = 1;
16293 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
16294 a size or position, has had all of its SAVE_EXPRs evaluated.
16295 We add any required statements to *STMT_P. */
16298 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
16300 tree expr
= *expr_p
;
16302 /* We don't do anything if the value isn't there, is constant, or contains
16303 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
16304 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
16305 will want to replace it with a new variable, but that will cause problems
16306 if this type is from outside the function. It's OK to have that here. */
16307 if (expr
== NULL_TREE
16308 || is_gimple_constant (expr
)
16309 || TREE_CODE (expr
) == VAR_DECL
16310 || CONTAINS_PLACEHOLDER_P (expr
))
16313 *expr_p
= unshare_expr (expr
);
16315 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
16316 if the def vanishes. */
16317 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
16319 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
16320 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
16321 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
16322 if (is_gimple_constant (*expr_p
))
16323 *expr_p
= get_initialized_tmp_var (*expr_p
, stmt_p
, NULL
, false);
16326 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
16327 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
16328 is true, also gimplify the parameters. */
16331 gimplify_body (tree fndecl
, bool do_parms
)
16333 location_t saved_location
= input_location
;
16334 gimple_seq parm_stmts
, parm_cleanup
= NULL
, seq
;
16335 gimple
*outer_stmt
;
16338 timevar_push (TV_TREE_GIMPLIFY
);
16340 init_tree_ssa (cfun
);
16342 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
16344 default_rtl_profile ();
16346 gcc_assert (gimplify_ctxp
== NULL
);
16347 push_gimplify_context (true);
16349 if (flag_openacc
|| flag_openmp
)
16351 gcc_assert (gimplify_omp_ctxp
== NULL
);
16352 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
16353 gimplify_omp_ctxp
= new_omp_context (ORT_IMPLICIT_TARGET
);
16356 /* Unshare most shared trees in the body and in that of any nested functions.
16357 It would seem we don't have to do this for nested functions because
16358 they are supposed to be output and then the outer function gimplified
16359 first, but the g++ front end doesn't always do it that way. */
16360 unshare_body (fndecl
);
16361 unvisit_body (fndecl
);
16363 /* Make sure input_location isn't set to something weird. */
16364 input_location
= DECL_SOURCE_LOCATION (fndecl
);
16366 /* Resolve callee-copies. This has to be done before processing
16367 the body so that DECL_VALUE_EXPR gets processed correctly. */
16368 parm_stmts
= do_parms
? gimplify_parameters (&parm_cleanup
) : NULL
;
16370 /* Gimplify the function's body. */
16372 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
16373 outer_stmt
= gimple_seq_first_nondebug_stmt (seq
);
16376 outer_stmt
= gimple_build_nop ();
16377 gimplify_seq_add_stmt (&seq
, outer_stmt
);
16380 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
16381 not the case, wrap everything in a GIMPLE_BIND to make it so. */
16382 if (gimple_code (outer_stmt
) == GIMPLE_BIND
16383 && (gimple_seq_first_nondebug_stmt (seq
)
16384 == gimple_seq_last_nondebug_stmt (seq
)))
16386 outer_bind
= as_a
<gbind
*> (outer_stmt
);
16387 if (gimple_seq_first_stmt (seq
) != outer_stmt
16388 || gimple_seq_last_stmt (seq
) != outer_stmt
)
16390 /* If there are debug stmts before or after outer_stmt, move them
16391 inside of outer_bind body. */
16392 gimple_stmt_iterator gsi
= gsi_for_stmt (outer_stmt
, &seq
);
16393 gimple_seq second_seq
= NULL
;
16394 if (gimple_seq_first_stmt (seq
) != outer_stmt
16395 && gimple_seq_last_stmt (seq
) != outer_stmt
)
16397 second_seq
= gsi_split_seq_after (gsi
);
16398 gsi_remove (&gsi
, false);
16400 else if (gimple_seq_first_stmt (seq
) != outer_stmt
)
16401 gsi_remove (&gsi
, false);
16404 gsi_remove (&gsi
, false);
16408 gimple_seq_add_seq_without_update (&seq
,
16409 gimple_bind_body (outer_bind
));
16410 gimple_seq_add_seq_without_update (&seq
, second_seq
);
16411 gimple_bind_set_body (outer_bind
, seq
);
16415 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
16417 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
16419 /* If we had callee-copies statements, insert them at the beginning
16420 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
16421 if (!gimple_seq_empty_p (parm_stmts
))
16425 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
16428 gtry
*g
= gimple_build_try (parm_stmts
, parm_cleanup
,
16429 GIMPLE_TRY_FINALLY
);
16431 gimple_seq_add_stmt (&parm_stmts
, g
);
16433 gimple_bind_set_body (outer_bind
, parm_stmts
);
16435 for (parm
= DECL_ARGUMENTS (current_function_decl
);
16436 parm
; parm
= DECL_CHAIN (parm
))
16437 if (DECL_HAS_VALUE_EXPR_P (parm
))
16439 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
16440 DECL_IGNORED_P (parm
) = 0;
16444 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
16445 && gimplify_omp_ctxp
)
16447 delete_omp_context (gimplify_omp_ctxp
);
16448 gimplify_omp_ctxp
= NULL
;
16451 pop_gimplify_context (outer_bind
);
16452 gcc_assert (gimplify_ctxp
== NULL
);
16454 if (flag_checking
&& !seen_error ())
16455 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
16457 timevar_pop (TV_TREE_GIMPLIFY
);
16458 input_location
= saved_location
;
16463 typedef char *char_p
; /* For DEF_VEC_P. */
16465 /* Return whether we should exclude FNDECL from instrumentation. */
16468 flag_instrument_functions_exclude_p (tree fndecl
)
16472 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
16473 if (v
&& v
->length () > 0)
16479 name
= lang_hooks
.decl_printable_name (fndecl
, 1);
16480 FOR_EACH_VEC_ELT (*v
, i
, s
)
16481 if (strstr (name
, s
) != NULL
)
16485 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
16486 if (v
&& v
->length () > 0)
16492 name
= DECL_SOURCE_FILE (fndecl
);
16493 FOR_EACH_VEC_ELT (*v
, i
, s
)
16494 if (strstr (name
, s
) != NULL
)
16501 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
16502 node for the function we want to gimplify.
16504 Return the sequence of GIMPLE statements corresponding to the body
16508 gimplify_function_tree (tree fndecl
)
16513 gcc_assert (!gimple_body (fndecl
));
16515 if (DECL_STRUCT_FUNCTION (fndecl
))
16516 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
16518 push_struct_function (fndecl
);
16520 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
16522 cfun
->curr_properties
|= PROP_gimple_lva
;
16524 if (asan_sanitize_use_after_scope ())
16525 asan_poisoned_variables
= new hash_set
<tree
> ();
16526 bind
= gimplify_body (fndecl
, true);
16527 if (asan_poisoned_variables
)
16529 delete asan_poisoned_variables
;
16530 asan_poisoned_variables
= NULL
;
16533 /* The tree body of the function is no longer needed, replace it
16534 with the new GIMPLE body. */
16536 gimple_seq_add_stmt (&seq
, bind
);
16537 gimple_set_body (fndecl
, seq
);
16539 /* If we're instrumenting function entry/exit, then prepend the call to
16540 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
16541 catch the exit hook. */
16542 /* ??? Add some way to ignore exceptions for this TFE. */
16543 if (flag_instrument_function_entry_exit
16544 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
16545 /* Do not instrument extern inline functions. */
16546 && !(DECL_DECLARED_INLINE_P (fndecl
)
16547 && DECL_EXTERNAL (fndecl
)
16548 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
16549 && !flag_instrument_functions_exclude_p (fndecl
))
16554 gimple_seq cleanup
= NULL
, body
= NULL
;
16555 tree tmp_var
, this_fn_addr
;
16558 /* The instrumentation hooks aren't going to call the instrumented
16559 function and the address they receive is expected to be matchable
16560 against symbol addresses. Make sure we don't create a trampoline,
16561 in case the current function is nested. */
16562 this_fn_addr
= build_fold_addr_expr (current_function_decl
);
16563 TREE_NO_TRAMPOLINE (this_fn_addr
) = 1;
16565 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
16566 call
= gimple_build_call (x
, 1, integer_zero_node
);
16567 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
16568 gimple_call_set_lhs (call
, tmp_var
);
16569 gimplify_seq_add_stmt (&cleanup
, call
);
16570 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT
);
16571 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
16572 gimplify_seq_add_stmt (&cleanup
, call
);
16573 tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
16575 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
16576 call
= gimple_build_call (x
, 1, integer_zero_node
);
16577 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
16578 gimple_call_set_lhs (call
, tmp_var
);
16579 gimplify_seq_add_stmt (&body
, call
);
16580 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER
);
16581 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
16582 gimplify_seq_add_stmt (&body
, call
);
16583 gimplify_seq_add_stmt (&body
, tf
);
16584 new_bind
= gimple_build_bind (NULL
, body
, NULL
);
16586 /* Replace the current function body with the body
16587 wrapped in the try/finally TF. */
16589 gimple_seq_add_stmt (&seq
, new_bind
);
16590 gimple_set_body (fndecl
, seq
);
16594 if (sanitize_flags_p (SANITIZE_THREAD
)
16595 && param_tsan_instrument_func_entry_exit
)
16597 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
16598 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
16599 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
16600 /* Replace the current function body with the body
16601 wrapped in the try/finally TF. */
16603 gimple_seq_add_stmt (&seq
, new_bind
);
16604 gimple_set_body (fndecl
, seq
);
16607 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
16608 cfun
->curr_properties
|= PROP_gimple_any
;
16612 dump_function (TDI_gimple
, fndecl
);
16615 /* Return a dummy expression of type TYPE in order to keep going after an
16619 dummy_object (tree type
)
16621 tree t
= build_int_cst (build_pointer_type (type
), 0);
16622 return build2 (MEM_REF
, type
, t
, t
);
16625 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
16626 builtin function, but a very special sort of operator. */
16628 enum gimplify_status
16629 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
,
16630 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
16632 tree promoted_type
, have_va_type
;
16633 tree valist
= TREE_OPERAND (*expr_p
, 0);
16634 tree type
= TREE_TYPE (*expr_p
);
16635 tree t
, tag
, aptag
;
16636 location_t loc
= EXPR_LOCATION (*expr_p
);
16638 /* Verify that valist is of the proper type. */
16639 have_va_type
= TREE_TYPE (valist
);
16640 if (have_va_type
== error_mark_node
)
16642 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
16643 if (have_va_type
== NULL_TREE
16644 && POINTER_TYPE_P (TREE_TYPE (valist
)))
16645 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
16647 = targetm
.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist
)));
16648 gcc_assert (have_va_type
!= NULL_TREE
);
16650 /* Generate a diagnostic for requesting data of a type that cannot
16651 be passed through `...' due to type promotion at the call site. */
16652 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
16655 static bool gave_help
;
16657 /* Use the expansion point to handle cases such as passing bool (defined
16658 in a system header) through `...'. */
16660 = expansion_point_location_if_in_system_header (loc
);
16662 /* Unfortunately, this is merely undefined, rather than a constraint
16663 violation, so we cannot make this an error. If this call is never
16664 executed, the program is still strictly conforming. */
16665 auto_diagnostic_group d
;
16666 warned
= warning_at (xloc
, 0,
16667 "%qT is promoted to %qT when passed through %<...%>",
16668 type
, promoted_type
);
16669 if (!gave_help
&& warned
)
16672 inform (xloc
, "(so you should pass %qT not %qT to %<va_arg%>)",
16673 promoted_type
, type
);
16676 /* We can, however, treat "undefined" any way we please.
16677 Call abort to encourage the user to fix the program. */
16679 inform (xloc
, "if this code is reached, the program will abort");
16680 /* Before the abort, allow the evaluation of the va_list
16681 expression to exit or longjmp. */
16682 gimplify_and_add (valist
, pre_p
);
16683 t
= build_call_expr_loc (loc
,
16684 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
16685 gimplify_and_add (t
, pre_p
);
16687 /* This is dead code, but go ahead and finish so that the
16688 mode of the result comes out right. */
16689 *expr_p
= dummy_object (type
);
16690 return GS_ALL_DONE
;
16693 tag
= build_int_cst (build_pointer_type (type
), 0);
16694 aptag
= build_int_cst (TREE_TYPE (valist
), 0);
16696 *expr_p
= build_call_expr_internal_loc (loc
, IFN_VA_ARG
, type
, 3,
16697 valist
, tag
, aptag
);
16699 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
16700 needs to be expanded. */
16701 cfun
->curr_properties
&= ~PROP_gimple_lva
;
16706 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
16708 DST/SRC are the destination and source respectively. You can pass
16709 ungimplified trees in DST or SRC, in which case they will be
16710 converted to a gimple operand if necessary.
16712 This function returns the newly created GIMPLE_ASSIGN tuple. */
16715 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
16717 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
16718 gimplify_and_add (t
, seq_p
);
16720 return gimple_seq_last_stmt (*seq_p
);
16724 gimplify_hasher::hash (const elt_t
*p
)
16727 return iterative_hash_expr (t
, 0);
16731 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
16735 enum tree_code code
= TREE_CODE (t1
);
16737 if (TREE_CODE (t2
) != code
16738 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
16741 if (!operand_equal_p (t1
, t2
, 0))
16744 /* Only allow them to compare equal if they also hash equal; otherwise
16745 results are nondeterminate, and we fail bootstrap comparison. */
16746 gcc_checking_assert (hash (p1
) == hash (p2
));