/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "stringpool.h"
#include "omp-offload.h"
#include "tree-nested.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};

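/* Illustrative sketch (not from the original source): a variable named in
   an explicit `firstprivate' clause would typically be entered into the
   region's variable table as

     omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE | GOVD_EXPLICIT);

   with GOVD_SEEN OR'ed in later, once the variable is actually referenced
   inside the region.  */
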
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,				/* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,	/* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,	/* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,	/* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,	/* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;

  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;

  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool in_for_exprs;
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);

/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

static void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
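
/* Sketch of the reuse behavior: when optimizing, two formal-temp requests
   for an identical VAL hash to the same slot and share one temporary,

     tree t1 = lookup_tmp_var (val, true, false);
     tree t2 = lookup_tmp_var (val, true, false);   (t2 == t1)

   whereas at -O0, or when VAL has side effects, each call creates a
   fresh temporary.  */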

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}

/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only a few specific cases of node sharing across functions, it
   is probably easier for a front-end to unshare the expressions manually.  On
   the contrary, if the expressions generated at the global level are as
   widespread as expressions generated within functions, deep unsharing is very
   likely the way to go.  */
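
/* Concrete example (sketch): if a front-end reuses a single ADDR_EXPR
   node <&a> in two different statements, gimplifying the first
   occurrence in place would clobber the tree seen by the second.  The
   mark/copy/unmark walk below leaves the first occurrence alone and
   replaces the second one with a fresh copy.  */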

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
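
/* Sketch of the effect: for a value-producing wrapper such as a
   statement expression

     x = ({ doit (); 42; });

   the walk above descends to the final value `42', rewrites it to
   `retval = 42', gives every container on the way down void type, and
   returns `retval' so the caller can use it in place of the wrapper.  */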

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

1236 on POISON flag, shadow memory of a DECL variable. The call will be
1237 put on location identified by IT iterator, where BEFORE flag drives
1238 position where the stmt will be put. */
1241 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1244 tree unit_size
= DECL_SIZE_UNIT (decl
);
1245 tree base
= build_fold_addr_expr (decl
);
1247 /* Do not poison variables that have size equal to zero. */
1248 if (zerop (unit_size
))
1251 /* It's necessary to have all stack variables aligned to ASAN granularity
1253 gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
1254 unsigned shadow_granularity
1255 = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE
: ASAN_SHADOW_GRANULARITY
;
1256 if (DECL_ALIGN_UNIT (decl
) <= shadow_granularity
)
1257 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* shadow_granularity
);
1259 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1262 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1263 build_int_cst (integer_type_node
, flags
),
1267 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1269 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
   either poisons or unpoisons a DECL.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal call for all VARIABLES
   depending on POISON flag.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison,
		       gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (TREE_CODE (key) == INDIRECT_REF)
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
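
/* Sketch of a resulting tuple: a block that allocated a VLA gimplifies
   to roughly

     gimple_bind <vars>
       saved_stack.1 = __builtin_stack_save ();
       try
         ... body ...
       finally
         __builtin_stack_restore (saved_stack.1);
         local = {CLOBBER(eol)};

   i.e. the cleanup sequence built above is attached via GIMPLE_TRY_FINALLY
   and the stack save/restore pair brackets the body.  */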

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are not in a conditional context, add PREDICT statement.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
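
/* Sketch: for `return a + b;' the front-end hands us
   RETURN_EXPR <MODIFY_EXPR <RESULT_DECL, a + b>>, which becomes roughly

     retval.0 = a + b;
     return retval.0;

   with retval.0 cached in gimplify_ctxp->return_temp so that every
   return statement in the function reuses the same temporary.  */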

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
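
/* Sketch: for `char buf[n];' this emits roughly

     buf.1 = __builtin_alloca_with_align (D.size, align);

   into *SEQ_P, and later uses of `buf' resolve to `*buf.1' through the
   DECL_VALUE_EXPR installed above.  */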

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: NAME of the DECL;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL).  */

static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  location_t loc = EXPR_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));

  tree init_type_node
    = build_int_cst (integer_type_node, (int) init_type);

  tree decl_name = NULL_TREE;
  if (DECL_NAME (decl))
    decl_name = build_string_literal (DECL_NAME (decl));
  else
    {
      char *decl_name_anonymous = xasprintf ("D.%u", DECL_UID (decl));
      decl_name = build_string_literal (decl_name_anonymous);
      free (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    decl_name);

  gimplify_assign (decl, call, seq_p);
}
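
/* Sketch: with -ftrivial-auto-var-init enabled, an uninitialized
   `int i1;' is given an initializer of the form shown in a comment
   further below:

     i1 = .DEFERRED_INIT (4, 2, &"i1"[0]);

   where 4 is the size in bytes, 2 encodes the INIT_TYPE, and the last
   argument carries the decl's name for diagnostics.  */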

/* Generate padding initialization for automatic variable DECL.
   C guarantees that brace-init with fewer initializers than members
   aggregate will initialize the rest of the aggregate as-if it were
   static initialization.  In turn static initialization guarantees
   that padding is initialized to zero.  So, we always initialize paddings
   to zeroes regardless of INIT_TYPE.
   To do the padding initialization, we insert a call to
   __builtin_clear_padding (&decl, 0, for_auto_init = true).
   Note, we add an additional dummy argument for __builtin_clear_padding,
   'for_auto_init' to distinguish whether this call is for automatic
   variable initialization or not.  */

static void
gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
				      gimple_seq *seq_p)
{
  tree addr_of_decl = NULL_TREE;
  tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);

  if (is_vla)
    {
      /* The temporary address variable for this vla should be
	 created in gimplify_vla_decl.  */
      gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
      gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl)) == INDIRECT_REF);
      addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
    }
  else
    {
      mark_addressable (decl);
      addr_of_decl = build_fold_addr_expr (decl);
    }

  gimple *call = gimple_build_call (fn, 2, addr_of_decl,
				    build_one_cst (TREE_TYPE (addr_of_decl)));
  gimplify_seq_add_stmt (seq_p, call);
}

/* Return true if DECL needs to be automatically initialized by the
   compiler.  */
static bool
is_var_need_auto_init (tree decl)
{
  if (auto_var_p (decl)
      && (TREE_CODE (decl) != VAR_DECL
	  || !DECL_HARD_REGISTER (decl))
      && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
      && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
      && !OPAQUE_TYPE_P (TREE_TYPE (decl))
      && !is_empty_type (TREE_TYPE (decl)))
    return true;
  return false;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;
      /* Check whether a decl has FE created VALUE_EXPR here BEFORE
	 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
	 If the decl has VALUE_EXPR that was created by FE (usually
	 C++ FE), it's a proxy variable, and FE already initialized
	 the VALUE_EXPR of it, we should not initialize it anymore.  */
      bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);

      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp
	  /* GNAT introduces temporaries to hold return values of calls in
	     initializers of variables defined in other units, so the
	     declaration of the variable is discarded completely.  We do not
	     want to issue poison calls for such dropped variables.  */
	  && (DECL_SEEN_IN_BIND_EXPR_P (decl)
	      || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      /* Clear TREE_READONLY if we really have an initialization.  */
	      if (!DECL_INITIAL (decl)
		  && !omp_privatize_by_reference (decl))
		TREE_READONLY (decl) = 0;
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
      /* When there is no explicit initializer, if the user requested,
	 we should insert an artificial initializer for this automatic
	 variable.  */
      else if (is_var_need_auto_init (decl)
	       && !decl_had_value_expr_p)
	{
	  gimple_add_init_for_auto_var (decl,
					flag_auto_var_init,
					seq_p);
	  /* The expanding of a call to the above .DEFERRED_INIT will apply
	     block initialization to the whole space covered by this variable.
	     As a result, all the paddings will be initialized to zeroes
	     for zero initialization and 0xFE byte-repeatable patterns for
	     pattern initialization.
	     In order to make the paddings as zeroes for pattern init, we
	     should add a call to __builtin_clear_padding to clear the
	     paddings to zero, compatible with Clang.
	     We cannot insert this call if the variable is a gimple register
	     since __builtin_clear_padding will take the address of the
	     variable.  As a result, if a long double/_Complex long double
	     variable is spilled into stack later, its padding is 0xFE.  */
	  if (flag_auto_var_init == AUTO_INIT_PATTERN
	      && !is_gimple_reg (decl)
	      && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
	    gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
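
/* Sketch: LOOP_EXPR <body> lowers to

     start:
       ... body ...
       goto start;
     exit:

   where the `exit' label is emitted only if gimplifying the body left
   an EXIT_EXPR target in gimplify_ctxp->exit_label.  */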

/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}

/* Emit warning for the unreachable statement STMT if needed.
   Return the gimple itself when the warning is emitted, otherwise
   return NULL.  */
static gimple *
emit_warn_switch_unreachable (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_GOTO
      && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
      && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
    /* Don't warn for compiler-generated gotos.  These occur
       in Duff's devices, for example.  */
    return NULL;
  else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
	   && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
	       || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
		   && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
	       || (is_gimple_assign (stmt)
		   && gimple_assign_single_p (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
		   && gimple_call_internal_p (
			SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
			IFN_DEFERRED_INIT))))
    /* Don't warn for compiler-generated initializations for
       -ftrivial-auto-var-init.
       There are 3 cases:
	 case 1: a call to .DEFERRED_INIT;
	 case 2: a call to __builtin_clear_padding with the 2nd argument
		 present and non-zero;
	 case 3: a gimple assign store right after the call to .DEFERRED_INIT
		 that has the LHS of .DEFERRED_INIT as the RHS as following:
		   _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
		   i1 = _1.  */
    return NULL;
  else
    warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
		"statement will never be executed");
  return stmt;
}

/* Callback for walk_gimple_seq.  */

static tree
warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
					 bool *handled_ops_p,
					 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  bool unreachable_issued = wi->info != NULL;

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  if (warn_switch_unreachable && !unreachable_issued)
	    wi->info = emit_warn_switch_unreachable (stmt);

	  /* Stop when auto var init warning is not on.  */
	  if (!warn_trivial_auto_var_init)
	    return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_DEBUG:
      /* Ignore these.  We may generate them before declarations that
	 are never executed.  If there's something to warn about,
	 there will be non-debug stmts too, and we'll catch those.  */
      break;

    case GIMPLE_LABEL:
      /* Stop until the first label.  */
      return integer_zero_node;

    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      if (warn_trivial_auto_var_init
	  && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
	  && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
	{
	  /* Get the variable name from the 3rd argument of call.  */
	  tree var_name = gimple_call_arg (stmt, 2);
	  var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
	  const char *var_name_str = TREE_STRING_POINTER (var_name);

	  warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
		      "%qs cannot be initialized with "
		      "%<-ftrivial-auto-var-init%>",
		      var_name_str);
	  break;
	}
      /* Fall through.  */

    default:
      /* Check the first "real" statement (not a decl/lexical scope/...),
	 issue a warning if needed.  */
      if (warn_switch_unreachable && !unreachable_issued)
	wi->info = emit_warn_switch_unreachable (stmt);
      /* Stop when auto var init warning is not on.  */
      if (!warn_trivial_auto_var_init)
	return integer_zero_node;
      break;
    }
  return NULL_TREE;
}

/* Possibly warn about unreachable statements between switch's controlling
   expression and the first case.  Also warn when -ftrivial-auto-var-init
   cannot initialize the auto variable in that situation.
   SEQ is the body of a switch expression.  */

static void
maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
{
  if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
      /* This warning doesn't play well with Fortran when optimizations
	 are on.  */
      || lang_GNU_Fortran ()
      || seq == NULL)
    return;

  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
}

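/* As an example (hypothetical user code), the initialization below is
   never executed, so -Wswitch-unreachable fires; with
   -ftrivial-auto-var-init enabled, "i" cannot be auto-initialized either
   and -Wtrivial-auto-var-init warns as well:

     switch (x)
       {
	 int i = 0;
       case 1:
	 return i;
       }  */
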
/* A label entry that pairs label and a location.  */
struct label_entry
{
  tree label;
  location_t loc;
};

/* Find LABEL in vector of label entries VEC.  */

static struct label_entry *
find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
{
  unsigned int i;
  struct label_entry *l;

  FOR_EACH_VEC_ELT (*vec, i, l)
    if (l->label == label)
      return l;

  return NULL;
}

/* Return true if LABEL, a LABEL_DECL, represents a case label
   in a vector of labels CASES.  */

static bool
case_label_p (const vec<tree> *cases, tree label)
{
  unsigned int i;
  tree l;

  FOR_EACH_VEC_ELT (*cases, i, l)
    if (CASE_LABEL (l) == label)
      return true;

  return false;
}

/* Find the last nondebug statement in a scope STMT.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    case GIMPLE_DEBUG:
      gcc_unreachable ();

    default:
      return stmt;
    }
}

/* Collect labels that may fall through into LABELS and return the statement
   preceding another case label, or a user-defined label.  Store a location
   useful to give warnings at *PREVLOC (usually the location of the returned
   statement or of its surrounding scope).  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels,
			    location_t *prevloc)
{
  gimple *prev = NULL;

  *prevloc = UNKNOWN_LOCATION;
  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
	{
	  /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
	     which starts on a GIMPLE_SWITCH and ends with a break label.
	     Handle that as a single statement that can fall through.  */
	  gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
	  gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
	  gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
	  if (last
	      && gimple_code (first) == GIMPLE_SWITCH
	      && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		{
		  prev = bind;
		  gsi_next (gsi_p);
		  continue;
		}
	    }
	}
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		*prevloc = bind_loc;
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	       if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  /* A dead label can't fall through.  */
	  if (!UNUSED_LABEL_P (false_lab))
	    {
	      struct label_entry l = { false_lab, if_loc };
	      labels->safe_push (l);
	    }

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* This case is about
	      if (1 != 0) goto <D.2022>; else goto <D.2023>;
	      <D.2022>:
	      n = n + 1; // #1
	      <D.2023>:  // #2
	      <D.1988>:  // #3
	     where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
	     through to #3.  So set PREV to #1.  */
	  else if (UNUSED_LABEL_P (false_lab))
	    prev = gsi_stmt (*gsi_p);

	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
	;
      else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  if (prev && gimple_has_location (prev))
    *prevloc = gimple_location (prev);
  return prev;
}

/* Return true if the switch fallthrough warning should occur.  LABEL is
   the label statement that we're falling through to.  */

static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      tree l;
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the code after the
     subsequent case label immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi)
	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
    gsi_next_nondebug (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}

/* Callback for walk_gimple_seq.  */

static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next_nondebug (gsi_p);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	location_t prevloc;
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    auto_diagnostic_group d;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && prevloc != UNKNOWN_LOCATION)
	      warned_p = warning_at (prevloc,
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}

/* Warn when a switch case falls through.  */

static void
maybe_warn_implicit_fallthrough (gimple_seq seq)
{
  if (!warn_implicit_fallthrough)
    return;

  /* This warning is meant for C/C++/ObjC/ObjC++ only.  */
  if (!(lang_GNU_C ()
	|| lang_GNU_CXX ()
	|| lang_GNU_OBJC ()))
    return;

  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
}

/* Callback for walk_gimple_seq.  */

static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  gsi_remove (gsi_p, true);
	  if (gsi_end_p (*gsi_p))
	    {
	      *static_cast<location_t *>(wi->info) = gimple_location (stmt);
	      return integer_zero_node;
	    }

	  bool found = false;
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			   == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
		;
	      else if (!is_gimple_debug (stmt))
		/* Anything else is not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
		     "a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Expand all FALLTHROUGH () calls in SEQ.  */

static void
expand_FALLTHROUGH (gimple_seq *seq_p)
{
  struct walk_stmt_info wi;
  location_t loc;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &loc;
  walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
  if (wi.callback_result == integer_zero_node)
    /* We've found [[fallthrough]]; at the end of a switch, which the C++
       standard says is ill-formed; see [dcl.attr.fallthrough].  */
    pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
	     "a case label or default label");
}

/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      bool add_bind = false;
      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  if (old_in_switch_expr)
	    {
	      SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
	      add_bind = true;
	    }
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}
      else if (old_in_switch_expr)
	{
	  gimple *last = gimple_seq_last_stmt (switch_body_seq);
	  if (last && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		add_bind = true;
	    }
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
	 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
	 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
	 so that we can easily find the start and end of the switch
	 statement.  */
      if (add_bind)
	{
	  gimple_seq bind_body = NULL;
	  gimplify_seq_add_stmt (&bind_body, switch_stmt);
	  gimple_seq_add_seq (&bind_body, switch_body_seq);
	  gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
	  gimple_set_location (bind, EXPR_LOCATION (switch_expr));
	  gimplify_seq_add_stmt (pre_p, bind);
	}
      else
	{
	  gimplify_seq_add_stmt (pre_p, switch_stmt);
	  gimplify_seq_add_seq (pre_p, switch_body_seq);
	}
      labels.release ();
    }
  else
    gcc_unreachable ();

  return GS_ALL_DONE;
}

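/* Sketch of the lowering (shape only, not exact dump output):

     switch (x) { case 1: a (); break; default: b (); }

   becomes a GIMPLE_SWITCH that only transfers control, followed by the
   gimplified body:

     switch (x) <default: <D.3>, case 1: <D.1>>
     <D.1>: a (); goto <D.2>;
     <D.3>: b ();
     <D.2>:  */
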
/* Gimplify the LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
	      == current_function_decl);

  tree label = LABEL_EXPR_LABEL (*expr_p);
  glabel *label_stmt = gimple_build_label (label);
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  gimplify_seq_add_stmt (pre_p, label_stmt);

  if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
						      NOT_TAKEN));
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
						      TAKEN));

  return GS_ALL_DONE;
}

/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification, in the
     diagnose_omp_blocks pass.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  tree label = CASE_LABEL (*expr_p);
  label_stmt = gimple_build_label (label);
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, label_stmt);

  if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
						      NOT_TAKEN));
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
						      TAKEN));

  return GS_ALL_DONE;
}

/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}

/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}

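/* E.g. an EXIT_EXPR <cond> inside a LOOP_EXPR becomes, roughly,

     if (cond) goto exit_label;

   with exit_label shared through gimplify_ctxp so that
   gimplify_loop_expr can emit the label after the loop body.  */
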
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}

/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}

/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}

/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.
     Another possible culprit is size expressions for variably modified
     types which are lost in the FE or not gimplified correctly.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
      return GS_OK;
    }

  return GS_ALL_DONE;
}

/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}

/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     The base expression may contain a statement expression that
     has declarations used in size expressions, so has to be
     gimplified before gimplifying the size expressions.

     So we do this in three steps.  First we deal with variable
     bounds, sizes, and positions, then we gimplify the base and
     ensure it is memory if needed, then we deal with the annotations
     for any variables in the components and any indices, from left
     to right.  */

  bool need_non_reg = false;
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Deal with the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		}
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_size = array_ref_element_size (t);
	      if (!is_gimple_min_invariant (elmt_size))
		{
		  elmt_size = unshare_expr (elmt_size);
		  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
		  tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

		  /* Divide the element size by the alignment of the element
		     type (above).  */
		  elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
					      elmt_size, factor);

		  TREE_OPERAND (t, 3) = elmt_size;
		}
	    }
	  need_non_reg = true;
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = component_ref_field_offset (t);
	      if (!is_gimple_min_invariant (offset))
		{
		  offset = unshare_expr (offset);
		  tree field = TREE_OPERAND (t, 1);
		  tree factor
		    = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

		  /* Divide the offset by its alignment.  */
		  offset = size_binop_loc (loc, EXACT_DIV_EXPR,
					   offset, factor);

		  TREE_OPERAND (t, 2) = offset;
		}
	    }
	  need_non_reg = true;
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);
  if (ret == GS_ERROR)
    return GS_ERROR;

  /* Step 2a: if we have component references we do not support on
     registers then make sure the base isn't a register.  Of course
     we can only do so if an rvalue is OK.  */
  if (need_non_reg && (fallback & fb_rvalue))
    prepare_gimple_addressable (p, pre_p);

  /* Step 3: gimplify size expressions and the indices and operands of
     ARRAY_REF.  During this loop we also remove any useless conversions.  */

  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);

	  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);

	  /* Gimplify the dimension.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
      ret = MIN (ret, GS_OK);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}

/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      lhs = get_initialized_tmp_var (lhs, pre_p);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}

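/* Sketch of the two shapes this produces, assuming an int "i":

     prefix "++i" (or a postfix whose value is unused):
       i = i + 1;

     postfix "i++" whose value is used:
       tmp = i;
       i = tmp + 1;
       ... tmp is the value of the expression ...  */
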
/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */

static void
maybe_with_size_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  tree size;

  /* If we've already wrapped this or the type is error_mark_node, we can't do
     anything.  */
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
      || type == error_mark_node)
    return;

  /* If the size isn't known or is a constant, we have nothing to do.  */
  size = TYPE_SIZE_UNIT (type);
  if (!size || poly_int_tree_p (size))
    return;

  /* Otherwise, make a WITH_SIZE_EXPR.  */
  size = unshare_expr (size);
  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
}

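/* For instance, for an object whose type has a non-constant
   TYPE_SIZE_UNIT "n" (e.g. a variable-length array in GNU C, or a
   dynamically sized Ada object), the reference is wrapped, roughly, as
   WITH_SIZE_EXPR <obj, n'>, where n' is the size with any
   PLACEHOLDER_EXPRs substituted, so the size travels with the value.  */
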
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
   gimplified to an SSA name.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
	      bool allow_ssa)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
	{
	  tree init = TARGET_EXPR_INITIAL (*arg_p);
	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    *arg_p = init;
	}
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
}

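/* E.g. for a call "f (x + 1, big)" where "big" is a large struct
   (a sketch, not exact dump output): "x + 1" has register type, so it
   is pulled into a temporary, giving "t = x + 1; f (t, big);", while
   "big" is left as an lvalue and passed without an extra aggregate
   copy.  */
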
/* Don't fold inside offloading or taskreg regions: it can break code by
   adding decl references that weren't in the source.  We'll do it during
   omplower pass instead.  */

static bool
maybe_fold_stmt (gimple_stmt_iterator *gsi)
{
  struct gimplify_omp_ctx *ctx;
  for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
    if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
      return false;
    else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
      return false;
  /* Delay folding of builtins until the IL is in consistent state
     so the diagnostic machinery can do a better job.  */
  if (gimple_call_builtin_p (gsi_stmt (*gsi)))
    return false;
  return fold_stmt (gsi);
}

/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      if (ifn == IFN_ASSUME)
	{
	  if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
	    {
	      /* If the [[assume (cond)]]; condition is simple
		 enough and can be evaluated unconditionally
		 without side-effects, expand it as
		 if (!cond) __builtin_unreachable ();  */
	      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
	      *expr_p = build3 (COND_EXPR, void_type_node,
				CALL_EXPR_ARG (*expr_p, 0), void_node,
				build_call_expr_loc (EXPR_LOCATION (*expr_p),
						     fndecl, 0));
	      return GS_OK;
	    }
	  /* If not optimizing, ignore the assumptions.  */
	  if (!optimize || seen_error ())
	    {
	      *expr_p = NULL_TREE;
	      return GS_ALL_DONE;
	    }
	  /* Temporarily, until gimple lowering, transform
	     .ASSUME (cond);
	     into:
	     [[assume (guard)]]
	     {
	       guard = cond;
	     }
	     such that gimple lowering can outline the condition into
	     a separate function easily.  */
	  tree guard = create_tmp_var (boolean_type_node);
	  *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
			    gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
	  *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
	  push_gimplify_context ();
	  gimple_seq body = NULL;
	  gimple *g = gimplify_and_return_first (*expr_p, &body);
	  pop_gimplify_context (g);
	  g = gimple_build_assume (guard, body);
	  gimple_set_location (g, loc);
	  gimplify_seq_add_stmt (pre_p, g);
	  *expr_p = NULL_TREE;
	  return GS_ALL_DONE;
	}

      for (i = 0; i < nargs; i++)
	{
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}

      gcall *call = gimple_build_call_internal_vec (ifn, vargs);
      gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	/* If the call has been built for a variable-sized object, then we
	   want to restore the stack level when the enclosing BIND_EXPR is
	   exited to reclaim the allocated space; otherwise, we precisely
	   need to do the opposite and preserve the latest stack level.  */
	if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
	  gimplify_ctxp->save_stack = true;
	else
	  gimplify_ctxp->keep_stack = true;
	break;

      case BUILT_IN_VA_START:
	{
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}

      case BUILT_IN_EH_RETURN:
	cfun->calls_eh_return = true;
	break;

      case BUILT_IN_CLEAR_PADDING:
	if (call_expr_nargs (*expr_p) == 1)
	  {
	    /* Remember the original type of the argument in an internal
	       dummy second argument, as in GIMPLE pointer conversions are
	       useless.  Also mark this call as not for automatic
	       initialization in the internal dummy third argument.  */
	    p = CALL_EXPR_ARG (*expr_p, 0);
	    *expr_p
	      = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
				     build_zero_cst (TREE_TYPE (p)));
	    return GS_OK;
	  }
	break;

      default:
	;
      }
  if (fndecl && fndecl_built_in_p (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  if (flag_openmp
      && fndecl
      && cfun
      && (cfun->curr_properties & PROP_gimple_any) == 0)
    {
      tree variant = omp_resolve_declare_variant (fndecl);
      if (variant != fndecl)
	CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  if (ret == GS_ERROR)
    return GS_ERROR;

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     vars there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
	   PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
	   PUSH_ARGS_REVERSED ? i-- : i++)
	{
	  enum gimplify_status t;

	  /* Avoid gimplifying the second argument to va_start, which needs to
	     be the plain PARM_DECL.  */
	  if ((i != 1) || !builtin_va_start_p)
	    {
	      t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p), ! returns_twice);

	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p), ! returns_twice);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p, fnptrtype);
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}

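/* Sketch of the two exits above: for "(void) f (a + b)" we emit the
   GIMPLE_CALL here (after "t = a + b") and clear *EXPR_P, whereas for
   "x = f (a + b)" the call is left as a GIMPLE-form CALL_EXPR and
   gimplify_modify_expr later builds the GIMPLE_CALL with lhs "x".  */
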
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}

/* If EXPR is a GOTO_EXPR, return it.  If it is a STATEMENT_LIST, skip
   any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
   statement, if it is the last one.  Otherwise, return NULL.  */

static tree
find_goto (tree expr)
{
  if (!expr)
    return NULL;

  if (TREE_CODE (expr) == GOTO_EXPR)
    return expr;

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return NULL;

  tree_stmt_iterator i = tsi_start (expr);

  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    tsi_next (&i);

  if (!tsi_one_before_end_p (i))
    return NULL;

  return find_goto (tsi_stmt (i));
}

/* Same as find_goto, except that it returns NULL if the destination
   is not a LABEL_DECL.  */

static tree
find_goto_label (tree expr)
{
  tree dest = find_goto (expr);
  if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
    return dest;
  return NULL;
}

/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) c
	 into
	   if (a) if (b) c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (rexpr_has_location (last))
	    SET_EXPR_LOCATION (t, rexpr_location (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_unroll_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	case annot_expr_parallel_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
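/* For instance, a condition written as

     if (__builtin_expect (x > 0 && y > 0, 1)) ...

   reaches here as NE_EXPR <CALL_EXPR <__builtin_expect>, 0>; the code
   above recurses into the call's first argument, boolifies the
   TRUTH_ANDIF_EXPR hidden under the cast to long, and converts it back
   to the call's type, so the expectation hint is preserved while the
   overall expression still ends up with BOOLEAN_TYPE.  */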
/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
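/* A sketch of the effect on a side-effect-free conditional such as

     tmp = a < b ? x : y;

   only the operands are reduced to is_gimple_val form, e.g.

     _1 = a < b;
     tmp = _1 ? x : y;

   and the COND_EXPR itself survives; note how && and || are first
   demoted to their non-short-circuit forms, since gimplifying them
   would create another pure COND_EXPR and recurse forever.  */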
/* Return true if evaluating EXPR could trap.
   EXPR is GENERIC, while tree_could_trap_p can be called
   only on GIMPLE.  */

bool
generic_expr_could_trap_p (tree expr)
{
  unsigned i, n;

  if (!expr || is_gimple_val (expr))
    return false;

  if (!EXPR_P (expr) || tree_could_trap_p (expr))
    return true;

  n = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < n; i++)
    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
      return true;

  return false;
}
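/* E.g. 'p != 0 ? *p : 0' counts as trapping because of the dereference
   of P, even though the expression as a whole guards it, whereas
   'x + 1' for unsigned X does not; a single potentially trapping
   operand anywhere in the GENERIC tree makes the whole expression
   count as trapping.  */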
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else		or		else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
		       is_gimple_condexpr_for_cond, fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_true)
	  || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_false)
	  || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
  copy_warning (cond_stmt, COND_EXPR_COND (expr));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	{
	  /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
	     handling that label_cont == label_true can be only reached
	     through fallthrough from { code; }.  */
	  if (integer_zerop (COND_EXPR_COND (expr)))
	    UNUSED_LABEL_P (label_true) = 1;
	  label_cont = label_true;
	}
      else
	{
	  bool then_side_effects
	    = (TREE_OPERAND (expr, 1)
	       && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      /* For if (0) { non-side-effect-code } else { code }
		 tell -Wimplicit-fallthrough handling that label_cont can
		 be only reached through fallthrough from { code }.  */
	      if (integer_zerop (COND_EXPR_COND (expr)))
		{
		  UNUSED_LABEL_P (label_true) = 1;
		  if (!then_side_effects)
		    UNUSED_LABEL_P (label_cont) = 1;
		}

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
	 tell -Wimplicit-fallthrough handling that label_false can be only
	 reached through fallthrough from { code }.  */
      if (integer_nonzerop (COND_EXPR_COND (expr))
	  && (TREE_OPERAND (expr, 2) == NULL_TREE
	      || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
	UNUSED_LABEL_P (label_false) = 1;
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
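/* A sketch of the two shapes this produces (user-level view):

     x = p ? f () : g ();

   acquires an "iftmp" temporary and becomes a void COND_EXPR that
   gimplifies to

     if (p) goto T; else goto F;
     T: iftmp = f (); goto E;
     F: iftmp = g ();
     E: x = iftmp;

   while an already-void 'if (p) f (); else g ();' skips the temporary
   and gets only the labels and the GIMPLE_COND.  */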
/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
   to be marked addressable.

   We cannot rely on such an expression being directly markable if a temporary
   has been created by the gimplification.  In this case, we create another
   temporary and initialize it with a copy, which will become a store after we
   mark it addressable.  This can happen if the front-end passed us something
   that it could not mark addressable yet, like a Fortran pass-by-reference
   parameter (int) floatvar.  */

static void
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
{
  while (handled_component_p (*expr_p))
    expr_p = &TREE_OPERAND (*expr_p, 0);

  /* Do not allow an SSA name as the temporary.  */
  if (is_gimple_reg (*expr_p))
    *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
}
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
  gimple_call_set_alloca_for_var (gs, true);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
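/* User-level sketch: this path is reached for copies whose size is not
   a compile-time constant, e.g. between two 'char[n]' variable-length
   arrays, which GIMPLE cannot represent as a first-class assignment;
   the store instead becomes

     __builtin_memcpy (&dst, &src, n);

   with both addresses previously reduced to is_gimple_val form.  */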
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
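/* User-level sketch: zero-initialization of a variable-sized object,
   presented by the front end as an assignment from an empty
   CONSTRUCTOR, becomes

     __builtin_memset (&object, 0, size);

   which is why the assertion above insists on an empty element list:
   any explicit elements would be lost by the block clear.  */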
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
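/* E.g. for an assignment (not an initialization) such as

     struct pair p = { 1, 2 };
     ...
     p = (struct pair) { p.y, p.x };

   evaluating the element stores in place would read fields of P that
   have already been overwritten; the walk above detects the overlap
   with the lhs base decl and forces the values into temporaries before
   any store happens.  */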
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
      loop_entry:
	object[var] = value;
	if (var == upper)
	  goto loop_exit;
	var = var + 1;
	goto loop_entry;
      loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    {
      if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
	  != GS_ERROR)
	gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
    }

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning an empty type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of empty type fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (!TREE_SIDE_EFFECTS (value)
	  && TREE_CODE (purpose) == FIELD_DECL
	  && is_empty_type (TREE_TYPE (purpose)))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
/* Return the appropriate RHS predicate for this LHS.  */

gimple_predicate
rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg (lhs))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Return the initial guess for an appropriate RHS predicate for this LHS,
   before the LHS has been gimplified.  */

static gimple_predicate
initial_rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg_type (TREE_TYPE (lhs)))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && !TREE_THIS_VOLATILE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
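/* E.g.

     int *p = (int []) { 1, 2, 3 };

   has its anonymous array decl emitted as a DECL_EXPR ahead of the
   statement, after which the literal expression is just the decl; a
   literal used only as an rvalue can additionally be marked
   TREE_READONLY and end up in the read-only data section.  */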
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
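/* E.g.

     struct S { struct T t; };
     struct S s = { (struct T) { 1, 2 } };

   is flattened to the plain initializer { { 1, 2 } }, provided the
   inner literal's address is never taken, so the outer CONSTRUCTOR can
   be analyzed (and possibly promoted to static data) as a whole.  */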
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;
  bool cleared = false;
  bool is_empty_ctor = false;
  bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	/* Use readonly data for initializers of this or smaller size
	   regardless of the num_nonzero_elements / num_unique_nonzero_elements
	   ratio.  */
	const HOST_WIDE_INT min_unique_size = 64;
	/* If num_nonzero_elements / num_unique_nonzero_elements ratio
	   is smaller than this, use readonly data.  */
	const int unique_nonzero_ratio = 8;
	/* True if a single access of the object must be ensured.  This is the
	   case if the target is volatile, the type is non-addressable and more
	   than one field need to be assigned.  */
	const bool ensure_single_access
	  = TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && vec_safe_length (elts) > 1;
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	HOST_WIDE_INT num_unique_nonzero_elements;
	bool complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;

	    /* The var will be initialized and so appear on lhs of
	       assignment, it can't be TREE_READONLY anymore.  */
	    if (VAR_P (object))
	      TREE_READONLY (object) = 0;

	    is_empty_ctor = true;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it is known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_unique_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a loss to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && VAR_P (object)
	    && !DECL_REGISTER (object)
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
	    /* For ctors that have many repeated nonzero elements
	       represented through RANGE_EXPRs, prefer initializing
	       those through runtime loops over copies of large amounts
	       of data from readonly data section.  */
	    && (num_unique_nonzero_elements
		> num_nonzero_elements / unique_nonzero_ratio
		|| ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
		    <= (unsigned HOST_WIDE_INT) min_unique_size)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;

	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* The var will be initialized and so appear on lhs of
	   assignment, it can't be TREE_READONLY anymore.  */
	if (VAR_P (object) && !notify_temp_creation)
	  TREE_READONLY (object) = 0;

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.cc to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p)
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else if (ensure_single_access && num_nonzero_elements == 0)
	  /* If a single access to the target must be ensured and all elements
	     are zero, then it's optimal to clear whatever their number.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */
	if (valid_const_initializer
	    && complete_p
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		/* For ctors that have many repeated nonzero elements
		   represented through RANGE_EXPRs, prefer initializing
		   those through runtime loops over copies of large amounts
		   of data from readonly data section.  */
		&& (num_unique_nonzero_elements
		    > num_nonzero_elements / unique_nonzero_ratio
		    || size <= min_unique_size)
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If a single access to the target must be ensured and there are
	   nonzero elements or the zero elements are not assigned en masse,
	   initialize the target from a temporary.  */
	if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;

	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    break;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	recompute_constructor_flags (ctor);

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }
	  }

	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p
      /* If the type is an empty type, we don't need to emit the
	 assignment.  */
      && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
	lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      ret = GS_OK;
    }
  else
    {
      *expr_p = NULL;
      ret = GS_ALL_DONE;
    }

  /* If the user requests to initialize automatic variables, we
     should initialize paddings inside the variable.  Add a call to
     __builtin_clear_padding (&object, 0, for_auto_init = true) to
     initialize paddings of object always to zero regardless of
     INIT_TYPE.  Note, we will not insert this call if the aggregate
     variable has been completely cleared already or it's initialized
     with an empty constructor.  We cannot insert this call if the
     variable is a gimple register since __builtin_clear_padding will take
     the address of the variable.  As a result, if a long double/_Complex long
     double variable will be spilled into stack later, its padding cannot
     be cleared with __builtin_clear_padding.  We should clear its padding
     when it is spilled into memory.  */
  if (is_init_expr
      && !is_gimple_reg (object)
      && clear_padding_type_may_have_padding_p (type)
      && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
	  || !AGGREGATE_TYPE_P (type))
      && is_var_need_auto_init (object))
    gimple_add_padding_init_for_auto_var (object, false, pre_p);

  return ret;
}
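/* Two sketches of the main strategies above (user-level view):

     const int tbl[4] = { 1, 2, 3, 4 };

   promotes TBL to a TREE_STATIC variable with a DECL_INITIAL and emits
   no initialization code at all, while

     int a[100] = { [3] = x };

   with non-constant X is an incomplete constructor, so the object is
   block-cleared first and only the single store a[3] = x is emitted.  */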
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  return gimple_fold_indirect_ref (t);
}
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor and not volatile, do the direct assignment from
	     the constructor, but only if the target is not volatile either
	     since this latter assignment might end up being done on a per
	     field basis.  However, if the target is volatile and the type
	     is aggregate and non-addressable, gimplify_init_constructor
	     knows that it needs to ensure a single access to the target
	     and it will return GS_OK only in this case.  */
	  if (TREE_READONLY (*from_p)
	      && DECL_INITIAL (*from_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
	      && !TREE_THIS_VOLATILE (*from_p)
	      && (!TREE_THIS_VOLATILE (*to_p)
		  || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
		      && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  But not if
	     the type is TREE_ADDRESSABLE; then C++17 says that the
	     TARGET_EXPR needs to be a temporary.  */
	    if (tree t
		= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
	      {
		bool volatile_p = TREE_THIS_VOLATILE (*from_p);
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	  break;

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& (TREE_CODE (*expr_p) != MODIFY_EXPR
		    || !TARGET_EXPR_NO_ELIDE (*from_p))
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;

	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					   false);
	  return ret;

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		       && (!is_gimple_variable (*to_p)
			   || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case NOP_EXPR:
	  /* Pull out compound literal expressions from a NOP_EXPR.
	     Those are created in the C FE to drop qualifiers during
	     lvalue conversion.  */
	  if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
	      && tree_ssa_useless_type_conversion (*from_p))
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      ret = GS_OK;
	      changed = true;
	    }
	  break;

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  break;

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
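/* E.g. for an aggregate (non-register) type, the COND_EXPR case above
   rewrites

     s = b ? s1 : s2;

   into

     if (b) s = s1; else s = s2;

   pushing the store into the branches rather than materializing a
   temporary of the aggregate type, which for TREE_ADDRESSABLE types
   would not even be possible.  */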
/* Return true if T looks like a valid GIMPLE statement.  */

static bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OACC_PARALLEL:
    case OACC_KERNELS:
    case OACC_SERIAL:
    case OACC_DATA:
    case OACC_HOST_DATA:
    case OACC_DECLARE:
    case OACC_UPDATE:
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OACC_CACHE:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OACC_LOOP:
    case OMP_SCAN:
    case OMP_SCOPE:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_MASKED:
    case OMP_TASKGROUP:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
    case OMP_TASKLOOP:
    case OMP_TEAMS:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}
/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.

   IMPORTANT NOTE: This promotion is performed by introducing a load of the
   other, unmodified part of the complex object just before the total store.
   As a consequence, if the object is still uninitialized, an undefined value
   will be loaded into a register, which may result in a spurious exception
   if the register is floating-point and the value happens to be a signaling
   NaN for example.  Then the fully-fledged complex operations lowering pass
   followed by a DCE pass are necessary in order to fix things up.  */

static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
				   bool want_value)
{
  enum tree_code code, ocode;
  tree lhs, rhs, new_rhs, other, realpart, imagpart;

  lhs = TREE_OPERAND (*expr_p, 0);
  rhs = TREE_OPERAND (*expr_p, 1);
  code = TREE_CODE (lhs);
  lhs = TREE_OPERAND (lhs, 0);

  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
  other = build1 (ocode, TREE_TYPE (rhs), lhs);
  suppress_warning (other);
  other = get_formal_tmp_var (other, pre_p);

  realpart = code == REALPART_EXPR ? rhs : other;
  imagpart = code == REALPART_EXPR ? other : rhs;

  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
  else
    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);

  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
  *expr_p = (want_value) ? rhs : NULL_TREE;

  return GS_ALL_DONE;
}
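
/* A rough illustration of the promotion above: for a complex variable x
   that satisfies is_gimple_reg, the partial store

     __real x = r;

   is turned (approximately, temporary names are made up) into

     D.1 = __imag x;
     x = COMPLEX_EXPR <r, D.1>;

   i.e. the untouched imaginary part is loaded first and a total store of
   the whole complex value is emitted instead.  */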
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

   PRE_P points to the list where side effects that must happen before
      *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
      *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
      in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  if (error_operand_p (*from_p) || error_operand_p (*to_p))
    return GS_ERROR;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value);
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
	{
	  tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
					       pre_p, post_p);
	  *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
	}
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Convert initialization from an empty variable-size CONSTRUCTOR to
     a memset.  */
  if (TREE_TYPE (*from_p) != error_mark_node
      && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
      && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
      && TREE_CODE (*from_p) == CONSTRUCTOR
      && CONSTRUCTOR_NELTS (*from_p) == 0)
    {
      maybe_with_size_expr (from_p);
      gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
      return gimplify_modify_expr_to_memset (expr_p,
					     TREE_OPERAND (*from_p, 1),
					     want_value, pre_p);
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     example in gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For empty types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (is_empty_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ???  Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  if (fndecl
	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ???  This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	copy_warning (assign, *from_p);
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
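
/* A small worked example of the WANT_VALUE handling above (approximate):
   when the assignment appears as a subexpression, as in

     a = b = foo ();

   the inner MODIFY_EXPR is gimplified with WANT_VALUE set, so we emit

     b = foo ();
     a = b;

   returning the unshared LHS 'b' as the value of the inner assignment
   (or the RHS instead when 'b' is volatile, to avoid re-reading the
   volatile location).  */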
/* Gimplify a comparison between two variable-sized objects.  Do this
   with a call to BUILT_IN_MEMCMP.  */

static enum gimplify_status
gimplify_variable_sized_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree t, arg, dest, src, expr;

  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
  arg = unshare_expr (arg);
  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
  src = build_fold_addr_expr_loc (loc, op1);
  dest = build_fold_addr_expr_loc (loc, op0);
  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
  t = build_call_expr_loc (loc, t, 3, dest, src, arg);

  expr
    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
  SET_EXPR_LOCATION (expr, loc);
  *expr_p = expr;

  return GS_OK;
}
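
/* Roughly, for two objects of variable size the comparison

     a == b

   is rewritten by the function above into

     __builtin_memcmp (&a, &b, <size-of-a>) == 0

   where the size expression comes from TYPE_SIZE_UNIT of the first
   operand, with any PLACEHOLDER_EXPRs substituted from that operand.  */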
/* Gimplify a comparison between two aggregate objects of integral scalar
   mode as a comparison between the bitwise equivalent scalar values.  */

static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);

  tree type = TREE_TYPE (op0);
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);

  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);

  *expr_p
    = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);

  return GS_OK;
}
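
/* For example (a sketch, for a front end whose language allows aggregate
   equality): given two objects of a record type that fits in SImode,

     struct S { short a; short b; } x, y;
     ... x == y ...

   the comparison is rewritten as a comparison of the bitwise-equivalent
   unsigned scalar values

     VIEW_CONVERT_EXPR<unsigned int>(x) == VIEW_CONVERT_EXPR<unsigned int>(y)

   which avoids a memcmp call for small aggregates.  */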
/* Gimplify an expression sequence.  This function gimplifies each
   expression and rewrites the original expression with the last
   expression of the sequence in GIMPLE form.

   PRE_P points to the list where the side effects for all the
   expressions in the sequence will be emitted.

   WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */

static enum gimplify_status
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree t = *expr_p;

  do
    {
      tree *sub_p = &TREE_OPERAND (t, 0);

      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
	gimplify_compound_expr (sub_p, pre_p, false);
      else
	gimplify_stmt (sub_p, pre_p);

      t = TREE_OPERAND (t, 1);
    }
  while (TREE_CODE (t) == COMPOUND_EXPR);

  *expr_p = t;
  if (want_value)
    return GS_OK;
  else
    {
      gimplify_stmt (expr_p, pre_p);
      return GS_ALL_DONE;
    }
}
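
/* For instance, gimplifying the sequence

     x = (a++, b++, c);

   emits 'a++' and 'b++' as statements on PRE_P and rewrites the
   COMPOUND_EXPR to its last expression, leaving approximately

     a = a + 1;
     b = b + 1;
     x = c;
*/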
/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
   gimplify.  After gimplification, EXPR_P will point to a new temporary
   that holds the original value of the SAVE_EXPR node.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  enum gimplify_status ret = GS_ALL_DONE;
  tree val;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  if (TREE_TYPE (val) == error_mark_node)
    return GS_ERROR;

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
    {
      /* The operand may be a void-valued expression.  It is
	 being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
	{
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_stmt, fb_none);
	  val = NULL;
	}
      else
	/* The temporary may not be an SSA name as later abnormal and EH
	   control flow may invalidate use/def domination.  When in SSA
	   form then assume there are no such issues and SAVE_EXPRs only
	   appear via GENERIC foldings.  */
	val = get_initialized_tmp_var (val, pre_p, post_p,
				       gimple_in_ssa_p (cfun));

      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
    }

  *expr_p = val;

  return ret;
}
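
/* E.g. a SAVE_EXPR <f ()> referenced twice in a GENERIC tree is
   evaluated just once into a temporary here, approximately

     tmp = f ();

   after which every reference to the SAVE_EXPR resolves to 'tmp',
   preserving the evaluate-once semantics of the node.  */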
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P.

   PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* FALLTHRU */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF
	  || (TREE_CODE (op0) == MEM_REF
	      && integer_zerop (TREE_OPERAND (op0, 1))))
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
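
/* Two small examples of the rewrites above (approximate): the internally
   generated

     &*ptr

   simply becomes 'ptr' (with a conversion added back if qualifiers
   differ), and

     &VIEW_CONVERT_EXPR<T>(obj)

   becomes the address of the underlying operand converted to the
   ADDR_EXPR's type, i.e. roughly (T *) &obj.  */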
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* If we can't make copies, we can only accept memory.
	 Similarly for VLAs.  */
      tree outtype = TREE_TYPE (TREE_VALUE (link));
      if (outtype != error_mark_node
	  && (TREE_ADDRESSABLE (outtype)
	      || !COMPLETE_TYPE_P (outtype)
	      || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory output %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tree orig = TREE_VALUE (link);
      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  if (orig != error_mark_node)
	    error ("invalid lvalue in %<asm%> output %d", i);
	  ret = tret;
	}
      /* If the constraint does not allow memory make sure we gimplify
	 it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      else if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      tree intype = TREE_TYPE (TREE_VALUE (link));
      if (intype != error_mark_node
	  && (TREE_ADDRESSABLE (intype)
	      || !COMPLETE_TYPE_P (intype)
	      || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (inputv != error_mark_node)
		error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			  "memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
      gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
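
/* As an approximate example of the in/out splitting above, the operand

     asm ("incl %0" : "+r" (x));

   is rewritten into a separate output and a matching numbered input

     asm ("incl %0" : "=r" (x) : "0" (x));

   which gives the optimizers independent read and write operands.  */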
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
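  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}

/* Sketching the effect of the loop above: a body sequence

     stmt1;
     WCE <cleanup>;   (not last in the sequence)
     stmt2;

   becomes approximately

     stmt1;
     try { stmt2; } finally { cleanup; }

   while a WCE that is the last statement simply has its cleanup spliced
   inline (or dropped entirely when it was EH-only), since there is no
   code after it left to protect.  */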
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   If FORCE_UNCOND is true perform the cleanup unconditionally;  this is
   only valid for clobbers.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f (A ()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f (temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimple_wce_set_cleanup_eh_only (wce, eh_only);

	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  suppress_warning (var, OPT_Wuninitialized);
	}
    }
  else
    {
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      gimple_seq init_pre_p = NULL;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (!poly_int_tree_p (DECL_SIZE (temp)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
	  /* FIXME: this is correct only when the size of the type does
	     not depend on expressions evaluated in init.  */
	  gimplify_vla_decl (temp, &init_pre_p);
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			     fb_none);
      else
	{
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			       fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, &init_pre_p);

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables
	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
	      && !TREE_STATIC (temp)
	      && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}

      gimple_seq_add_seq (pre_p, init_pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
			     CLEANUP_EH_ONLY (targ), pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
/* Gimplification of expression trees.  */

/* Gimplify an expression which appears at statement context.  The
   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
   NULL, a new sequence is allocated.

   Return true if we actually added a statement to the queue.  */

bool
gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
{
  gimple_seq_node last;

  last = gimple_seq_last (*seq_p);
  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
  return last != gimple_seq_last (*seq_p);
}
/* Add FIRSTPRIVATE entries for DECL to CTX and to the surrounding OpenMP
   parallels.  If entries already exist, force them to be some flavor of
   private.  If there is no enclosing parallel, do nothing.  */

void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  if (n->value & GOVD_SHARED)
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else if (n->value & GOVD_MAP)
	    n->value |= GOVD_MAP_TO_ONLY;
	  else
	    return;
	}
      else if ((ctx->region_type & ORT_TARGET) != 0)
	{
	  if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
	}
      else if (ctx->region_type != ORT_WORKSHARE
	       && ctx->region_type != ORT_TASKGROUP
	       && ctx->region_type != ORT_SIMD
	       && ctx->region_type != ORT_ACC
	       && !(ctx->region_type & ORT_TARGET_DATA))
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}
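
/* For example (a sketch): with a variable-length array inside a parallel
   region,

     void f (int n)
     {
     #pragma omp parallel
       {
	 int a[n];
	 ...
       }
     }

   the gimplified size expressions of a's type refer to temporaries
   derived from 'n'; the routine above walks the enclosing contexts and
   records those temporaries as FIRSTPRIVATE, roughly as if the user had
   written an explicit firstprivate clause for them.  */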
/* Similarly for each of the type sizes of TYPE.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  if (ctx->privatized_types->add (type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}
/* Add an entry for DECL in the OMP context CTX with FLAGS.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  Exception is a shared clause,
     there is nothing privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		    && (flags & GOVD_FIRSTPRIVATE))
		   || (ctx->region_type == ORT_TARGET_DATA
		       && (flags & GOVD_DATA_SHARE_CLASS) == 0))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (t && DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reduction clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;

      while (outer_ctx)
	{
	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
	  if (n != NULL)
	    {
	      /* Ignore local variables and explicitly declared clauses.  */
	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
		break;
	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
		{
		  /* According to the OpenACC spec, such a reduction variable
		     should already have a copy map on a kernels construct,
		     verify that here.  */
		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
			      && (n->value & GOVD_MAP));
		}
	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
		{
		  /* Remove firstprivate and make it a copy map.  */
		  n->value &= ~GOVD_FIRSTPRIVATE;
		  n->value |= GOVD_MAP;
		}
	    }
	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
				 GOVD_MAP | GOVD_SEEN);
	      break;
	    }
	  outer_ctx = outer_ctx->outer_context;
	}
    }
}
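
/* A note on the variable-sized path above (a sketch, not tied to any one
   front end): a VLA decl has a DECL_VALUE_EXPR of the form *tmp for a
   compiler-generated replacement pointer 'tmp', so what is actually
   remapped into the region is that pointer (PRIVATE or FIRSTPRIVATE as
   computed above) together with FIRSTPRIVATE entries for the gimplified
   DECL_SIZE/DECL_SIZE_UNIT temporaries; the VLA object itself is never
   SHARED directly.  */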
/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0
	|| octx->order_concurrent)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    if (octx->order_concurrent)
	      {
		error ("threadprivate variable %qE used in a region with"
		       " %<order(concurrent)%> clause", DECL_NAME (decl));
		inform (octx->location, "enclosing region");
	      }
	    else
	      {
		error ("threadprivate variable %qE used in target region",
		       DECL_NAME (decl));
		inform (octx->location, "enclosing target region");
	      }
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      inform (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
/* Return true if global var DECL is device resident.  */

static bool
device_resident_p (tree decl)
{
  tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));

  if (!attr)
    return false;

  for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
    {
      tree c = TREE_VALUE (t);
      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
	return true;
    }

  return false;
}
/* Return true if DECL has an ACC DECLARE attribute.  */

static bool
is_oacc_declared (tree decl)
{
  tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
  tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
  return declared != NULL_TREE;
}
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not fully.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (ctx->region_type & ORT_TASK)
    {
      tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);

      /* The event-handle specified by a detach clause should always be
	 firstprivate, regardless of the current default.  */
      if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
	kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
    }
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;
  else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
    default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  /* For C/C++ default({,first}private), variables with static storage duration
     declared in a namespace or global scope and referenced in construct
     must be explicitly specified, i.e. acts as default(none).  */
  else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
	    || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	   && VAR_P (decl)
	   && is_global_var (decl)
	   && (DECL_FILE_SCOPE_P (decl)
	       || (DECL_CONTEXT (decl)
		   && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
	   && !lang_GNU_Fortran ())
    default_kind = OMP_CLAUSE_DEFAULT_NONE;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
	  rtype = "taskloop";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	inform (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
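
/* E.g. (a sketch):

     int v;
     void f (void)
     {
     #pragma omp parallel default(none)
       v++;
     }

   takes the OMP_CLAUSE_DEFAULT_NONE case above: it reports that 'v' is
   not specified in the enclosing 'parallel' and then falls through to
   treat 'v' as shared, so the diagnostic is not repeated at every use.  */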
/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.  */

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;
  bool is_private = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  if (omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* For Fortran COMMON blocks, only used variables in those blocks are
     transferred and remapped.  The block itself will have a private clause
     to avoid transferring the data twice.
     The hook evaluates to false by default.  For a variable in Fortran's
     COMMON or EQUIVALENCE block, it returns 'true' (as we have shared=false),
     as only the variables in such a COMMON/EQUIVALENCE block shall be
     privatized, not the whole block.  For C++ and Fortran, it can also be
     true under certain other conditions, if DECL_HAS_VALUE_EXPR.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl)
      && !is_private)
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      rkind = "kernels";

      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'copy'.  */
	flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
    case ORT_ACC_SERIAL:
      rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";

      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (on_device || declared)
	flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'firstprivate'.  */
	flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  if (ctx->region_type & ORT_ACC)
	    /* For OpenACC, defer expansion of value to avoid transferring
	       privatized common block data instead of im-/explicitly
	       transferred variables which are in common blocks.  */
	    ;
	  else
	    {
	      tree value = get_base_address (DECL_VALUE_EXPR (decl));

	      if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
		return omp_notice_threadprivate_variable (ctx, decl, value);
	    }
	}

      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      if (ctx->region_type & ORT_ACC)
	/* For OpenACC, as remarked above, defer expansion.  */
	shared = false;
      else
	shared = true;

      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      if (n == NULL)
	{
	  unsigned nflags = flags;
	  if ((ctx->region_type & ORT_ACC) == 0)
	    {
	      bool is_declare_target = false;
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target)
		{
		  int gdmk;
		  enum omp_clause_defaultmap_kind kind;
		  if (lang_hooks.decls.omp_allocatable_p (decl))
		    gdmk = GDMK_ALLOCATABLE;
		  else if (lang_hooks.decls.omp_scalar_target_p (decl))
		    gdmk = GDMK_SCALAR_TARGET;
		  else if (lang_hooks.decls.omp_scalar_p (decl, false))
		    gdmk = GDMK_SCALAR;
		  else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				   == POINTER_TYPE)))
		    gdmk = GDMK_POINTER;
		  else
		    gdmk = GDMK_AGGREGATE;
		  kind = lang_hooks.decls.omp_predetermined_mapping (decl);
		  if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
		    {
		      if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
			nflags |= GOVD_FIRSTPRIVATE;
		      else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
			nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
		      else
			gcc_unreachable ();
		    }
		  else if (ctx->defaultmap[gdmk] == 0)
		    {
		      tree d = lang_hooks.decls.omp_report_decl (decl);
		      error ("%qE not specified in enclosing %<target%>",
			     DECL_NAME (d));
		      inform (ctx->location, "enclosing %<target%>");
		    }
		  else if (ctx->defaultmap[gdmk]
			   & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
		    nflags |= ctx->defaultmap[gdmk];
		  else
		    {
		      gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
		      nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
		    }
		}
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
			error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      goto found_outer;
		    }
		}
	    }

	  if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
			  | GOVD_MAP_ALLOC_ONLY)) == flags)
	    {
	      tree type = TREE_TYPE (decl);

	      if (gimplify_omp_ctxp->target_firstprivatize_array_bases
		  && omp_privatize_by_reference (decl))
		type = TREE_TYPE (type);
	      if (!omp_mappable_type (type))
		{
		  error ("%qD referenced in target region does not have "
			 "a mappable type", decl);
		  nflags |= GOVD_MAP | GOVD_EXPLICIT;
		}
	      else
		{
		  if ((ctx->region_type & ORT_ACC) != 0)
		    nflags = oacc_default_clause (ctx, decl, flags);
		  else
		    nflags |= GOVD_MAP;
		}
	    }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_TASKGROUP
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
     lb, b or incr expressions, those shouldn't be turned into simd arrays.  */
  if (ctx->region_type == ORT_SIMD
      && ctx->in_for_exprs
      && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
	  == GOVD_PRIVATE))
    flags &= ~GOVD_SEEN;

  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (ctx->region_type & ORT_ACC)
    /* For OpenACC, as remarked above, defer expansion.  */
    shared = false;
  else
    shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_TASKGROUP
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
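
/* An illustrative sketch (hypothetical source, not from the GCC testsuite):
   for something like

     #pragma omp parallel for shared (i)
     for (i = 0; i < 64; i++)
       ;

   the splay-tree lookup above finds I with GOVD_SHARED set in the innermost
   context, so omp_is_private reports "iteration variable ... should be
   private" and then forces the entry to GOVD_PRIVATE so that gimplification
   can continue.  */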
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too,
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	{
	  if ((ctx->region_type & ORT_TARGET_DATA) != 0
	      || n == NULL
	      || (n->value & GOVD_MAP) == 0)
	    continue;
	  return false;
	}

      if (n != NULL)
	{
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  return (n->value & GOVD_SHARED) == 0;
	}

      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_TASKGROUP
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC)
	continue;

      return false;
    }
  while (1);
}
/* Callback for walk_tree to find a DECL_EXPR for the given DECL.  */

static tree
find_decl_expr (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
    return t;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Gimplify the affinity clause but effectively ignore it.
   Generate:
     var = begin;
     if ((step > 1) ? var <= end : var > end)
       locator_var_expr;  */
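
/* An illustrative example (hypothetical, not from the testsuite): for

     #pragma omp task affinity (iterator (i = 0 : n : 2) : a[i])

   the function below gimplifies the begin/end/step expressions, builds
   roughly

     i = 0;
     if ((step > 0) ? i <= end : i > end)
       ... evaluate &a[i] for its side effects ...

   inside a BIND_EXPR, and then discards the result -- the clause only needs
   its operands evaluated, not any runtime effect.  */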
static void
gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
{
  tree last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree label = NULL_TREE;
  tree *last_body = NULL;
  for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
      {
	tree t = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (t) == TREE_LIST
	    && TREE_PURPOSE (t)
	    && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
	  {
	    if (TREE_VALUE (t) == null_pointer_node)
	      continue;
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		if (last_bind)
		  {
		    append_to_statement_list (label, last_body);
		    gimplify_and_add (last_bind, pre_p);
		    last_bind = NULL_TREE;
		  }
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
				       is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
					   is_gimple_val, fb_rvalue)
			    == GS_ERROR))
		      return;
		  }
		last_iter = TREE_PURPOSE (t);
		tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
		last_bind = build3 (BIND_EXPR, void_type_node,
				    BLOCK_VARS (block), NULL, block);
		last_body = &BIND_EXPR_BODY (last_bind);
		tree cond = NULL_TREE;
		location_t loc = OMP_CLAUSE_LOCATION (c);
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    tree var = TREE_VEC_ELT (it, 0);
		    tree begin = TREE_VEC_ELT (it, 1);
		    tree end = TREE_VEC_ELT (it, 2);
		    tree step = TREE_VEC_ELT (it, 3);
		    loc = DECL_SOURCE_LOCATION (var);
		    tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
					   var, begin);
		    append_to_statement_list_force (tem, last_body);

		    tree cond1 = fold_build2_loc (loc, GT_EXPR,
						  boolean_type_node,
						  step,
						  build_zero_cst
						    (TREE_TYPE (step)));
		    tree cond2 = fold_build2_loc (loc, LE_EXPR,
						  boolean_type_node,
						  var, end);
		    tree cond3 = fold_build2_loc (loc, GT_EXPR,
						  boolean_type_node,
						  var, end);
		    cond1 = fold_build3_loc (loc, COND_EXPR,
					     boolean_type_node,
					     cond1, cond2, cond3);
		    if (cond)
		      cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
					      boolean_type_node, cond, cond1);
		    else
		      cond = cond1;
		  }
		tree cont_label = create_artificial_label (loc);
		label = build1 (LABEL_EXPR, void_type_node, cont_label);
		tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
					    cond, void_node,
					    build_and_jump (&cont_label));
		append_to_statement_list_force (tem, last_body);
	      }
	    if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
	      {
		append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
					  last_body);
		TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
	      }
	    if (error_operand_p (TREE_VALUE (t)))
	      return;
	    append_to_statement_list_force (TREE_VALUE (t), last_body);
	    TREE_VALUE (t) = null_pointer_node;
	  }
	else
	  {
	    if (last_bind)
	      {
		append_to_statement_list (label, last_body);
		gimplify_and_add (last_bind, pre_p);
		last_bind = NULL_TREE;
	      }
	    if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	      {
		gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			       NULL, is_gimple_val, fb_rvalue);
		OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      }
	    if (error_operand_p (OMP_CLAUSE_DECL (c)))
	      return;
	    if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			       is_gimple_lvalue, fb_lvalue) == GS_ERROR)
	      return;
	    gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
	  }
      }
  if (last_bind)
    {
      append_to_statement_list (label, last_body);
      gimplify_and_add (last_bind, pre_p);
    }
  return;
}
/* If *LIST_P contains any OpenMP depend clauses with iterators,
   lower all the depend clauses by populating corresponding depend
   array.  Returns 0 if there are no such depend clauses, or
   2 if all depend clauses should be removed, 1 otherwise.  */
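
/* A hypothetical example (not from the OpenMP specification or testsuite):
   for

     #pragma omp task depend (iterator (i = 0 : 4), in : a[i]) \
		      depend (out : b)

   the code below computes the iterator trip count (4 here), creates the
   depend array with the count/header slots built in the loops that follow,
   fills in &a[0] .. &a[3] from a generated loop plus &b directly, and
   finally prepends a single OMP_CLAUSE_DEPEND_LAST clause whose decl is the
   address of that array.  */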
static int
gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
{
  tree c;
  gimple *g;
  size_t n[5] = { 0, 0, 0, 0, 0 };
  bool unused[5];
  tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
  tree last_iter = NULL_TREE, last_count = NULL_TREE;
  size_t i, j;
  location_t first_loc = UNKNOWN_LOCATION;

  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
	switch (OMP_CLAUSE_DEPEND_KIND (c))
	  {
	  case OMP_CLAUSE_DEPEND_IN:
	    i = 2;
	    break;
	  case OMP_CLAUSE_DEPEND_OUT:
	  case OMP_CLAUSE_DEPEND_INOUT:
	    i = 0;
	    break;
	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	    i = 1;
	    break;
	  case OMP_CLAUSE_DEPEND_DEPOBJ:
	    i = 3;
	    break;
	  case OMP_CLAUSE_DEPEND_INOUTSET:
	    i = 4;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	tree t = OMP_CLAUSE_DECL (c);
	if (first_loc == UNKNOWN_LOCATION)
	  first_loc = OMP_CLAUSE_LOCATION (c);
	if (TREE_CODE (t) == TREE_LIST
	    && TREE_PURPOSE (t)
	    && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
	  {
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		tree tcnt = size_one_node;
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
				       is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
					   is_gimple_val, fb_rvalue)
			    == GS_ERROR))
		      return 2;
		    tree var = TREE_VEC_ELT (it, 0);
		    tree begin = TREE_VEC_ELT (it, 1);
		    tree end = TREE_VEC_ELT (it, 2);
		    tree step = TREE_VEC_ELT (it, 3);
		    tree orig_step = TREE_VEC_ELT (it, 4);
		    tree type = TREE_TYPE (var);
		    tree stype = TREE_TYPE (step);
		    location_t loc = DECL_SOURCE_LOCATION (var);
		    tree endmbegin;
		    /* Compute count for this iterator as
		       orig_step > 0
		       ? (begin < end ? (end - begin + (step - 1)) / step : 0)
		       : (begin > end ? (end - begin + (step + 1)) / step : 0)
		       and compute product of those for the entire depend
		       clause.  */
		    if (POINTER_TYPE_P (type))
		      endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
						   stype, end, begin);
		    else
		      endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
						   end, begin);
		    tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
						   step,
						   build_int_cst (stype, 1));
		    tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
						   build_int_cst (stype, 1));
		    tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
						unshare_expr (endmbegin),
						stepm1);
		    pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
					   pos, step);
		    tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
						endmbegin, stepp1);
		    if (TYPE_UNSIGNED (stype))
		      {
			neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
			step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
		      }
		    neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
					   neg, step);
		    tree cond = fold_build2_loc (loc, LT_EXPR,
						 boolean_type_node,
						 begin, end);
		    pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
					   build_int_cst (stype, 0));
		    cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
					    end, begin);
		    neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
					   build_int_cst (stype, 0));
		    tree osteptype = TREE_TYPE (orig_step);
		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
					    orig_step,
					    build_int_cst (osteptype, 0));
		    tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
						cond, pos, neg);
		    cnt = fold_convert_loc (loc, sizetype, cnt);
		    if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
				       fb_rvalue) == GS_ERROR)
		      return 2;
		    tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
		  }
		if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
				   fb_rvalue) == GS_ERROR)
		  return 2;
		last_iter = TREE_PURPOSE (t);
		last_count = tcnt;
	      }
	    if (counts[i] == NULL_TREE)
	      counts[i] = last_count;
	    else
	      counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
					  PLUS_EXPR, counts[i], last_count);
	  }
	else
	  n[i]++;
      }
  for (i = 0; i < 5; i++)
    if (counts[i])
      break;
  if (i == 5)
    return 0;

  tree total = size_zero_node;
  for (i = 0; i < 5; i++)
    {
      unused[i] = counts[i] == NULL_TREE && n[i] == 0;
      if (counts[i] == NULL_TREE)
	counts[i] = size_zero_node;
      if (n[i])
	counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
      if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
			 fb_rvalue) == GS_ERROR)
	return 2;
      total = size_binop (PLUS_EXPR, total, counts[i]);
    }

  if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
      == GS_ERROR)
    return 2;
  bool is_old = unused[1] && unused[3] && unused[4];
  tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
			     size_int (is_old ? 1 : 4));
  if (!unused[4])
    totalpx = size_binop (PLUS_EXPR, totalpx,
			  size_binop (MULT_EXPR, counts[4], size_int (2)));
  tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
  tree array = create_tmp_var_raw (type);
  TREE_ADDRESSABLE (array) = 1;
  if (!poly_int_tree_p (totalpx))
    {
      if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
	gimplify_type_sizes (TREE_TYPE (array), pre_p);
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
	}
      gimplify_vla_decl (array, pre_p);
    }
  else
    gimple_add_tmp_var (array);
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  tree tem;
  if (!is_old)
    {
      tem = build2 (MODIFY_EXPR, void_type_node, r,
		    build_int_cst (ptr_type_node, 0));
      gimplify_and_add (tem, pre_p);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  tem = build2 (MODIFY_EXPR, void_type_node, r,
		fold_convert (ptr_type_node, total));
  gimplify_and_add (tem, pre_p);
  for (i = 1; i < (is_old ? 2 : 4); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
		  NULL_TREE, NULL_TREE);
      tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
      gimplify_and_add (tem, pre_p);
    }

  tree cnts[6];
  for (j = 5; j; j--)
    if (!unused[j - 1])
      break;
  for (i = 0; i < 5; i++)
    {
      if (i && (i >= j || unused[i - 1]))
	{
	  cnts[i] = cnts[i - 1];
	  continue;
	}
      cnts[i] = create_tmp_var (sizetype);
      if (i == 0)
	g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
      else
	{
	  tree t;
	  if (is_old)
	    t = size_binop (PLUS_EXPR, counts[0], size_int (2));
	  else
	    t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
	  if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
	      == GS_ERROR)
	    return 2;
	  g = gimple_build_assign (cnts[i], t);
	}
      gimple_seq_add_stmt (pre_p, g);
    }
  if (unused[4])
    cnts[5] = NULL_TREE;
  else
    {
      tree t = size_binop (PLUS_EXPR, total, size_int (5));
      cnts[5] = create_tmp_var (sizetype);
      g = gimple_build_assign (cnts[i], t);
      gimple_seq_add_stmt (pre_p, g);
    }

  last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree *last_body = NULL;
  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
	switch (OMP_CLAUSE_DEPEND_KIND (c))
	  {
	  case OMP_CLAUSE_DEPEND_IN:
	    i = 2;
	    break;
	  case OMP_CLAUSE_DEPEND_OUT:
	  case OMP_CLAUSE_DEPEND_INOUT:
	    i = 0;
	    break;
	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	    i = 1;
	    break;
	  case OMP_CLAUSE_DEPEND_DEPOBJ:
	    i = 3;
	    break;
	  case OMP_CLAUSE_DEPEND_INOUTSET:
	    i = 4;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	tree t = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (t) == TREE_LIST
	    && TREE_PURPOSE (t)
	    && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
	  {
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		if (last_bind)
		  gimplify_and_add (last_bind, pre_p);
		tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
		last_bind = build3 (BIND_EXPR, void_type_node,
				    BLOCK_VARS (block), NULL, block);
		TREE_SIDE_EFFECTS (last_bind) = 1;
		SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
		tree *p = &BIND_EXPR_BODY (last_bind);
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    tree var = TREE_VEC_ELT (it, 0);
		    tree begin = TREE_VEC_ELT (it, 1);
		    tree end = TREE_VEC_ELT (it, 2);
		    tree step = TREE_VEC_ELT (it, 3);
		    tree orig_step = TREE_VEC_ELT (it, 4);
		    tree type = TREE_TYPE (var);
		    location_t loc = DECL_SOURCE_LOCATION (var);
		    /* Emit:
		       var = begin;
		       goto cond_label;
		       beg_label:
		       ...
		       var = var + step;
		       cond_label:
		       if (orig_step > 0) {
			 if (var < end) goto beg_label;
		       } else {
			 if (var > end) goto beg_label;
		       }
		       for each iterator, with inner iterators added to
		       the ... above.  */
		    tree beg_label = create_artificial_label (loc);
		    tree cond_label = NULL_TREE;
		    tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
				      var, begin);
		    append_to_statement_list_force (tem, p);
		    tem = build_and_jump (&cond_label);
		    append_to_statement_list_force (tem, p);
		    tem = build1 (LABEL_EXPR, void_type_node, beg_label);
		    append_to_statement_list (tem, p);
		    tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
					NULL_TREE, NULL_TREE);
		    TREE_SIDE_EFFECTS (bind) = 1;
		    SET_EXPR_LOCATION (bind, loc);
		    append_to_statement_list_force (bind, p);
		    if (POINTER_TYPE_P (type))
		      tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
					var, fold_convert_loc (loc, sizetype,
							       step));
		    else
		      tem = build2_loc (loc, PLUS_EXPR, type, var, step);
		    tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
				      var, tem);
		    append_to_statement_list_force (tem, p);
		    tem = build1 (LABEL_EXPR, void_type_node, cond_label);
		    append_to_statement_list (tem, p);
		    tree cond = fold_build2_loc (loc, LT_EXPR,
						 boolean_type_node,
						 var, end);
		    tree pos
		      = fold_build3_loc (loc, COND_EXPR, void_type_node,
					 cond, build_and_jump (&beg_label),
					 void_node);
		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
					    var, end);
		    tree neg
		      = fold_build3_loc (loc, COND_EXPR, void_type_node,
					 cond, build_and_jump (&beg_label),
					 void_node);
		    tree osteptype = TREE_TYPE (orig_step);
		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
					    orig_step,
					    build_int_cst (osteptype, 0));
		    tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
					   cond, pos, neg);
		    append_to_statement_list_force (tem, p);
		    p = &BIND_EXPR_BODY (bind);
		  }
		last_body = p;
	      }
	    last_iter = TREE_PURPOSE (t);
	    if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
	      {
		append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
					  last_body);
		TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
	      }
	    if (error_operand_p (TREE_VALUE (t)))
	      return 2;
	    if (TREE_VALUE (t) != null_pointer_node)
	      TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
	    if (i == 4)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			    NULL_TREE, NULL_TREE);
		tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
				  NULL_TREE, NULL_TREE);
		r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, r, r2);
		append_to_statement_list_force (tem, last_body);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, cnts[i],
				  size_binop (PLUS_EXPR, cnts[i],
					      size_int (1)));
		append_to_statement_list_force (tem, last_body);
		i = 5;
	      }
	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			NULL_TREE, NULL_TREE);
	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			      void_type_node, r, TREE_VALUE (t));
	    append_to_statement_list_force (tem, last_body);
	    if (i == 5)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array,
			    size_binop (PLUS_EXPR, cnts[i], size_int (1)),
			    NULL_TREE, NULL_TREE);
		tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, r, tem);
		append_to_statement_list_force (tem, last_body);
	      }
	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			      void_type_node, cnts[i],
			      size_binop (PLUS_EXPR, cnts[i],
					  size_int (1 + (i == 5))));
	    append_to_statement_list_force (tem, last_body);
	    TREE_VALUE (t) = null_pointer_node;
	  }
	else
	  {
	    if (last_bind)
	      {
		gimplify_and_add (last_bind, pre_p);
		last_bind = NULL_TREE;
	      }
	    if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	      {
		gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			       NULL, is_gimple_val, fb_rvalue);
		OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      }
	    if (error_operand_p (OMP_CLAUSE_DECL (c)))
	      return 2;
	    if (OMP_CLAUSE_DECL (c) != null_pointer_node)
	      OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	    if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			       is_gimple_val, fb_rvalue) == GS_ERROR)
	      return 2;
	    if (i == 4)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			    NULL_TREE, NULL_TREE);
		tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
				  NULL_TREE, NULL_TREE);
		r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
		tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
		gimplify_and_add (tem, pre_p);
		g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
							      cnts[i],
							      size_int (1)));
		gimple_seq_add_stmt (pre_p, g);
		i = 5;
	      }
	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			NULL_TREE, NULL_TREE);
	    tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
	    gimplify_and_add (tem, pre_p);
	    if (i == 5)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array,
			    size_binop (PLUS_EXPR, cnts[i], size_int (1)),
			    NULL_TREE, NULL_TREE);
		tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
		tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
		append_to_statement_list_force (tem, last_body);
		gimplify_and_add (tem, pre_p);
	      }
	    g = gimple_build_assign (cnts[i],
				     size_binop (PLUS_EXPR, cnts[i],
						 size_int (1 + (i == 5))));
	    gimple_seq_add_stmt (pre_p, g);
	  }
      }
  if (last_bind)
    gimplify_and_add (last_bind, pre_p);
  tree cond = boolean_false_node;
  if (is_old)
    {
      if (!unused[0])
	cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
			   size_binop_loc (first_loc, PLUS_EXPR, counts[0],
					   size_int (2)));
      if (!unused[2])
	cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
			   build2_loc (first_loc, NE_EXPR, boolean_type_node,
				       cnts[2],
				       size_binop_loc (first_loc, PLUS_EXPR,
						       unshare_expr (total),
						       size_int (2))));
    }
  else
    {
      tree prev = size_int (5);
      for (i = 0; i < 5; i++)
	{
	  if (unused[i])
	    continue;
	  prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
	  cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
			     build2_loc (first_loc, NE_EXPR, boolean_type_node,
					 cnts[i], unshare_expr (prev)));
	}
    }
  tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
		    build_call_expr_loc (first_loc,
					 builtin_decl_explicit (BUILT_IN_TRAP),
					 0), void_node);
  gimplify_and_add (tem, pre_p);
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *list_p;
  *list_p = c;
  return 1;
}
/* For a set of mappings describing an array section pointed to by a struct
   (or derived type, etc.) component, create an "alloc" or "release" node to
   insert into a list following a GOMP_MAP_STRUCT node.  For some types of
   mapping (e.g. Fortran arrays with descriptors), an additional mapping may
   be created that is inserted into the list of mapping nodes attached to the
   directive being processed -- not part of the sorted list of nodes after
   GOMP_MAP_STRUCT.

   CODE is the code of the directive being processed.  GRP_START and GRP_END
   are the first and last of two or three nodes representing this array section
   mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
   GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER).  EXTRA_NODE is
   filled with the additional node described above, if needed.

   This function does not add the new nodes to any lists itself.  It is the
   responsibility of the caller to do that.  */
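
/* An illustrative sketch of the input and output (names hypothetical): for
   a group such as

     GOMP_MAP_TO            s.ptr->data[0:n]
     GOMP_MAP_TO_PSET       s.ptr  (descriptor)
     GOMP_MAP_ATTACH_DETACH s.ptr

   the function below returns an alloc/release node for the array section
   whose size is copied from the TO_PSET node, and fills *EXTRA_NODE with a
   second alloc/release node for the pointer component itself.  */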
static tree
build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
			     tree *extra_node)
{
  enum gomp_map_kind mkind
    = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;

  gcc_assert (grp_start != grp_end);

  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
  OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
  OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
  tree grp_mid = NULL_TREE;
  if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
    grp_mid = OMP_CLAUSE_CHAIN (grp_start);

  if (grp_mid
      && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
      && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
    OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
  else
    OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);

  if (grp_mid
      && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
      && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
	  || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
    {
      tree c3
	= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
      OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
      OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
      OMP_CLAUSE_CHAIN (c3) = NULL_TREE;

      *extra_node = c3;
    }
  else
    *extra_node = NULL_TREE;

  return c2;
}
/* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
   and set *BITPOSP and *POFFSETP to the bit offset of the access.
   If BASE_REF is non-NULL and the containing object is a reference, set
   *BASE_REF to that reference before dereferencing the object.
   If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
   has array type, else return NULL.  */
static tree
extract_base_bit_offset (tree base, poly_int64 *bitposp,
			 poly_offset_int *poffsetp)
{
  tree offset;
  poly_int64 bitsize, bitpos;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  poly_offset_int poffset;

  STRIP_NOPS (base);

  base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
			      &unsignedp, &reversep, &volatilep);

  STRIP_NOPS (base);

  if (offset && poly_int_tree_p (offset))
    {
      poffset = wi::to_poly_offset (offset);
      offset = NULL_TREE;
    }
  else
    poffset = 0;

  if (maybe_ne (bitpos, 0))
    poffset += bits_to_bytes_round_down (bitpos);

  *bitposp = bitpos;
  *poffsetp = poffset;

  return base;
}
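
/* For illustration (hypothetical access): given BASE "s.a[3]" where "a" is
   an array of ints, get_inner_reference peels the expression down to "s" and
   reports the position of the access as a bit offset; the code above folds
   the whole-byte part of that bit offset into *POFFSETP, so callers see the
   byte offset of a[3] within "s".  */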
/* Used for topological sorting of mapping groups.  UNVISITED means we haven't
   started processing the group yet.  The TEMPORARY mark is used when we first
   encounter a group on a depth-first traversal, and the PERMANENT mark is used
   when we have processed all the group's children (i.e. all the base pointers
   referred to by the group's mapping nodes, recursively).  */

enum omp_tsort_mark {
  UNVISITED,
  TEMPORARY,
  PERMANENT
};
/* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
   clause.  */

struct omp_mapping_group {
  tree *grp_start;
  tree grp_end;
  omp_tsort_mark mark;
  /* If we've removed the group but need to reindex, mark the group as
     deleted.  */
  bool deleted;
  struct omp_mapping_group *sibling;
  struct omp_mapping_group *next;
};
DEBUG_FUNCTION void
debug_mapping_group (omp_mapping_group *grp)
{
  tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
  OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
  debug_generic_expr (*grp->grp_start);
  OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
}
/* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
   isn't one.  */

static tree
omp_get_base_pointer (tree expr)
{
  while (TREE_CODE (expr) == ARRAY_REF
	 || TREE_CODE (expr) == COMPONENT_REF)
    expr = TREE_OPERAND (expr, 0);

  if (TREE_CODE (expr) == INDIRECT_REF
      || (TREE_CODE (expr) == MEM_REF
	  && integer_zerop (TREE_OPERAND (expr, 1))))
    {
      expr = TREE_OPERAND (expr, 0);
      while (TREE_CODE (expr) == COMPOUND_EXPR)
	expr = TREE_OPERAND (expr, 1);
      if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
	expr = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == SAVE_EXPR)
	expr = TREE_OPERAND (expr, 0);
      STRIP_NOPS (expr);
      return expr;
    }

  return NULL_TREE;
}
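
/* For instance (illustrative): for EXPR "p->x[4]" -- an ARRAY_REF of a
   COMPONENT_REF of an INDIRECT_REF -- the code above strips the array and
   component references, finds the indirection and returns "p", the OpenMP
   base pointer of the access.  A non-pointer access such as "s.x[4]" has no
   indirection, so NULL_TREE is returned.  */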
/* Remove COMPONENT_REFS and indirections from EXPR.  */

static tree
omp_strip_components_and_deref (tree expr)
{
  while (TREE_CODE (expr) == COMPONENT_REF
	 || TREE_CODE (expr) == INDIRECT_REF
	 || (TREE_CODE (expr) == MEM_REF
	     && integer_zerop (TREE_OPERAND (expr, 1)))
	 || TREE_CODE (expr) == POINTER_PLUS_EXPR
	 || TREE_CODE (expr) == COMPOUND_EXPR)
    if (TREE_CODE (expr) == COMPOUND_EXPR)
      expr = TREE_OPERAND (expr, 1);
    else
      expr = TREE_OPERAND (expr, 0);

  STRIP_NOPS (expr);

  return expr;
}
static tree
omp_strip_indirections (tree expr)
{
  while (TREE_CODE (expr) == INDIRECT_REF
	 || (TREE_CODE (expr) == MEM_REF
	     && integer_zerop (TREE_OPERAND (expr, 1))))
    expr = TREE_OPERAND (expr, 0);

  return expr;
}
/* An attach or detach operation depends directly on the address being
   attached/detached.  Return that address, or none if there are no
   attachments/detachments.  */

static tree
omp_get_attachment (omp_mapping_group *grp)
{
  tree node = *grp->grp_start;

  switch (OMP_CLAUSE_MAP_KIND (node))
    {
    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
    case GOMP_MAP_TOFROM:
    case GOMP_MAP_ALWAYS_FROM:
    case GOMP_MAP_ALWAYS_TO:
    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_FORCE_FROM:
    case GOMP_MAP_FORCE_TO:
    case GOMP_MAP_FORCE_TOFROM:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_RELEASE:
    case GOMP_MAP_DELETE:
    case GOMP_MAP_FORCE_ALLOC:
      if (node == grp->grp_end)
	return NULL_TREE;

      node = OMP_CLAUSE_CHAIN (node);
      if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
	{
	  gcc_assert (node != grp->grp_end);
	  node = OMP_CLAUSE_CHAIN (node);
	}
      if (node)
	switch (OMP_CLAUSE_MAP_KIND (node))
	  {
	  case GOMP_MAP_POINTER:
	  case GOMP_MAP_ALWAYS_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
	    return NULL_TREE;

	  case GOMP_MAP_ATTACH_DETACH:
	  case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	    return OMP_CLAUSE_DECL (node);

	  default:
	    internal_error ("unexpected mapping node");
	  }
      return error_mark_node;

    case GOMP_MAP_TO_PSET:
      gcc_assert (node != grp->grp_end);
      node = OMP_CLAUSE_CHAIN (node);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
	return OMP_CLAUSE_DECL (node);
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      node = OMP_CLAUSE_CHAIN (node);
      if (!node || *grp->grp_start == grp->grp_end)
	return OMP_CLAUSE_DECL (*grp->grp_start);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	return OMP_CLAUSE_DECL (*grp->grp_start);
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_STRUCT:
    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_IF_PRESENT:
    case GOMP_MAP_FIRSTPRIVATE:
    case GOMP_MAP_FIRSTPRIVATE_INT:
    case GOMP_MAP_USE_DEVICE_PTR:
    case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
      return NULL_TREE;

    default:
      internal_error ("unexpected mapping node");
    }

  return error_mark_node;
}
/* Given a pointer START_P to the start of a group of related (e.g. pointer)
   mappings, return the chain pointer to the end of that group in the list.  */

static tree *
omp_group_last (tree *start_p)
{
  tree c = *start_p, nc, *grp_last_p = start_p;

  gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);

  nc = OMP_CLAUSE_CHAIN (c);

  if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
    return grp_last_p;

  switch (OMP_CLAUSE_MAP_KIND (c))
    {
    default:
      while (nc
	     && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	     && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		 || (OMP_CLAUSE_MAP_KIND (nc)
		     == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
		 || (OMP_CLAUSE_MAP_KIND (nc)
		     == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
	{
	  grp_last_p = &OMP_CLAUSE_CHAIN (c);
	  c = nc;
	  tree nc2 = OMP_CLAUSE_CHAIN (nc);
	  if (nc2
	      && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (nc)
		  == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
	      && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
	    {
	      grp_last_p = &OMP_CLAUSE_CHAIN (nc);
	      c = nc2;
	      nc2 = OMP_CLAUSE_CHAIN (nc2);
	    }
	  nc = nc2;
	}
      break;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      /* This is a weird artifact of how directives are parsed: bare attach or
	 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
	 FIRSTPRIVATE_REFERENCE node.  FIXME.  */
      if (nc
	  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
	      || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
	grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_TO_PSET:
      if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
	      || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
	grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_STRUCT:
      {
	unsigned HOST_WIDE_INT num_mappings
	  = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
	if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
	    || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
	  grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
	for (unsigned i = 0; i < num_mappings; i++)
	  grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
      }
      break;
    }

  return grp_last_p;
}
/* Walk through LIST_P, and return a list of groups of mappings found (e.g.
   OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
   associated GOMP_MAP_POINTER mappings).  Return a vector of omp_mapping_group
   if we have more than one such group, else return NULL.  */

static void
omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
			     tree gather_sentinel)
{
  for (tree *cp = list_p;
       *cp && *cp != gather_sentinel;
       cp = &OMP_CLAUSE_CHAIN (*cp))
    {
      if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
	continue;

      tree *grp_last_p = omp_group_last (cp);
      omp_mapping_group grp;

      grp.grp_start = cp;
      grp.grp_end = *grp_last_p;
      grp.mark = UNVISITED;
      grp.sibling = NULL;
      grp.deleted = false;
      grp.next = NULL;
      groups->safe_push (grp);

      cp = grp_last_p;
    }
}
static vec<omp_mapping_group> *
omp_gather_mapping_groups (tree *list_p)
{
  vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();

  omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);

  if (groups->length () > 0)
    return groups;
  else
    {
      delete groups;
      return NULL;
    }
}
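
/* As an illustration (hypothetical clause list): given

     map(to: *p) map(firstprivate: p [pointer assign]) map(from: x)

   omp_group_last treats the first two nodes as one group (the data movement
   plus its FIRSTPRIVATE_POINTER helper), so the vector returned above holds
   two groups: one spanning both "p" nodes and a singleton group for "x".  */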
/* A pointer mapping group GRP may define a block of memory starting at some
   base address, and maybe also define a firstprivate pointer or firstprivate
   reference that points to that block.  The return value is a node containing
   the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
   If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
   return the number of consecutive chained nodes in CHAINED.  */

static tree
omp_group_base (omp_mapping_group *grp, unsigned int *chained,
		tree *firstprivate)
{
  tree node = *grp->grp_start;

  *firstprivate = NULL_TREE;
  *chained = 1;

  switch (OMP_CLAUSE_MAP_KIND (node))
    {
    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
    case GOMP_MAP_TOFROM:
    case GOMP_MAP_ALWAYS_FROM:
    case GOMP_MAP_ALWAYS_TO:
    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_FORCE_FROM:
    case GOMP_MAP_FORCE_TO:
    case GOMP_MAP_FORCE_TOFROM:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_RELEASE:
    case GOMP_MAP_DELETE:
    case GOMP_MAP_FORCE_ALLOC:
    case GOMP_MAP_IF_PRESENT:
      if (node == grp->grp_end)
	return node;

      node = OMP_CLAUSE_CHAIN (node);
      if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
	{
	  if (node == grp->grp_end)
	    return *grp->grp_start;
	  node = OMP_CLAUSE_CHAIN (node);
	}
      if (node)
	switch (OMP_CLAUSE_MAP_KIND (node))
	  {
	  case GOMP_MAP_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
	    *firstprivate = OMP_CLAUSE_DECL (node);
	    return *grp->grp_start;

	  case GOMP_MAP_ALWAYS_POINTER:
	  case GOMP_MAP_ATTACH_DETACH:
	  case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	    return *grp->grp_start;

	  default:
	    internal_error ("unexpected mapping node");
	  }
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_TO_PSET:
      gcc_assert (node != grp->grp_end);
      node = OMP_CLAUSE_CHAIN (node);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
	return NULL_TREE;
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      node = OMP_CLAUSE_CHAIN (node);
      if (!node || *grp->grp_start == grp->grp_end)
	return NULL_TREE;
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	{
	  /* We're mapping the base pointer itself in a bare attach or detach
	     node.  This is a side effect of how parsing works, and the mapping
	     will be removed anyway (at least for enter/exit data directives).
	     We should ignore the mapping here.  FIXME.  */
	  return NULL_TREE;
	}
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_STRUCT:
      {
	unsigned HOST_WIDE_INT num_mappings
	  = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
	node = OMP_CLAUSE_CHAIN (node);
	if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	  {
	    *firstprivate = OMP_CLAUSE_DECL (node);
	    node = OMP_CLAUSE_CHAIN (node);
	  }
	*chained = num_mappings;
	return node;
      }

    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_FIRSTPRIVATE:
    case GOMP_MAP_FIRSTPRIVATE_INT:
    case GOMP_MAP_USE_DEVICE_PTR:
    case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
      return NULL_TREE;

    case GOMP_MAP_FIRSTPRIVATE_POINTER:
    case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
    case GOMP_MAP_POINTER:
    case GOMP_MAP_ALWAYS_POINTER:
    case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
      /* These shouldn't appear by themselves.  */
      internal_error ("unexpected pointer mapping node");
      return error_mark_node;

    default:
      gcc_unreachable ();
    }

  return error_mark_node;
}
/* Given a vector of omp_mapping_groups, build a hash table so we can look up
   nodes by tree_operand_hash.  */

static void
omp_index_mapping_groups_1 (hash_map<tree_operand_hash,
				     omp_mapping_group *> *grpmap,
			    vec<omp_mapping_group> *groups,
			    tree reindex_sentinel)
{
  omp_mapping_group *grp;
  unsigned int i;
  bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (reindexing && *grp->grp_start == reindex_sentinel)
	above_hwm = true;

      if (reindexing && !above_hwm)
	continue;

      tree fpp;
      unsigned int chained;
      tree node = omp_group_base (grp, &chained, &fpp);

      if (node == error_mark_node || (!node && !fpp))
	continue;

      for (unsigned j = 0;
	   node && j < chained;
	   node = OMP_CLAUSE_CHAIN (node), j++)
	{
	  tree decl = OMP_CLAUSE_DECL (node);

	  /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
	     meaning node-hash lookups don't work.  This is a workaround for
	     that, but ideally we should just create the INDIRECT_REF at
	     source instead.  FIXME.  */
	  if (TREE_CODE (decl) == MEM_REF
	      && integer_zerop (TREE_OPERAND (decl, 1)))
	    decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));

	  omp_mapping_group **prev = grpmap->get (decl);

	  if (prev && *prev == grp)
	    ;
	  else if (prev)
	    {
	      /* Mapping the same thing twice is normally diagnosed as an
		 error, but can happen under some circumstances, e.g. in
		 pr99928-16.c, where

		   #pragma omp target simd reduction(+:a[:3]) \
			      map(always, tofrom: a[:6])

		 will result in two "a[0]" mappings (of different sizes).  */
	      grp->sibling = (*prev)->sibling;
	      (*prev)->sibling = grp;
	    }
	  else
	    grpmap->put (decl, grp);
	}

      if (!fpp)
	continue;

      omp_mapping_group **prev = grpmap->get (fpp);
      if (prev && *prev != grp)
	{
	  grp->sibling = (*prev)->sibling;
	  (*prev)->sibling = grp;
	}
      else
	grpmap->put (fpp, grp);
    }
}
static hash_map<tree_operand_hash, omp_mapping_group *> *
omp_index_mapping_groups (vec<omp_mapping_group> *groups)
{
  hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
    = new hash_map<tree_operand_hash, omp_mapping_group *>;

  omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);

  return grpmap;
}
/* Rebuild group map from partially-processed clause list (during
   omp_build_struct_sibling_lists).  We have already processed nodes up until
   a high-water mark (HWM).  This is a bit tricky because the list is being
   reordered as it is scanned, but we know:

   1. The list after HWM has not been touched yet, so we can reindex it safely.

   2. The list before and including HWM has been altered, but remains
      well-formed throughout the sibling-list building operation.

   so, we can do the reindex operation in two parts, on the processed and
   then the unprocessed halves of the list.  */

static hash_map<tree_operand_hash, omp_mapping_group *> *
omp_reindex_mapping_groups (tree *list_p,
			    vec<omp_mapping_group> *groups,
			    vec<omp_mapping_group> *processed_groups,
			    tree sentinel)
{
  hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
    = new hash_map<tree_operand_hash, omp_mapping_group *>;

  processed_groups->truncate (0);

  omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
  omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);

  omp_index_mapping_groups_1 (grpmap, groups, sentinel);

  return grpmap;
}
/* Find the immediately-containing struct for a component ref (etc.)
   expression EXPR.  */

static tree
omp_containing_struct (tree expr)
{
  tree expr0 = expr;

  STRIP_NOPS (expr);

  /* Note: don't strip NOPs unless we're also stripping off array refs or a
     component ref.  */
  if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
    return expr0;

  while (TREE_CODE (expr) == ARRAY_REF)
    expr = TREE_OPERAND (expr, 0);

  if (TREE_CODE (expr) == COMPONENT_REF)
    expr = TREE_OPERAND (expr, 0);

  return expr;
}
/* Return TRUE if DECL describes a component that is part of a whole structure
   that is mapped elsewhere in GRPMAP.  *MAPPED_BY_GROUP is set to the group
   that maps that structure, if present.  */

static bool
omp_mapped_by_containing_struct (hash_map<tree_operand_hash,
					  omp_mapping_group *> *grpmap,
				 tree decl,
				 omp_mapping_group **mapped_by_group)
{
  tree wsdecl = NULL_TREE;

  *mapped_by_group = NULL;

  while (true)
    {
      wsdecl = omp_containing_struct (decl);
      if (wsdecl == decl)
	break;
      omp_mapping_group **wholestruct = grpmap->get (wsdecl);
      if (!wholestruct
	  && TREE_CODE (wsdecl) == MEM_REF
	  && integer_zerop (TREE_OPERAND (wsdecl, 1)))
	{
	  tree deref = TREE_OPERAND (wsdecl, 0);
	  deref = build_fold_indirect_ref (deref);
	  wholestruct = grpmap->get (deref);
	}
      if (wholestruct)
	{
	  *mapped_by_group = *wholestruct;
	  return true;
	}
      decl = wsdecl;
    }

  return false;
}
/* Helper function for omp_tsort_mapping_groups.  Returns TRUE on success, or
   FALSE on error.  */

static bool
omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
			    vec<omp_mapping_group> *groups,
			    hash_map<tree_operand_hash, omp_mapping_group *>
			      *grpmap,
			    omp_mapping_group *grp)
{
  if (grp->mark == PERMANENT)
    return true;
  if (grp->mark == TEMPORARY)
    {
      fprintf (stderr, "when processing group:\n");
      debug_mapping_group (grp);
      internal_error ("base pointer cycle detected");
      return false;
    }
  grp->mark = TEMPORARY;

  tree attaches_to = omp_get_attachment (grp);

  if (attaches_to)
    {
      omp_mapping_group **basep = grpmap->get (attaches_to);

      if (basep && *basep != grp)
	{
	  for (omp_mapping_group *w = *basep; w; w = w->sibling)
	    if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
	      return false;
	}
    }

  tree decl = OMP_CLAUSE_DECL (*grp->grp_start);

  while (decl)
    {
      tree base = omp_get_base_pointer (decl);

      if (!base)
	break;

      omp_mapping_group **innerp = grpmap->get (base);
      omp_mapping_group *wholestruct;

      /* We should treat whole-structure mappings as if all (pointer, in this
	 case) members are mapped as individual list items.  Check if we have
	 such a whole-structure mapping, if we don't have an explicit reference
	 to the pointer member itself.  */
      if (!innerp
	  && TREE_CODE (base) == COMPONENT_REF
	  && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
	innerp = &wholestruct;

      if (innerp && *innerp != grp)
	{
	  for (omp_mapping_group *w = *innerp; w; w = w->sibling)
	    if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
	      return false;
	  break;
	}

      decl = base;
    }

  grp->mark = PERMANENT;

  /* Emit grp to output list.  */

  **outlist = grp;
  *outlist = &grp->next;

  return true;
}
/* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
   before mappings that use those pointers.  This is an implementation of the
   depth-first search algorithm, described e.g. at:

     https://en.wikipedia.org/wiki/Topological_sorting
*/

static omp_mapping_group *
omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
			  hash_map<tree_operand_hash, omp_mapping_group *>
			    *grpmap)
{
  omp_mapping_group *grp, *outlist = NULL, **cursor;
  unsigned int i;

  cursor = &outlist;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (grp->mark != PERMANENT)
	if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
	  return NULL;
    }

  return outlist;
}
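
/* For example (hypothetical): with the clauses

     map(to: p->x[0:10]) map(to: p)

   the group mapping "p" is the OpenMP base pointer of the group mapping
   "p->x[0:10]", so the depth-first walk above emits the "p" group first;
   later processing can then rely on base pointers preceding their uses.  */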
/* Split INLIST into two parts, moving groups corresponding to
   ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
   The former list is then appended to the latter.  Each sub-list retains the
   order of the original list.
   Note that ATTACH nodes are later moved to the end of the list in
   gimplify_adjust_omp_clauses, for target regions.  */

static omp_mapping_group *
omp_segregate_mapping_groups (omp_mapping_group *inlist)
{
  omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
  omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;

  for (omp_mapping_group *w = inlist; w;)
    {
      tree c = *w->grp_start;
      omp_mapping_group *next = w->next;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);

      switch (OMP_CLAUSE_MAP_KIND (c))
	{
	case GOMP_MAP_ALLOC:
	case GOMP_MAP_RELEASE:
	case GOMP_MAP_DELETE:
	  *ard_tail = w;
	  w->next = NULL;
	  ard_tail = &w->next;
	  break;

	default:
	  *tf_tail = w;
	  w->next = NULL;
	  tf_tail = &w->next;
	}

      w = next;
    }

  /* Now splice the lists together...  */
  *tf_tail = ard_groups;

  return tf_groups;
}
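
/* E.g. (illustrative): an input list of groups

     {alloc a} {to b} {release c} {tofrom d}

   becomes {to b} {tofrom d} {alloc a} {release c}: each sub-list keeps its
   relative order, with the alloc/release/delete groups appended at the
   end.  */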
/* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
   those groups based on the output list of omp_tsort_mapping_groups --
   singly-linked, threaded through each element's NEXT pointer starting at
   HEAD.  Each list element appears exactly once in that linked list.

   Each element of GROUPS may correspond to one or several mapping nodes.
   Node groups are kept together, and in the reordered list, the positions of
   the original groups are reused for the positions of the reordered list.
   Hence if we have e.g.

     {to ptr ptr} firstprivate {tofrom ptr} ...
	  ^            ^            ^
      first group   non-"map"   second group

   and say the second group contains a base pointer for the first so must be
   moved before it, the resulting list will contain:

     {tofrom ptr} firstprivate {to ptr ptr} ...
	  ^                         ^
     prev. second group        prev. first group
*/

static tree *
omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
			    omp_mapping_group *head,
			    tree *list_p)
{
  omp_mapping_group *grp;
  unsigned int i;
  unsigned numgroups = groups->length ();
  auto_vec<tree> old_heads (numgroups);
  auto_vec<tree *> old_headps (numgroups);
  auto_vec<tree> new_heads (numgroups);
  auto_vec<tree> old_succs (numgroups);
  bool map_at_start = (list_p == (*groups)[0].grp_start);

  tree *new_grp_tail = NULL;

  /* Stash the start & end nodes of each mapping group before we start
     modifying the list.  */
  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      old_headps.quick_push (grp->grp_start);
      old_heads.quick_push (*grp->grp_start);
      old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
    }

  /* And similarly, the heads of the groups in the order we want to rearrange
     the list to.  */
  for (omp_mapping_group *w = head; w; w = w->next)
    new_heads.quick_push (*w->grp_start);

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      gcc_assert (head);

      if (new_grp_tail && old_succs[i - 1] == old_heads[i])
	{
	  /* a {b c d} {e f g} h i j   (original)
	     a {k l m} {e f g} h i j   (inserted new group on last iter)
	     a {k l m} {n o p} h i j   (this time, chain last group to new
	     one)  */
	  *new_grp_tail = new_heads[i];
	}
      else if (new_grp_tail)
	{
	  /* a {b c d} e {f g h} i j k   (original)
	     a {l m n} e {f g h} i j k   (gap after last iter's group)
	     a {l m n} e {o p q} h i j   (chain last group to old
	     successor)  */
	  *new_grp_tail = old_succs[i - 1];
	  *old_headps[i] = new_heads[i];
	}
      else
	{
	  /* The first inserted group -- point to new group, and leave end
	     open.  */
	  *grp->grp_start = new_heads[i];
	}

      new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);

      head = head->next;
    }

  if (new_grp_tail)
    *new_grp_tail = old_succs[numgroups - 1];

  gcc_assert (!head);

  return map_at_start ? (*groups)[0].grp_start : list_p;
}
/* DECL is supposed to have lastprivate semantics in the outer contexts
   of combined/composite constructs, starting with OCTX.
   Add needed lastprivate, shared or map clause if no data sharing or
   mapping clause are present.  IMPLICIT_P is true if it is an implicit
   clause (IV on simd), in which case the lastprivate will not be
   copied to some constructs.  */

static void
omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
					       tree decl, bool implicit_p)
{
  struct gimplify_omp_ctx *orig_octx = octx;
  for (; octx; octx = octx->outer_context)
    {
      if ((octx->region_type == ORT_COMBINED_PARALLEL
	   || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
	  continue;
	}
      if ((octx->region_type & ORT_TASK) != 0
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (implicit_p
	  && octx->region_type == ORT_WORKSHARE
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && octx->outer_context
	  && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
	  && splay_tree_lookup (octx->outer_context->variables,
				(splay_tree_key) decl) == NULL)
	{
	  octx = octx->outer_context;
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && !omp_check_private (octx, decl, false))
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (octx->region_type == ORT_COMBINED_TARGET)
	{
	  splay_tree_node n = splay_tree_lookup (octx->variables,
						 (splay_tree_key) decl);
	  if (n == NULL)
	    {
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	  else if (!implicit_p
		   && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
	    {
	      n->value &= ~(GOVD_FIRSTPRIVATE
			    | GOVD_FIRSTPRIVATE_IMPLICIT
			    | GOVD_EXPLICIT);
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	}
      break;
    }
  if (octx && (implicit_p || octx != orig_octx))
    omp_notice_variable (octx, decl, true);
}
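
/* A rough sketch of the effect (hypothetical source): for

     #pragma omp target parallel for simd
     for (i = 0; i < n; i++)
       ;

   the simd IV "i" gets lastprivate semantics on the enclosing combined
   constructs via the walk above: GOVD_SHARED on the combined parallel,
   GOVD_LASTPRIVATE on the combined loop contexts, and GOVD_MAP on the
   combined target.  */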
/* If we have mappings INNER and OUTER, where INNER is a component access and
   OUTER is a mapping of the whole containing struct, check that the mappings
   are compatible.  We'll be deleting the inner mapping, so we need to make
   sure the outer mapping does (at least) the same transfers to/from the device
   as the inner mapping.  */

static bool
omp_check_mapping_compatibility (location_t loc,
				 omp_mapping_group *outer,
				 omp_mapping_group *inner)
{
  tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;

  gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
  gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);

  enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
  enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);

  if (outer_kind == inner_kind)
    return true;

  switch (outer_kind)
    {
    case GOMP_MAP_ALWAYS_TO:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC
	  || inner_kind == GOMP_MAP_TO)
	return true;
      break;

    case GOMP_MAP_ALWAYS_FROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC
	  || inner_kind == GOMP_MAP_FROM)
	return true;
      break;

    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC)
	return true;
      break;

    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_TOFROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC
	  || inner_kind == GOMP_MAP_TO
	  || inner_kind == GOMP_MAP_FROM
	  || inner_kind == GOMP_MAP_TOFROM)
	return true;
      break;

    default:
      break;
    }

  error_at (loc, "data movement for component %qE is not compatible with "
	    "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
	    OMP_CLAUSE_DECL (first_outer));

  return false;
}
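
/* For instance (illustrative): an inner "map(to: s.a)" whose containing
   struct is mapped by an outer "map(tofrom: s)" is accepted by the switch
   above, whereas an inner "map(from: s.a)" under an outer
   "map(always, to: s)" falls through to the error, because deleting the
   inner mapping would lose the copy-out.  */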
/* Similar to omp_resolve_clause_dependencies, but for OpenACC.  The only
   clause dependencies we handle for now are struct element mappings and
   whole-struct mappings on the same directive, and duplicate clause
   detection.  */

static void
oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
				  hash_map<tree_operand_hash,
					   omp_mapping_group *> *grpmap)
{
  int i;
  omp_mapping_group *grp;
  hash_set<tree_operand_hash> *seen_components = NULL;
  hash_set<tree_operand_hash> *shown_error = NULL;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree grp_end = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (grp_end);

      gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);

      if (DECL_P (grp_end))
	continue;

      tree c = OMP_CLAUSE_DECL (*grp->grp_start);
      while (TREE_CODE (c) == ARRAY_REF)
	c = TREE_OPERAND (c, 0);
      if (TREE_CODE (c) != COMPONENT_REF)
	continue;
      if (!seen_components)
	seen_components = new hash_set<tree_operand_hash> ();
      if (!shown_error)
	shown_error = new hash_set<tree_operand_hash> ();
      if (seen_components->contains (c)
	  && !shown_error->contains (c))
	{
	  error_at (OMP_CLAUSE_LOCATION (grp_end),
		    "%qE appears more than once in map clauses",
		    OMP_CLAUSE_DECL (grp_end));
	  shown_error->add (c);
	}
      else
	seen_components->add (c);

      omp_mapping_group *struct_group;
      if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
	  && *grp->grp_start == grp_end)
	{
	  omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
					   struct_group, grp);
	  /* Remove the whole of this mapping -- redundant.  */
	  grp->deleted = true;
	}
    }

  if (seen_components)
    delete seen_components;
  if (shown_error)
    delete shown_error;
}
/* Link node NEWNODE so it is pointed to by chain INSERT_AT.  NEWNODE's chain
   is linked to the previous node pointed to by INSERT_AT.  */

static tree *
omp_siblist_insert_node_after (tree newnode, tree *insert_at)
{
  OMP_CLAUSE_CHAIN (newnode) = *insert_at;
  *insert_at = newnode;
  return &OMP_CLAUSE_CHAIN (newnode);
}
/* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
   pointed to by chain MOVE_AFTER instead.  */

static void
omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
{
  gcc_assert (node == *old_pos);
  *old_pos = OMP_CLAUSE_CHAIN (node);
  OMP_CLAUSE_CHAIN (node) = *move_after;
  *move_after = node;
}
/* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
   LAST_NODE to after MOVE_AFTER chain.  Similar to below function, but no
   new nodes are prepended to the list before splicing into the new position.
   Return the position we should continue scanning the list at, or NULL to
   stay where we were.  */

static tree *
omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
			      tree *move_after)
{
  if (first_ptr == move_after)
    return NULL;

  tree tmp = *first_ptr;
  *first_ptr = OMP_CLAUSE_CHAIN (last_node);
  OMP_CLAUSE_CHAIN (last_node) = *move_after;
  *move_after = tmp;

  return first_ptr;
}
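/* Editor's illustration, not part of the original source: both move helpers
   above are sublist splices on the same hypothetical `node' type -- unlink a
   run of nodes by redirecting the chain that points at it, then stitch it in
   after the target chain:

     void
     move_nodes_after (struct node **first_ptr, struct node *last_node,
                       struct node **move_after)
     {
       struct node *tmp = *first_ptr;   // head of the sublist to move
       *first_ptr = last_node->chain;   // unlink [tmp, last_node]
       last_node->chain = *move_after;  // splice in front of old successor
       *move_after = tmp;               // ... right after the target chain
     }

   The real helpers additionally early-out when the sublist is already in
   place and report where scanning of the clause list should resume.  */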
/* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
   [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
   pointer MOVE_AFTER.

   The latter list was previously part of the OMP clause list, and the former
   (prepended) part is comprised of new nodes.

   We start with a list of nodes starting with a struct mapping node.  We
   rearrange the list so that new nodes starting from FIRST_NEW and whose last
   node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
   the group of mapping nodes we are currently processing (from the chain
   FIRST_PTR to LAST_NODE).  The return value is the pointer to the next chain
   we should continue processing from, or NULL to stay where we were.

   The transformation (in the case where MOVE_AFTER and FIRST_PTR are
   different) is worked through below.  Here we are processing LAST_NODE, and
   FIRST_PTR points at the preceding mapping clause:

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->F (first_ptr)]
  F. map_to_4			[->G (continue_at)]
  G. attach_4 (last_node)	[->H]
  H. ...

     *last_new_tail = *first_ptr;

  I. new_node (first_new)	[->F (last_new_tail)]

     *first_ptr = OMP_CLAUSE_CHAIN (last_node)

  #. mapping node		chain
  ----------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (first_ptr)]
  F. map_to_4			[->G (continue_at)]
  G. attach_4 (last_node)	[->H]
  H. ...

  I. new_node (first_new)	[->F (last_new_tail)]

     OMP_CLAUSE_CHAIN (last_node) = *move_after;

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  H. ...

  I. new_node (first_new)	[->F (last_new_tail)]

     *move_after = first_new;

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->I (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  H. ...

  I. new_node (first_new)	[->F (last_new_tail)]

   or, rewritten in list order:

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->I (move_after)]
  I. new_node (first_new)	[->F (last_new_tail)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  H. ...
*/

static tree *
omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
				     tree *first_ptr, tree last_node,
				     tree *move_after)
{
  tree *continue_at = NULL;
  *last_new_tail = *first_ptr;
  if (first_ptr == move_after)
    *move_after = first_new;
  else
    {
      *first_ptr = OMP_CLAUSE_CHAIN (last_node);
      continue_at = first_ptr;
      OMP_CLAUSE_CHAIN (last_node) = *move_after;
      *move_after = first_new;
    }
  return continue_at;
}
/* Mapping struct members causes an additional set of nodes to be created,
   starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
   number of members being mapped, in order of ascending position (address or
   bitwise).

   We scan through the list of mapping clauses, calling this function for each
   struct member mapping we find, and build up the list of mappings after the
   initial GOMP_MAP_STRUCT node.  For pointer members, these will be
   newly-created ALLOC nodes.  For non-pointer members, the existing mapping is
   moved into place in the sorted list.

     struct {
       int *a;
       int *b;
       int c;
       int *d;
     } s;

     #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
				      struct.d[0:n])

     GOMP_MAP_STRUCT (4)
     [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
     GOMP_MAP_ALLOC  (struct.a)
     GOMP_MAP_ALLOC  (struct.b)
     GOMP_MAP_TO     (struct.c)
     GOMP_MAP_ALLOC  (struct.d)
     ...

   In the case where we are mapping references to pointers, or in Fortran if
   we are mapping an array with a descriptor, additional nodes may be created
   after the struct node list also.

   The return code is either a pointer to the next node to process (if the
   list has been rearranged), else NULL to continue with the next node in the
   original list.  */

static tree *
omp_accumulate_sibling_list (enum omp_region_type region_type,
			     enum tree_code code,
			     hash_map<tree_operand_hash, tree>
			       *&struct_map_to_clause, tree *grp_start_p,
			     tree grp_end, tree *inner)
{
  poly_offset_int coffset;
  poly_int64 cbitpos;
  tree ocd = OMP_CLAUSE_DECL (grp_end);
  bool openmp = !(region_type & ORT_ACC);
  tree *continue_at = NULL;

  while (TREE_CODE (ocd) == ARRAY_REF)
    ocd = TREE_OPERAND (ocd, 0);

  if (TREE_CODE (ocd) == INDIRECT_REF)
    ocd = TREE_OPERAND (ocd, 0);

  tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);

  bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
  bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
			 == GOMP_MAP_ATTACH_DETACH)
			|| (OMP_CLAUSE_MAP_KIND (grp_end)
			    == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
  bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
		 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);

  /* FIXME: If we're not mapping the base pointer in some other clause on this
     directive, I think we want to create ALLOC/RELEASE here -- i.e. not
     early-exit.  */
  if (openmp && attach_detach)
    return NULL;

  if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
    {
      tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				 OMP_CLAUSE_MAP);
      gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;

      OMP_CLAUSE_SET_MAP_KIND (l, k);

      OMP_CLAUSE_DECL (l) = unshare_expr (base);

      OMP_CLAUSE_SIZE (l)
	= (!attach ? size_int (1)
	   : (DECL_P (OMP_CLAUSE_DECL (l))
	      ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
	      : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
      if (struct_map_to_clause == NULL)
	struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
      struct_map_to_clause->put (base, l);

      if (ptr || attach_detach)
	{
	  tree extra_node;
	  tree alloc_node
	    = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
					   &extra_node);
	  OMP_CLAUSE_CHAIN (l) = alloc_node;

	  tree *insert_node_pos = grp_start_p;

	  if (extra_node)
	    {
	      OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
	      OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
	    }
	  else
	    OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;

	  *insert_node_pos = l;
	}
      else
	{
	  gcc_assert (*grp_start_p == grp_end);
	  grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
	}

      tree noind = omp_strip_indirections (base);

      if (!openmp
	  && (region_type & ORT_TARGET)
	  && TREE_CODE (noind) == COMPONENT_REF)
	{
	  /* The base for this component access is a struct component access
	     itself.  Insert a node to be processed on the next iteration of
	     our caller's loop, which will subsequently be turned into a new,
	     inner GOMP_MAP_STRUCT mapping.

	     We need to do this else the non-DECL_P base won't be
	     rewritten correctly in the offloaded region.  */
	  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
	  OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
	  OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
	  *inner = c2;
	  return NULL;
	}

      tree sdecl = omp_strip_components_and_deref (base);

      if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
	{
	  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				      OMP_CLAUSE_MAP);
	  bool base_ref
	    = (TREE_CODE (base) == INDIRECT_REF
	       && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
		    == REFERENCE_TYPE)
		   || ((TREE_CODE (TREE_OPERAND (base, 0))
			== INDIRECT_REF)
		       && (TREE_CODE (TREE_TYPE (TREE_OPERAND
						   (TREE_OPERAND (base, 0),
						    0)))
			   == REFERENCE_TYPE))));
	  enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
					      : GOMP_MAP_FIRSTPRIVATE_POINTER;
	  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
	  OMP_CLAUSE_DECL (c2) = sdecl;
	  tree baddr = build_fold_addr_expr (base);
	  baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
				    ptrdiff_type_node, baddr);
	  /* This isn't going to be good enough when we add support for more
	     complicated lvalue expressions.  FIXME.  */
	  if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
	    sdecl = build_simple_mem_ref (sdecl);
	  tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
					    ptrdiff_type_node, sdecl);
	  OMP_CLAUSE_SIZE (c2)
	    = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
			       ptrdiff_type_node, baddr, decladdr);
	  /* Insert after struct node.  */
	  OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
	  OMP_CLAUSE_CHAIN (l) = c2;
	}

      return NULL;
    }
  else if (struct_map_to_clause)
    {
      tree *osc = struct_map_to_clause->get (base);
      tree *sc = NULL, *scp = NULL;
      sc = &OMP_CLAUSE_CHAIN (*osc);
      /* The struct mapping might be immediately followed by a
	 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
	 indirect access or a reference, or both.  (This added node is removed
	 in omp-low.c after it has been processed there.)  */
      if (*sc != grp_end
	  && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
	      || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	sc = &OMP_CLAUSE_CHAIN (*sc);
      for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
	if ((ptr || attach_detach) && sc == grp_start_p)
	  break;
	else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
		 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
		 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
	  break;
	else
	  {
	    tree sc_decl = OMP_CLAUSE_DECL (*sc);
	    poly_offset_int offset;
	    poly_int64 bitpos;

	    if (TREE_CODE (sc_decl) == ARRAY_REF)
	      {
		while (TREE_CODE (sc_decl) == ARRAY_REF)
		  sc_decl = TREE_OPERAND (sc_decl, 0);
		if (TREE_CODE (sc_decl) != COMPONENT_REF
		    || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
		  break;
	      }
	    else if (TREE_CODE (sc_decl) == INDIRECT_REF
		     && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
		     && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
			 == REFERENCE_TYPE))
	      sc_decl = TREE_OPERAND (sc_decl, 0);

	    tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
	    if (!base2 || !operand_equal_p (base2, base, 0))
	      break;
	    if (scp)
	      continue;
	    if (maybe_lt (coffset, offset)
		|| (known_eq (coffset, offset)
		    && maybe_lt (cbitpos, bitpos)))
	      {
		if (ptr || attach_detach)
		  scp = sc;
		else
		  break;
	      }
	  }

      if (!attach)
	OMP_CLAUSE_SIZE (*osc)
	  = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
      if (ptr || attach_detach)
	{
	  tree cl = NULL_TREE, extra_node;
	  tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
							 grp_end, &extra_node);
	  tree *tail_chain = NULL;

	  /* Here, we have:

	     grp_end : the last (or only) node in this group.
	     grp_start_p : pointer to the first node in a pointer mapping group
			   up to and including GRP_END.
	     sc : pointer to the chain for the end of the struct component
		  list.
	     scp : pointer to the chain for the sorted position at which we
		   should insert in the middle of the struct component list
		   (else NULL to insert at end).
	     alloc_node : the "alloc" node for the structure (pointer-type)
			  component.  We insert at SCP (if present), else SC
			  (the end of the struct component list).
	     extra_node : a newly-synthesized node for an additional indirect
			  pointer mapping or a Fortran pointer set, if needed.
	     cl : first node to prepend before grp_start_p.
	     tail_chain : pointer to chain of last prepended node.

	     The general idea is we move the nodes for this struct mapping
	     together: the alloc node goes into the sorted list directly after
	     the struct mapping, and any extra nodes (together with the nodes
	     mapping arrays pointed to by struct components) get moved after
	     that list.  When SCP is NULL, we insert the nodes at SC, i.e. at
	     the end of the struct component mapping list.  It's important that
	     the alloc_node comes first in that case because it's part of the
	     sorted component mapping list (but subsequent nodes are not!).  */

	  if (scp)
	    omp_siblist_insert_node_after (alloc_node, scp);

	  /* Make [cl,tail_chain] a list of the alloc node (if we haven't
	     already inserted it) and the extra_node (if it is present).  The
	     list can be empty if we added alloc_node above and there is no
	     extra node.  */
	  if (scp && extra_node)
	    {
	      cl = extra_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
	    }
	  else if (extra_node)
	    {
	      OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
	      cl = alloc_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
	    }
	  else if (!scp)
	    {
	      cl = alloc_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
	    }

	  continue_at
	    = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
							grp_start_p, grp_end,
							sc)
		 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
	}
      else if (*sc != grp_end)
	{
	  gcc_assert (*grp_start_p == grp_end);

	  /* We are moving the current node back to a previous struct node:
	     the node that used to point to the current node will now point to
	     the next node instead.  */
	  continue_at = grp_start_p;
	  /* In the non-pointer case, the mapping clause itself is moved into
	     the correct position in the struct component list, which in this
	     case is just SC.  */
	  omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
	}
    }
  return continue_at;
}
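/* Editor's illustration, not part of the original source: the sorted-position
   scan above keeps struct components ordered by (byte offset, bit position),
   lexicographically.  A plain-integer sketch of the same ordering predicate:

     // Does component (coffset, cbitpos) sort before (offset, bitpos)?
     bool
     component_sorts_before (long coffset, long cbitpos,
                             long offset, long bitpos)
     {
       return coffset < offset || (coffset == offset && cbitpos < bitpos);
     }

   The real code must use maybe_lt/known_eq instead of < and == because the
   quantities are poly_ints, whose relative order may be unknown at compile
   time for variable-sized targets.  */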
/* Scan through GROUPS, and create sorted structure sibling lists without
   gimplifying.  */

static bool
omp_build_struct_sibling_lists (enum tree_code code,
				enum omp_region_type region_type,
				vec<omp_mapping_group> *groups,
				hash_map<tree_operand_hash, omp_mapping_group *>
				  **grpmap,
				tree *list_p)
{
  unsigned i;
  omp_mapping_group *grp;
  hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
  bool success = true;
  tree *new_next = NULL;
  tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
  auto_vec<omp_mapping_group> pre_hwm_groups;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree c = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (c);
      tree grp_end = grp->grp_end;
      tree sentinel = OMP_CLAUSE_CHAIN (grp_end);

      if (new_next)
	grp->grp_start = new_next;

      new_next = NULL;

      tree *grp_start_p = grp->grp_start;

      /* Skip groups we marked for deletion in
	 oacc_resolve_clause_dependencies.  */
      if (grp->deleted)
	continue;

      if (OMP_CLAUSE_CHAIN (*grp_start_p)
	  && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
	{
	  /* Don't process an array descriptor that isn't inside a derived type
	     as a struct (the GOMP_MAP_POINTER following will have the form
	     "var.data", but such mappings are handled specially).  */
	  tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
	  if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
	      && DECL_P (OMP_CLAUSE_DECL (grpmid)))
	    continue;
	}

      tree d = decl;
      if (TREE_CODE (d) == ARRAY_REF)
	{
	  while (TREE_CODE (d) == ARRAY_REF)
	    d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
	    decl = d;
	}
      if (d == decl
	  && TREE_CODE (decl) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
	  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
	      == REFERENCE_TYPE)
	  && (OMP_CLAUSE_MAP_KIND (c)
	      != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
	decl = TREE_OPERAND (decl, 0);

      if (TREE_CODE (decl) != COMPONENT_REF)
	continue;

      /* If we're mapping the whole struct in another node, skip adding this
	 node to a sibling list.  */
      omp_mapping_group *wholestruct;
      if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
					   &wholestruct))
	{
	  if (!(region_type & ORT_ACC)
	      && *grp_start_p == grp_end)
	    /* Remove the whole of this mapping -- redundant.  */
	    grp->deleted = true;

	  continue;
	}

      if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
	  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
	  && code != OACC_UPDATE
	  && code != OMP_TARGET_UPDATE)
	{
	  if (error_operand_p (decl))
	    {
	      success = false;
	      goto error_out;
	    }

	  tree stype = TREE_TYPE (decl);
	  if (TREE_CODE (stype) == REFERENCE_TYPE)
	    stype = TREE_TYPE (stype);
	  if (TYPE_SIZE_UNIT (stype) == NULL
	      || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"mapping field %qE of variable length "
			"structure", OMP_CLAUSE_DECL (c));
	      success = false;
	      goto error_out;
	    }

	  tree inner = NULL_TREE;

	  new_next
	    = omp_accumulate_sibling_list (region_type, code,
					   struct_map_to_clause, grp_start_p,
					   grp_end, &inner);

	  if (inner)
	    {
	      if (new_next && *new_next == NULL_TREE)
		*new_next = inner;
	      else
		*tail = inner;

	      OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
	      omp_mapping_group newgrp;
	      newgrp.grp_start = new_next ? new_next : tail;
	      newgrp.grp_end = inner;
	      newgrp.mark = UNVISITED;
	      newgrp.sibling = NULL;
	      newgrp.deleted = false;
	      newgrp.next = NULL;
	      groups->safe_push (newgrp);

	      /* !!! Growing GROUPS might invalidate the pointers in the group
		 map.  Rebuild it here.  This is a bit inefficient, but
		 shouldn't happen very often.  */
	      delete (*grpmap);
	      *grpmap
		= omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
					      sentinel);

	      tail = &OMP_CLAUSE_CHAIN (inner);
	    }
	}
    }

  /* Delete groups marked for deletion above.  At this point the order of the
     groups may no longer correspond to the order of the underlying list,
     which complicates this a little.  First clear out OMP_CLAUSE_DECL for
     deleted nodes...  */

  FOR_EACH_VEC_ELT (*groups, i, grp)
    if (grp->deleted)
      for (tree d = *grp->grp_start;
	   d != OMP_CLAUSE_CHAIN (grp->grp_end);
	   d = OMP_CLAUSE_CHAIN (d))
	OMP_CLAUSE_DECL (d) = NULL_TREE;

  /* ...then sweep through the list removing the now-empty nodes.  */

  tail = list_p;
  while (*tail)
    {
      if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
	  && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
	*tail = OMP_CLAUSE_CHAIN (*tail);
      else
	tail = &OMP_CLAUSE_CHAIN (*tail);
    }

error_out:
  if (struct_map_to_clause)
    delete struct_map_to_clause;

  return success;
}
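/* Editor's illustration, not part of the original source: the two-phase
   deletion above (blank out payloads first, then unlink in a single sweep)
   avoids having to track how group order relates to list order.  A generic
   sketch of the sweep, assuming a hypothetical `payload' field standing in
   for OMP_CLAUSE_DECL:

     void
     sweep_empty (struct node **tail)
     {
       while (*tail)
         if ((*tail)->payload == NULL)
           *tail = (*tail)->chain;     // unlink the emptied node
         else
           tail = &(*tail)->chain;     // keep it and advance
     }
*/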
/* Scan the OMP clauses in *LIST_P, installing mappings into a new
   and previous omp contexts.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type,
			   enum tree_code code)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  tree c;
  tree *prev_list_p = NULL, *orig_list_p = list_p;
  int handled_depend_iterators = -1;
  int nowait = -1;

  ctx = new_omp_context (region_type);
  ctx->code = code;
  outer_ctx = ctx->outer_context;
  if (code == OMP_TARGET)
    {
      if (!lang_GNU_Fortran ())
	ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
      ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
      ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
					     ? GOVD_MAP : GOVD_FIRSTPRIVATE);
    }
  if (!lang_GNU_Fortran ())
    switch (code)
      {
      case OMP_TARGET:
      case OMP_TARGET_DATA:
      case OMP_TARGET_ENTER_DATA:
      case OMP_TARGET_EXIT_DATA:
      case OACC_DECLARE:
      case OACC_HOST_DATA:
      case OACC_PARALLEL:
      case OACC_KERNELS:
	ctx->target_firstprivatize_array_bases = true;
      default:
	break;
      }

  if (code == OMP_TARGET
      || code == OMP_TARGET_DATA
      || code == OMP_TARGET_ENTER_DATA
      || code == OMP_TARGET_EXIT_DATA)
    {
      vec<omp_mapping_group> *groups;
      groups = omp_gather_mapping_groups (list_p);
      if (groups)
	{
	  hash_map<tree_operand_hash, omp_mapping_group *> *grpmap;
	  grpmap = omp_index_mapping_groups (groups);

	  omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
					  list_p);

	  omp_mapping_group *outlist = NULL;

	  /* Topological sorting may fail if we have duplicate nodes, which
	     we should have detected and shown an error for already.  Skip
	     sorting in that case.  */
	  if (seen_error ())
	    goto failure;

	  delete grpmap;
	  delete groups;

	  /* Rebuild now we have struct sibling lists.  */
	  groups = omp_gather_mapping_groups (list_p);
	  grpmap = omp_index_mapping_groups (groups);

	  outlist = omp_tsort_mapping_groups (groups, grpmap);
	  outlist = omp_segregate_mapping_groups (outlist);
	  list_p = omp_reorder_mapping_groups (groups, outlist, list_p);

	failure:
	  delete grpmap;
	  delete groups;
	}
    }
  else if (region_type & ORT_ACC)
    {
      vec<omp_mapping_group> *groups;
      groups = omp_gather_mapping_groups (list_p);
      if (groups)
	{
	  hash_map<tree_operand_hash, omp_mapping_group *> *grpmap;
	  grpmap = omp_index_mapping_groups (groups);

	  oacc_resolve_clause_dependencies (groups, grpmap);
	  omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
					  list_p);

	  delete groups;
	  delete grpmap;
	}
    }
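  /* Editor's note, not from the original source: for a directive such as

       #pragma omp target enter data map(to: s.a, s.b)

     the preprocessing above groups the map clauses, builds the
     GOMP_MAP_STRUCT sibling list for `s', and topologically sorts the groups
     so that base pointers are mapped before mappings that depend on them;
     only then does the clause-by-clause scan below run over the rewritten
     list.  */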
  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	    {
	      gcc_assert (code == OMP_TARGET);
	      flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
	    }
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    switch (code)
	      {
	      case OMP_DISTRIBUTE:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "conditional %<lastprivate%> clause on "
			  "%qs construct", "distribute");
		OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
		break;
	      case OMP_TASKLOOP:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "conditional %<lastprivate%> clause on "
			  "%qs construct", "taskloop");
		OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
		break;
	      default:
		break;
	      }
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  if (code != OMP_LOOP)
	    check_non_private = "lastprivate";
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    goto do_add;
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	      && !lang_hooks.decls.omp_scalar_p (decl, true))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"non-scalar variable %qD in conditional "
			"%<lastprivate%> clause", decl);
	      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
	    }
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    flags |= GOVD_LASTPRIVATE_CONDITIONAL;
	  omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
							 false);
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
		{
		  if (nowait == -1)
		    nowait = omp_find_clause (*list_p,
					      OMP_CLAUSE_NOWAIT) != NULL_TREE;
		  if (nowait
		      && (outer_ctx == NULL
			  || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"%<task%> reduction modifier on a construct "
				"with a %<nowait%> clause");
		      OMP_CLAUSE_REDUCTION_TASK (c) = 0;
		    }
		}
	      else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "invalid %<task%> reduction modifier on construct "
			    "other than %<parallel%>, %qs, %<sections%> or "
			    "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
		  OMP_CLAUSE_REDUCTION_TASK (c) = 0;
		}
	    }
	  if (OMP_CLAUSE_REDUCTION_INSCAN (c))
	    switch (code)
	      {
	      case OMP_SECTIONS:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "sections");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_PARALLEL:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "parallel");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_TEAMS:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "teams");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_TASKLOOP:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "taskloop");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_SCOPE:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "scope");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      default:
		break;
	      }
	  /* Fall through.  */
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  /* OpenACC permits reductions on private variables.  */
	  if (!(region_type & ORT_ACC)
	      /* taskgroup is actually not a worksharing region.  */
	      && code != OMP_TASKGROUP)
	    check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (decl);
	      bool saved_into_ssa = gimplify_ctxp->into_ssa;
	      gimplify_ctxp->into_ssa = false;
	      if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
				 NULL, is_gimple_val, fb_rvalue, false)
		  == GS_ERROR)
		{
		  gimplify_ctxp->into_ssa = saved_into_ssa;
		  remove = true;
		  break;
		}
	      gimplify_ctxp->into_ssa = saved_into_ssa;
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      if (DECL_P (v))
		{
		  omp_firstprivatize_variable (ctx, v);
		  omp_notice_variable (ctx, v, true);
		}
	      decl = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
		{
		  gimplify_ctxp->into_ssa = false;
		  if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
				     NULL, is_gimple_val, fb_rvalue, false)
		      == GS_ERROR)
		    {
		      gimplify_ctxp->into_ssa = saved_into_ssa;
		      remove = true;
		      break;
		    }
		  gimplify_ctxp->into_ssa = saved_into_ssa;
		  v = TREE_OPERAND (decl, 1);
		  if (DECL_P (v))
		    {
		      omp_firstprivatize_variable (ctx, v);
		      omp_notice_variable (ctx, v, true);
		    }
		  decl = TREE_OPERAND (decl, 0);
		}
	      if (TREE_CODE (decl) == ADDR_EXPR
		  || TREE_CODE (decl) == INDIRECT_REF)
		decl = TREE_OPERAND (decl, 0);
	    }
	  goto do_add_decl;
	case OMP_CLAUSE_LINEAR:
	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else
	    {
	      if (code == OMP_SIMD
		  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		{
		  struct gimplify_omp_ctx *octx = outer_ctx;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && !octx->distribute)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context->outer_context;
		      else
			octx = octx->outer_context;
		    }
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && octx->distribute)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"%<linear%> clause for variable other than "
				"loop iterator specified on construct "
				"combined with %<distribute%>");
		      remove = true;
		      break;
		    }
		}
	      /* For combined #pragma omp parallel for simd, need to put
		 lastprivate and perhaps firstprivate too on the
		 parallel.  Similarly for #pragma omp for simd.  */
	      struct gimplify_omp_ctx *octx = outer_ctx;
	      bool taskloop_seen = false;
	      decl = NULL_TREE;
	      do
		{
		  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    break;
		  decl = OMP_CLAUSE_DECL (c);
		  if (error_operand_p (decl))
		    {
		      decl = NULL_TREE;
		      break;
		    }
		  flags = GOVD_SEEN;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		    flags |= GOVD_FIRSTPRIVATE;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    flags |= GOVD_LASTPRIVATE;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context;
		      else if (omp_check_private (octx, decl, false))
			break;
		    }
		  else if (octx
			   && (octx->region_type & ORT_TASK) != 0
			   && octx->combined_loop)
		    taskloop_seen = true;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_PARALLEL
			   && ((ctx->region_type == ORT_WORKSHARE
				&& octx == outer_ctx)
			       || taskloop_seen))
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && ((octx->region_type & ORT_COMBINED_TEAMS)
			       == ORT_COMBINED_TEAMS))
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_TARGET)
		    {
		      if (flags & GOVD_LASTPRIVATE)
			flags = GOVD_SEEN | GOVD_MAP;
		    }
		  else
		    break;
		  splay_tree_node on
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
		    {
		      octx = NULL;
		      break;
		    }
		  omp_add_variable (octx, decl, flags);
		  if (octx->outer_context == NULL)
		    break;
		  octx = octx->outer_context;
		}
	      while (1);
	      if (octx
		  && decl
		  && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		omp_notice_variable (octx, decl, true);
	    }
	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
	      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
	    {
	      notice_outer = false;
	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
	    }
	  goto do_add;
	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    remove = true;
	  switch (code)
	    {
	    case OMP_TARGET:
	      break;
	    case OACC_DATA:
	      if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
		break;
	      /* Fall through.  */
	    case OMP_TARGET_DATA:
	    case OMP_TARGET_ENTER_DATA:
	    case OMP_TARGET_EXIT_DATA:
	    case OACC_ENTER_DATA:
	    case OACC_EXIT_DATA:
	    case OACC_HOST_DATA:
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		/* For target {,enter ,exit }data only the array slice is
		   mapped, but not the pointer to it.  */
		remove = true;
	      break;
	    default:
	      break;
	    }
	  /* For Fortran, not only the pointer to the data is mapped but also
	     the address of the pointer, the array descriptor etc.; for
	     'exit data' - and in particular for 'delete:' - having an 'alloc:'
	     does not make sense.  Likewise, for 'update' only transferring the
	     data itself is needed as the rest has been handled in previous
	     directives.  However, for 'exit data', the array descriptor needs
	     to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE.

	     NOTE: Generally, it is not safe to perform "enter data" operations
	     on arrays where the data *or the descriptor* may go out of scope
	     before a corresponding "exit data" operation -- and such a
	     descriptor may be synthesized temporarily, e.g. to pass an
	     explicit-shape array to a function expecting an assumed-shape
	     argument.  Performing "enter data" inside the called function
	     would thus be problematic.  */
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
	    OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
					== GOMP_MAP_DELETE
					? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
	  else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
		   && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
	    remove = true;

	  if (remove)
	    break;
	  if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
	    {
	      struct gimplify_omp_ctx *octx;
	      for (octx = outer_ctx; octx; octx = octx->outer_context)
		{
		  if (octx->region_type != ORT_ACC_HOST_DATA)
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
			      "declared in enclosing %<host_data%> region",
			      DECL_NAME (decl));
		}
	    }
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		    || (OMP_CLAUSE_MAP_KIND (c)
			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		    || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		   && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
	    {
	      OMP_CLAUSE_SIZE (c)
		= get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
					   false);
	      if ((region_type & ORT_TARGET) != 0)
		omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
				  GOVD_FIRSTPRIVATE | GOVD_SEEN);
	    }

	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
	    {
	      tree base = omp_strip_components_and_deref (decl);
	      if (DECL_P (base))
		{
		  decl = base;
		  splay_tree_node n
		    = splay_tree_lookup (ctx->variables,
					 (splay_tree_key) decl);
		  if (n != NULL
		      && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
		    {
		      remove = true;
		      break;
		    }
		  flags = GOVD_MAP | GOVD_EXPLICIT;

		  goto do_add_decl;
		}
	    }

	  if (TREE_CODE (decl) == TARGET_EXPR)
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
				 is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		remove = true;
	    }
	  else if (!DECL_P (decl))
	    {
	      tree d = decl, *pd;
	      if (TREE_CODE (d) == ARRAY_REF)
		{
		  while (TREE_CODE (d) == ARRAY_REF)
		    d = TREE_OPERAND (d, 0);
		  if (TREE_CODE (d) == COMPONENT_REF
		      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
		    decl = d;
		}
	      pd = &OMP_CLAUSE_DECL (c);
	      if (d == decl
		  && TREE_CODE (decl) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
		      == REFERENCE_TYPE)
		  && (OMP_CLAUSE_MAP_KIND (c)
		      != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
		{
		  pd = &TREE_OPERAND (decl, 0);
		  decl = TREE_OPERAND (decl, 0);
		}
	      /* An "attach/detach" operation on an update directive should
		 behave as a GOMP_MAP_ALWAYS_POINTER.  Beware that
		 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
		 depends on the previous mapping.  */
	      if (code == OACC_UPDATE
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		{
		  if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
		      == ARRAY_TYPE)
		    remove = true;
		  else
		    {
		      gomp_map_kind k = ((code == OACC_EXIT_DATA
					  || code == OMP_TARGET_EXIT_DATA)
					 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
		      OMP_CLAUSE_SET_MAP_KIND (c, k);
		    }
		}

	      tree cref = decl;

	      while (TREE_CODE (cref) == ARRAY_REF)
		cref = TREE_OPERAND (cref, 0);

	      if (TREE_CODE (cref) == INDIRECT_REF)
		cref = TREE_OPERAND (cref, 0);

	      if (TREE_CODE (cref) == COMPONENT_REF)
		{
		  tree base = cref;
		  while (base && !DECL_P (base))
		    {
		      tree innerbase = omp_get_base_pointer (base);
		      if (!innerbase)
			break;
		      base = innerbase;
		    }
		  if (base
		      && DECL_P (base)
		      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
		      && POINTER_TYPE_P (TREE_TYPE (base)))
		    {
		      splay_tree_node n
			= splay_tree_lookup (ctx->variables,
					     (splay_tree_key) base);
		      n->value |= GOVD_SEEN;
		    }
		}

	      if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
		{
		  /* Don't gimplify *pd fully at this point, as the base
		     will need to be adjusted during omp lowering.  */
		  auto_vec<tree, 10> expr_stack;
		  tree *p = pd;
		  while (handled_component_p (*p)
			 || TREE_CODE (*p) == INDIRECT_REF
			 || TREE_CODE (*p) == ADDR_EXPR
			 || TREE_CODE (*p) == MEM_REF
			 || TREE_CODE (*p) == NON_LVALUE_EXPR)
		    {
		      expr_stack.safe_push (*p);
		      p = &TREE_OPERAND (*p, 0);
		    }
		  for (int i = expr_stack.length () - 1; i >= 0; i--)
		    {
		      tree t = expr_stack[i];
		      if (TREE_CODE (t) == ARRAY_REF
			  || TREE_CODE (t) == ARRAY_RANGE_REF)
			{
			  if (TREE_OPERAND (t, 2) == NULL_TREE)
			    {
			      tree low = unshare_expr (array_ref_low_bound (t));
			      if (!is_gimple_min_invariant (low))
				{
				  TREE_OPERAND (t, 2) = low;
				  if (gimplify_expr (&TREE_OPERAND (t, 2),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			  if (TREE_OPERAND (t, 3) == NULL_TREE)
			    {
			      tree elmt_size = array_ref_element_size (t);
			      if (!is_gimple_min_invariant (elmt_size))
				{
				  elmt_size = unshare_expr (elmt_size);
				  tree elmt_type
				    = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
									  0)));
				  tree factor
				    = size_int (TYPE_ALIGN_UNIT (elmt_type));
				  elmt_size
				    = size_binop (EXACT_DIV_EXPR, elmt_size,
						  factor);
				  TREE_OPERAND (t, 3) = elmt_size;
				  if (gimplify_expr (&TREE_OPERAND (t, 3),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		      else if (TREE_CODE (t) == COMPONENT_REF)
			{
			  if (TREE_OPERAND (t, 2) == NULL_TREE)
			    {
			      tree offset = component_ref_field_offset (t);
			      if (!is_gimple_min_invariant (offset))
				{
				  offset = unshare_expr (offset);
				  tree field = TREE_OPERAND (t, 1);
				  tree factor
				    = size_int (DECL_OFFSET_ALIGN (field)
						/ BITS_PER_UNIT);
				  offset = size_binop (EXACT_DIV_EXPR, offset,
						       factor);
				  TREE_OPERAND (t, 2) = offset;
				  if (gimplify_expr (&TREE_OPERAND (t, 2),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		    }
		  for (; expr_stack.length () > 0; )
		    {
		      tree t = expr_stack.pop ();

		      if (TREE_CODE (t) == ARRAY_REF
			  || TREE_CODE (t) == ARRAY_RANGE_REF)
			{
			  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
			      && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
						NULL, is_gimple_val,
						fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		    }
		}
	      else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
				      fb_lvalue) == GS_ERROR)
		{
		  remove = true;
		  break;
		}

	      if (!remove
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
		  && OMP_CLAUSE_CHAIN (c)
		  && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
		  && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
		       == GOMP_MAP_ALWAYS_POINTER)
		      || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			  == GOMP_MAP_ATTACH_DETACH)
		      || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			  == GOMP_MAP_TO_PSET)))
		prev_list_p = list_p;

	      break;
	    }
	  flags = GOVD_MAP | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
	    flags |= GOVD_MAP_ALWAYS_TO;

	  if ((code == OMP_TARGET
	       || code == OMP_TARGET_DATA
	       || code == OMP_TARGET_ENTER_DATA
	       || code == OMP_TARGET_EXIT_DATA)
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
	    {
	      for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
		   octx = octx->outer_context)
		{
		  splay_tree_node n
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) OMP_CLAUSE_DECL (c));
		  /* If this is contained in an outer OpenMP region as a
		     firstprivate value, remove the attach/detach.  */
		  if (n && (n->value & GOVD_FIRSTPRIVATE))
		    {
		      OMP_CLAUSE_SET_MAP_KIND (c,
					       GOMP_MAP_FIRSTPRIVATE_POINTER);
		      goto do_add;
		    }
		}

	      enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
					     ? GOMP_MAP_DETACH
					     : GOMP_MAP_ATTACH);
	      OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
	    }

	  goto do_add;
	case OMP_CLAUSE_AFFINITY:
	  gimplify_omp_affinity (list_p, pre_p);
	  remove = true;
	  break;

	case OMP_CLAUSE_DOACROSS:
	  if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
	    {
	      tree deps = OMP_CLAUSE_DECL (c);
	      while (deps && TREE_CODE (deps) == TREE_LIST)
		{
		  if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
		      && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
		    gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
				   pre_p, NULL, is_gimple_val, fb_rvalue);
		  deps = TREE_CHAIN (deps);
		}
	    }
	  else
	    gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
			== OMP_CLAUSE_DOACROSS_SOURCE);
	  break;

	case OMP_CLAUSE_DEPEND:
	  if (handled_depend_iterators == -1)
	    handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
	  if (handled_depend_iterators)
	    {
	      if (handled_depend_iterators == 2)
		remove = true;
	      break;
	    }
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	    {
	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			     NULL, is_gimple_val, fb_rvalue);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	    }
	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_DECL (c) != null_pointer_node)
	    {
	      OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
				 is_gimple_val, fb_rvalue) == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	    }
	  if (code == OMP_TASK)
	    ctx->has_depend = true;
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!DECL_P (decl))
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  goto do_notice;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  flags = GOVD_EXPLICIT;
	  goto do_add;

	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  while (TREE_CODE (decl) == INDIRECT_REF
		 || TREE_CODE (decl) == ARRAY_REF)
	    decl = TREE_OPERAND (decl, 0);
	  flags = GOVD_EXPLICIT;
	  goto do_add_decl;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  goto do_add;

	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	do_add_decl:
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
	    {
	      tree t = omp_member_access_dummy_var (decl);
	      if (t)
		{
		  tree v = DECL_VALUE_EXPR (decl);
		  DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
		  if (outer_ctx)
		    omp_notice_variable (outer_ctx, t, true);
		}
	    }
	  if (code == OACC_DATA
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
	    flags |= GOVD_MAP_0LEN_ARRAY;
	  omp_add_variable (ctx, decl, flags);
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      struct gimplify_omp_ctx *pctx
		= code == OMP_TARGET ? outer_ctx : ctx;
	      if (pctx)
		omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				  GOVD_LOCAL | GOVD_SEEN);
	      if (pctx
		  && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
		  && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
				find_decl_expr,
				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				NULL) == NULL_TREE)
		omp_add_variable (pctx,
				  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				  GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = pctx;
	      push_gimplify_context ();

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context ();
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
	      && !remove
	      && !omp_check_private (ctx, decl, true))
	    {
	      remove = true;
	      if (is_global_var (decl))
		{
		  if (DECL_THREAD_LOCAL_P (decl))
		    remove = false;
		  else if (DECL_HAS_VALUE_EXPR_P (decl))
		    {
		      tree value = get_base_address (DECL_VALUE_EXPR (decl));

		      if (value
			  && DECL_P (value)
			  && DECL_THREAD_LOCAL_P (value))
			remove = false;
		    }
		}
	      if (remove)
		error_at (OMP_CLAUSE_LOCATION (c),
			  "copyprivate variable %qE is not threadprivate"
			  " or private in outer context", DECL_NAME (decl));
	    }
	do_notice:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
	      && outer_ctx
	      && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
		  || (region_type == ORT_WORKSHARE
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && (OMP_CLAUSE_REDUCTION_INSCAN (c)
			  || code == OMP_LOOP)))
	      && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
		  || (code == OMP_LOOP
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
			  == ORT_COMBINED_TEAMS))))
	    {
	      splay_tree_node on
		= splay_tree_lookup (outer_ctx->variables,
				     (splay_tree_key)decl);
	      if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
		      && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			  || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			      && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				  == POINTER_TYPE))))
		    omp_firstprivatize_variable (outer_ctx, decl);
		  else
		    {
		      omp_add_variable (outer_ctx, decl,
					GOVD_SEEN | GOVD_SHARED);
		      if (outer_ctx->outer_context)
			omp_notice_variable (outer_ctx->outer_context, decl,
					     true);
		    }
		}
	    }
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  if (check_non_private
	      && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
	      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		  || decl == OMP_CLAUSE_DECL (c)
		  || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
		      && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			  == ADDR_EXPR
			  || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			      == POINTER_PLUS_EXPR
			      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
						(OMP_CLAUSE_DECL (c), 0), 0))
				  == ADDR_EXPR)))))
	      && omp_check_private (ctx, decl, false))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;
	case OMP_CLAUSE_DETACH:
	  flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
	  goto do_add;

	case OMP_CLAUSE_IF:
	  if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
	      && OMP_CLAUSE_IF_MODIFIER (c) != code)
	    {
	      const char *p[2];
	      for (int i = 0; i < 2; i++)
		switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
		  {
		  case VOID_CST: p[i] = "cancel"; break;
		  case OMP_PARALLEL: p[i] = "parallel"; break;
		  case OMP_SIMD: p[i] = "simd"; break;
		  case OMP_TASK: p[i] = "task"; break;
		  case OMP_TASKLOOP: p[i] = "taskloop"; break;
		  case OMP_TARGET_DATA: p[i] = "target data"; break;
		  case OMP_TARGET: p[i] = "target"; break;
		  case OMP_TARGET_UPDATE: p[i] = "target update"; break;
		  case OMP_TARGET_ENTER_DATA:
		    p[i] = "target enter data"; break;
		  case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
		  default: gcc_unreachable ();
		  }
	      error_at (OMP_CLAUSE_LOCATION (c),
			"expected %qs %<if%> clause modifier rather than %qs",
			p[0], p[1]);
	      remove = true;
	    }
	  /* Fall through.  */

	case OMP_CLAUSE_FINAL:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_NUM_TEAMS:
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
	      && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
	      && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
	    {
	      if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
		{
		  remove = true;
		  break;
		}
	      OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
		= get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
					   pre_p, NULL, true);
	    }
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
	      && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	    {
	      if (code != OMP_TARGET)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<device%> clause with %<ancestor%> is only "
			    "allowed on %<target%> construct");
		  remove = true;
		  break;
		}

	      tree clauses = *orig_list_p;
	      for (; clauses; clauses = OMP_CLAUSE_CHAIN (clauses))
		if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP)
		  {
		    error_at (OMP_CLAUSE_LOCATION (c),
			      "with %<ancestor%>, only the %<device%>, "
			      "%<firstprivate%>, %<private%>, %<defaultmap%>, "
			      "and %<map%> clauses may appear on the "
			      "construct");
		    remove = true;
		    break;
		  }
	    }
	  /* Fall through.  */

	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	  if (OMP_CLAUSE_OPERAND (c, 0)
	      && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
	    {
	      if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
		{
		  remove = true;
		  break;
		}
	      /* All these clauses care about value, not a particular decl,
		 so try to force it into a SSA_NAME or fresh temporary.  */
	      OMP_CLAUSE_OPERAND (c, 0)
		= get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
					   pre_p, NULL, true);
	    }
	  break;

	case OMP_CLAUSE_GANG:
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	  nowait = 1;
	  break;

	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_DEFAULTMAP:
	  enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
	  switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
	    {
	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
	      gdmkmin = GDMK_SCALAR;
	      gdmkmax = GDMK_POINTER;
	      break;
	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
	      gdmkmin = GDMK_SCALAR;
	      gdmkmax = GDMK_SCALAR_TARGET;
	      break;
	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
	      gdmkmin = gdmkmax = GDMK_AGGREGATE;
	      break;
	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
	      gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
	      break;
	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
	      gdmkmin = gdmkmax = GDMK_POINTER;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
	    switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
	      {
	      case OMP_CLAUSE_DEFAULTMAP_ALLOC:
		ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_TO:
		ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_FROM:
		ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_TOFROM:
		ctx->defaultmap[gdmk] = GOVD_MAP;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
		ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_NONE:
		ctx->defaultmap[gdmk] = 0;
		break;
	      case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
		switch (gdmk)
		  {
		  case GDMK_SCALAR:
		    ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
		    break;
		  case GDMK_SCALAR_TARGET:
		    ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
					     ? GOVD_MAP : GOVD_FIRSTPRIVATE);
		    break;
		  case GDMK_AGGREGATE:
		  case GDMK_ALLOCATABLE:
		    ctx->defaultmap[gdmk] = GOVD_MAP;
		    break;
		  case GDMK_POINTER:
		    ctx->defaultmap[gdmk] = GOVD_MAP;
		    if (!lang_GNU_Fortran ())
		      ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
		    break;
		  default:
		    gcc_unreachable ();
		  }
		break;
	      default:
		gcc_unreachable ();
	      }
	  break;
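	/* Editor's note, not from the original source: as a concrete example,

	     #pragma omp target defaultmap(firstprivate: scalar)

	   runs the loop above with gdmkmin = GDMK_SCALAR and gdmkmax =
	   GDMK_SCALAR_TARGET, setting both ctx->defaultmap entries to
	   GOVD_FIRSTPRIVATE, so implicitly referenced scalars in the target
	   region default to firstprivate rather than being mapped.  */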
	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
	  break;

	case OMP_CLAUSE_NONTEMPORAL:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
	  break;

	case OMP_CLAUSE_ALLOCATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
		   || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
		       == INTEGER_CST))
	    ;
	  else if (code == OMP_TASKLOOP
		   || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
	    OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
	      = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
					 pre_p, NULL, false);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  decl = OMP_CLAUSE_DECL (c);
	  {
	    splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
						   (splay_tree_key) decl);
	    if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%qD specified in %qs clause but not in %<inscan%> "
			  "%<reduction%> clause on the containing construct",
			  decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		remove = true;
	      }
	    else
	      {
		n->value |= GOVD_REDUCTION_INSCAN;
		if (outer_ctx->region_type == ORT_SIMD
		    && outer_ctx->outer_context
		    && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
		  {
		    n = splay_tree_lookup (outer_ctx->outer_context->variables,
					   (splay_tree_key) decl);
		    if (n && (n->value & GOVD_REDUCTION) != 0)
		      n->value |= GOVD_REDUCTION_INSCAN;
		  }
	      }
	  }
	  break;

	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}

      if (code == OACC_DATA
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	remove = true;
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  ctx->clauses = *orig_list_p;
  gimplify_omp_ctxp = ctx;
}
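/* Editor's note, not from the original source: after this scan, each
   variable named in a clause has an entry in ctx->variables keyed by its
   decl and carrying GOVD_* flags; e.g. for

     #pragma omp parallel firstprivate(x) shared(y)

   x is recorded with GOVD_FIRSTPRIVATE | GOVD_EXPLICIT and y with
   GOVD_SHARED | GOVD_EXPLICIT.  The gimplify_adjust_omp_clauses machinery
   below later prunes entries that never acquire GOVD_SEEN.  */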
/* Return true if DECL is a candidate for shared to firstprivate
   optimization.  We only consider non-addressable scalars, not
   too big, and not references.  */

static bool
omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
{
  if (TREE_ADDRESSABLE (decl))
    return false;
  tree type = TREE_TYPE (decl);
  if (!is_gimple_reg_type (type)
      || TREE_CODE (type) == REFERENCE_TYPE
      || TREE_ADDRESSABLE (type))
    return false;
  /* Don't optimize too large decls, as each thread/task will have
     its own copy.  */
  HOST_WIDE_INT len = int_size_in_bytes (type);
  if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
    return false;
  if (omp_privatize_by_reference (decl))
    return false;
  return true;
}
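/* Editor's illustration, not from the original source: under this predicate
   a plain `int i;' whose address is never taken qualifies, whereas any
   addressable variable, a reference-typed decl, or a scalar wider than
   4 * POINTER_SIZE / BITS_PER_UNIT bytes does not -- promoting such decls
   from shared to firstprivate would give every thread or task an aliased
   or oversized private copy.  */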
/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
   For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
   GOVD_WRITTEN in outer contexts.  */

static void
omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
{
  for (; ctx; ctx = ctx->outer_context)
    {
      splay_tree_node n = splay_tree_lookup (ctx->variables,
					     (splay_tree_key) decl);
      if (n == NULL)
	continue;
      else if (n->value & GOVD_SHARED)
	{
	  n->value |= GOVD_WRITTEN;
	  return;
	}
      else if (n->value & GOVD_DATA_SHARE_CLASS)
	return;
    }
}
/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context.  */

static tree
omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;

  *walk_subtrees = 0;
  if (!wi->is_lhs)
    return NULL_TREE;

  tree op = *tp;
  do
    {
      if (handled_component_p (op))
	op = TREE_OPERAND (op, 0);
      else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
	op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
      else
	break;
    }
  while (1);

  if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
    return NULL_TREE;

  omp_mark_stores (gimplify_omp_ctxp, op);
  return NULL_TREE;
}
/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context.  */

static tree
omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
		      bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  switch (gimple_code (stmt))
    {
    /* Don't recurse on OpenMP constructs for which
       gimplify_adjust_omp_clauses already handled the bodies,
       except handle gimple_omp_for_pre_body.  */
    case GIMPLE_OMP_FOR:
      *handled_ops_p = true;
      if (gimple_omp_for_pre_body (stmt))
	walk_gimple_seq (gimple_omp_for_pre_body (stmt),
			 omp_find_stores_stmt, omp_find_stores_op, wi);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_CRITICAL:
      *handled_ops_p = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
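
/* For example (an illustrative sketch), in

     int x = 0;
     #pragma omp parallel shared(x)
     x = 1;

   walking the gimplified body finds the store to "x" on a LHS, so
   omp_mark_stores sets GOVD_WRITTEN for "x" in the parallel context
   and the shared clause is not marked OMP_CLAUSE_SHARED_READONLY.  */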
struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;
  gimple_seq *pre_p;
};

/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
      && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
    flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
      if (VAR_P (decl)
	  && DECL_IN_CONSTANT_POOL (decl)
	  && !lookup_attribute ("omp declare target",
				DECL_ATTRIBUTES (decl)))
	{
	  tree id = get_identifier ("omp declare target");
	  DECL_ATTRIBUTES (decl)
	    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
	  varpool_node *node = varpool_node::get (decl);
	  if (node)
	    {
	      node->offloadable = 1;
	      if (ENABLE_OFFLOADING)
		g->have_offload = true;
	    }
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
      /* Don't optimize shared into firstprivate for read-only vars
	 on tasks with depend clause, we shouldn't try to copy them
	 until the dependencies are satisfied.  */
      if (gimplify_omp_ctxp->has_depend)
	flags |= GOVD_WRITTEN;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
    return 0;
  else if (flags & GOVD_CONDTEMP)
    {
      code = OMP_CLAUSE__CONDTEMP_;
      gimple_add_tmp_var (decl);
    }
  else
    gcc_unreachable ();

  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
		       | GOVD_MAP_FORCE
		       | GOVD_MAP_FORCE_PRESENT
		       | GOVD_MAP_ALLOC_ONLY
		       | GOVD_MAP_FROM_ONLY))
	{
	case 0:
	  kind = GOMP_MAP_TOFROM;
	  break;
	case GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_TO_ONLY:
	  kind = GOMP_MAP_TO;
	  break;
	case GOVD_MAP_FROM_ONLY:
	  kind = GOMP_MAP_FROM;
	  break;
	case GOVD_MAP_ALLOC_ONLY:
	  kind = GOMP_MAP_ALLOC;
	  break;
	case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_FORCE_PRESENT:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      /* Setting of the implicit flag for the runtime is currently disabled
	 for OpenACC.  */
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
	OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && omp_privatize_by_reference (decl))
	{
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p,
					  (ctx->region_type & ORT_ACC) != 0);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
     in simd.  Those are only added for the local vars inside of simd body
     and they don't need to be e.g. default constructible.  */
  if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
    lang_hooks.decls.omp_finish_clause (clause, pre_p,
					(ctx->region_type & ORT_ACC) != 0);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
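
/* Summary of the GOVD_MAP* flag combinations handled above and the map
   kind each one produces (a restatement of the switch, for reference):

     (none)                             GOMP_MAP_TOFROM
     GOVD_MAP_FORCE                     GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE
     GOVD_MAP_TO_ONLY                   GOMP_MAP_TO
     GOVD_MAP_FROM_ONLY                 GOMP_MAP_FROM
     GOVD_MAP_ALLOC_ONLY                GOMP_MAP_ALLOC
     GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE  GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE
     GOVD_MAP_FORCE_PRESENT             GOMP_MAP_FORCE_PRESENT  */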
static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree *orig_list_p = list_p;
  tree c, decl;
  bool has_inscan_reductions = false;

  if (body)
    {
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
	  break;
      if (octx)
	{
	  struct walk_stmt_info wi;
	  memset (&wi, 0, sizeof (wi));
	  walk_gimple_seq (body, omp_find_stores_stmt,
			   omp_find_stores_op, &wi);
	}
    }

  if (ctx->add_safelen1)
    {
      /* If there are VLAs in the body of simd loop, prevent
	 vectorization.  */
      gcc_assert (ctx->region_type == ORT_SIMD);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
      OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
      OMP_CLAUSE_CHAIN (c) = *list_p;
      *list_p = c;
      list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (ctx->region_type == ORT_WORKSHARE
      && ctx->outer_context
      && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
    {
      for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	  {
	    decl = OMP_CLAUSE_DECL (c);
	    splay_tree_node n
	      = splay_tree_lookup (ctx->outer_context->variables,
				   (splay_tree_key) decl);
	    gcc_checking_assert (!splay_tree_lookup (ctx->variables,
						     (splay_tree_key) decl));
	    omp_add_variable (ctx, decl, n->value);
	    tree c2 = copy_node (c);
	    OMP_CLAUSE_CHAIN (c2) = *list_p;
	    *list_p = c2;
	    if ((n->value & GOVD_FIRSTPRIVATE) == 0)
	      {
		c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				       OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (c2) = decl;
		OMP_CLAUSE_CHAIN (c2) = *list_p;
		*list_p = c2;
	      }
	  }
    }

  tree attach_list = NULL_TREE;
  tree *attach_tail = &attach_list;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;
      bool move_attach = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if ((ctx->region_type & ORT_TARGET)
	      && (ctx->region_type & ORT_ACC) == 0
	      && TYPE_ATOMIC (strip_array_types
			      (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<_Atomic%> %qD in %<firstprivate%> clause on "
			"%<target%> construct", OMP_CLAUSE_DECL (c));
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	    {
	      decl = OMP_CLAUSE_DECL (c);
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if ((n->value & GOVD_MAP) != 0)
		{
		  remove = true;
		  break;
		}
	      OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
	      OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = !(n->value & GOVD_SEEN);
	  if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
	      && code == OMP_PARALLEL
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	    remove = true;
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_SHARED));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && ctx->has_depend
		  && DECL_P (decl))
		n->value |= GOVD_WRITTEN;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && (n->value & GOVD_WRITTEN) == 0
		  && DECL_P (decl)
		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		OMP_CLAUSE_SHARED_READONLY (c) = 1;
	      else if (DECL_P (decl)
		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			    && (n->value & GOVD_WRITTEN) != 0)
			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	    }
	  else
	    n->value &= ~GOVD_EXPLICIT;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (code == OMP_DISTRIBUTE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  if (!remove
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
	    remove = true;
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  while (TREE_CODE (decl) == INDIRECT_REF
		 || TREE_CODE (decl) == ARRAY_REF)
	    decl = TREE_OPERAND (decl, 0);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = n == NULL || !(n->value & GOVD_SEEN);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_NONTEMPORAL:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = n == NULL || !(n->value & GOVD_SEEN);
	  break;

	case OMP_CLAUSE_MAP:
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
	    {
	      remove = true;
	      break;
	    }
	  /* If we have a target region, we can push all the attaches to the
	     end of the list (we may have standalone "attach" operations
	     synthesized for GOMP_MAP_STRUCT nodes that must be processed after
	     the attachment point AND the pointed-to block have been mapped).
	     If we have something else, e.g. "enter data", we need to keep
	     "attach" nodes together with the previous node they attach to so
	     that separate "exit data" operations work properly (see
	     libgomp/target.c).  */
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
	    move_attach = true;
	  decl = OMP_CLAUSE_DECL (c);
	  /* Data clauses associated with reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL
	      || ctx->region_type == ORT_ACC_SERIAL)
	    {
	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
	      n = NULL;
	      if (DECL_P (t))
		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	      if (n && (n->value & GOVD_REDUCTION))
		{
		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
		      && kind != GOMP_MAP_FORCE_PRESENT
		      && kind != GOMP_MAP_POINTER)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
				  "incompatible data clause with reduction "
				  "on %qE; promoting to %<present_or_copy%>",
				  DECL_NAME (t));
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
		    }
		}
	    }
	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
	      && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
	    {
	      remove = true;
	      break;
	    }
	  if (!DECL_P (decl))
	    {
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		}
	      break;
	    }
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
	      && (!is_global_var (decl)
		  || !lookup_attribute ("omp declare target link",
					DECL_ATTRIBUTES (decl))))
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_INSCAN (c))
	    {
	      decl = OMP_CLAUSE_DECL (c);
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
		{
		  remove = true;
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%qD specified in %<inscan%> %<reduction%> clause "
			    "but not in %<scan%> directive clause", decl);
		  break;
		}
	      has_inscan_reductions = true;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  /* OpenACC reductions need a present_or_copy data clause.
	     Add one if necessary.  Emit error when the reduction is private.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL
	      || ctx->region_type == ORT_ACC_SERIAL)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
		{
		  remove = true;
		  error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
			    "reduction on %qE", DECL_NAME (decl));
		}
	      else if ((n->value & GOVD_MAP) == 0)
		{
		  tree next = OMP_CLAUSE_CHAIN (c);
		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_CHAIN (c) = nc;
		  lang_hooks.decls.omp_finish_clause (nc, pre_p,
						      (ctx->region_type
						       & ORT_ACC) != 0);
		  while (1)
		    {
		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
			break;
		      nc = OMP_CLAUSE_CHAIN (nc);
		    }
		  OMP_CLAUSE_CHAIN (nc) = next;
		  n->value |= GOVD_MAP;
		}
	    }
	  if (DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;

	case OMP_CLAUSE_ALLOCATE:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if (n != NULL && !(n->value & GOVD_SEEN))
	    {
	      if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
		  != 0
		  && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
		remove = true;
	    }
	  if (!remove
	      && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
	      && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
	      && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
		  || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
		  || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
	    {
	      tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	      n = splay_tree_lookup (ctx->variables,
				     (splay_tree_key) allocator);
	      if (n == NULL)
		{
		  enum omp_clause_default_kind default_kind
		    = ctx->default_kind;
		  ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
				       true);
		  ctx->default_kind = default_kind;
		}
	      else
		omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
				     true);
	    }
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DOACROSS:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  break;

	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else if (move_attach)
	{
	  /* Remove attach node from here, separate out into its own list.  */
	  *attach_tail = c;
	  *list_p = OMP_CLAUSE_CHAIN (c);
	  OMP_CLAUSE_CHAIN (c) = NULL_TREE;
	  attach_tail = &OMP_CLAUSE_CHAIN (c);
	}
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Splice attach nodes at the end of the list.  */
  if (attach_list)
    {
      *list_p = attach_list;
      list_p = attach_tail;
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
    {
      /* OpenMP.  Implicit clauses are added at the start of the clause list,
	 but after any non-map clauses.  */
      tree *implicit_add_list_p = orig_list_p;
      while (*implicit_add_list_p
	     && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
	implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
      data.list_p = implicit_add_list_p;
    }
  else
    /* OpenACC.  */
    data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  if (has_inscan_reductions)
    for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%<inscan%> %<reduction%> clause used together with "
		    "%<linear%> clause for a variable other than loop "
		    "iterator");
	  break;
	}

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
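
/* Note the asymmetry above: for OpenMP the implicitly determined
   clauses are spliced in just before the first map clause, so non-map
   clauses keep preceding all map clauses, whereas for OpenACC they are
   simply appended at the end of the list.  */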
/* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
   -1 if unknown yet (simd is involved, won't be known until vectorization)
   and 1 if they do.  If SCORES is non-NULL, it should point to an array
   of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
   of the CONSTRUCTS (position -1 if it will never match) followed by
   number of constructs in the OpenMP context construct trait.  If the
   score depends on whether it will be in a declare simd clone or not,
   the function returns 2 and there will be two sets of the scores, the first
   one for the case that it is not in a declare simd clone, the other
   that it is in a declare simd clone.  */

int
omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
				int *scores)
{
  int matched = 0, cnt = 0;
  bool simd_seen = false;
  bool target_seen = false;
  int declare_simd_cnt = -1;
  auto_vec<enum tree_code, 16> codes;
  for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
    {
      if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
	  || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
	      == ORT_TARGET && ctx->code == OMP_TARGET)
	  || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
	  || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
	  || (ctx->region_type == ORT_SIMD
	      && ctx->code == OMP_SIMD
	      && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
	{
	  ++cnt;
	  if (scores)
	    codes.safe_push (ctx->code);
	  else if (matched < nconstructs && ctx->code == constructs[matched])
	    {
	      if (ctx->code == OMP_SIMD)
		{
		  if (matched)
		    return 0;
		  simd_seen = true;
		}
	      ++matched;
	    }
	  if (ctx->code == OMP_TARGET)
	    {
	      if (scores == NULL)
		return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
	      target_seen = true;
	      break;
	    }
	}
      else if (ctx->region_type == ORT_WORKSHARE
	       && ctx->code == OMP_LOOP
	       && ctx->outer_context
	       && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
	       && ctx->outer_context->outer_context
	       && ctx->outer_context->outer_context->code == OMP_LOOP
	       && ctx->outer_context->outer_context->distribute)
	ctx = ctx->outer_context->outer_context;
      ctx = ctx->outer_context;
    }
  if (!target_seen
      && lookup_attribute ("omp declare simd",
			   DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Declare simd is a maybe case, it is supposed to be added only to the
	 omp-simd-clone.cc added clones and not to the base function.  */
      declare_simd_cnt = cnt++;
      if (scores)
	codes.safe_push (OMP_SIMD);
      else if (matched == 0
	       && constructs[0] == OMP_SIMD)
	{
	  gcc_assert (matched == 0);
	  simd_seen = true;
	  if (++matched == nconstructs)
	    return -1;
	}
    }
  if (tree attr = lookup_attribute ("omp declare variant variant",
				    DECL_ATTRIBUTES (current_function_decl)))
    {
      enum tree_code variant_constructs[5];
      int variant_nconstructs = 0;
      if (!target_seen)
	variant_nconstructs
	  = omp_constructor_traits_to_codes (TREE_VALUE (attr),
					     variant_constructs);
      for (int i = 0; i < variant_nconstructs; i++)
	{
	  ++cnt;
	  if (scores)
	    codes.safe_push (variant_constructs[i]);
	  else if (matched < nconstructs
		   && variant_constructs[i] == constructs[matched])
	    {
	      if (variant_constructs[i] == OMP_SIMD)
		{
		  if (matched)
		    return 0;
		  simd_seen = true;
		}
	      ++matched;
	    }
	}
    }
  if (!target_seen
      && lookup_attribute ("omp declare target block",
			   DECL_ATTRIBUTES (current_function_decl)))
    {
      if (scores)
	codes.safe_push (OMP_TARGET);
      else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
	++matched;
    }
  if (scores)
    {
      for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
	{
	  int j = codes.length () - 1;
	  for (int i = nconstructs - 1; i >= 0; i--)
	    {
	      while (j >= 0
		     && (pass != 0 || declare_simd_cnt != j)
		     && constructs[i] != codes[j])
		--j;
	      if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
		*scores++ = j - 1;
	      else
		*scores++ = j;
	    }
	  *scores++ = ((pass == 0 && declare_simd_cnt != -1)
		       ? codes.length () - 1 : codes.length ());
	}
      return declare_simd_cnt == -1 ? 1 : 2;
    }
  if (matched == nconstructs)
    return simd_seen ? -1 : 1;
  return 0;
}
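
/* As an illustrative example, a "declare variant" selector such as
   construct={target,parallel} queried lexically inside

     #pragma omp target
     #pragma omp parallel

   finds both constructs in the gimplifier's context stack, so the
   function returns 1; if OMP_SIMD is involved the answer is -1
   (deferred until vectorization), and 0 means the selector can never
   match in this context.  */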
/* Gimplify OACC_CACHE.  */

static void
gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;

  gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
			     OACC_CACHE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
			       OACC_CACHE);

  /* TODO: Do something sensible with this information.  */

  *expr_p = NULL_TREE;
}
/* Helper function of gimplify_oacc_declare.  The helper's purpose is to,
   if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
   kind.  The entry kind will replace the one in CLAUSE, while the exit
   kind will be used in a new omp_clause and returned to the caller.  */

static tree
gimplify_oacc_declare_1 (tree clause)
{
  HOST_WIDE_INT kind, new_op;
  bool ret = false;
  tree c = NULL;

  kind = OMP_CLAUSE_MAP_KIND (clause);

  switch (kind)
    {
    case GOMP_MAP_ALLOC:
      new_op = GOMP_MAP_RELEASE;
      ret = true;
      break;

    case GOMP_MAP_FROM:
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
      new_op = GOMP_MAP_FROM;
      ret = true;
      break;

    case GOMP_MAP_TOFROM:
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
      new_op = GOMP_MAP_FROM;
      ret = true;
      break;

    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_POINTER:
    case GOMP_MAP_TO:
      break;

    default:
      gcc_unreachable ();
      break;
    }

  if (ret)
    {
      c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c, new_op);
      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
    }

  return c;
}
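
/* E.g. an OpenACC "declare copy(x)" style clause arriving here as
   GOMP_MAP_TOFROM is split into an entry mapping GOMP_MAP_TO (stored
   back into CLAUSE) and an exit mapping GOMP_MAP_FROM (the clause
   returned), so the data is copied in at entry and back out at exit.  */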
/* Gimplify OACC_DECLARE.  */

static void
gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gomp_target *stmt;
  tree clauses, t, decl;

  clauses = OACC_DECLARE_CLAUSES (expr);

  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);

  for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
    {
      decl = OMP_CLAUSE_DECL (t);

      if (TREE_CODE (decl) == MEM_REF)
	decl = TREE_OPERAND (decl, 0);

      if (VAR_P (decl) && !is_oacc_declared (decl))
	{
	  tree attr = get_identifier ("oacc declare target");
	  DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
					      DECL_ATTRIBUTES (decl));
	}

      if (VAR_P (decl)
	  && !is_global_var (decl)
	  && DECL_CONTEXT (decl) == current_function_decl)
	{
	  tree c = gimplify_oacc_declare_1 (t);
	  if (c)
	    {
	      if (oacc_declare_returns == NULL)
		oacc_declare_returns = new hash_map<tree, tree>;

	      oacc_declare_returns->put (decl, c);
	    }
	}

      if (gimplify_omp_ctxp)
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
    }

  stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				  clauses);

  gimplify_seq_add_stmt (pre_p, stmt);

  *expr_p = NULL_TREE;
}
/* Gimplify the contents of an OMP_PARALLEL statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */

static void
gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *g;
  gimple_seq body = NULL;

  gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
			     OMP_PARALLEL_COMBINED (expr)
			     ? ORT_COMBINED_PARALLEL
			     : ORT_PARALLEL, OMP_PARALLEL);

  push_gimplify_context ();

  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
  if (gimple_code (g) == GIMPLE_BIND)
    pop_gimplify_context (g);
  else
    pop_gimplify_context (NULL);

  gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
			       OMP_PARALLEL);

  g = gimple_build_omp_parallel (body,
				 OMP_PARALLEL_CLAUSES (expr),
				 NULL_TREE, NULL_TREE);
  if (OMP_PARALLEL_COMBINED (expr))
    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
/* Gimplify the contents of an OMP_TASK statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */

static void
gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *g;
  gimple_seq body = NULL;
  bool nowait = false;
  bool has_depend = false;

  if (OMP_TASK_BODY (expr) == NULL_TREE)
    {
      for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
	  {
	    has_depend = true;
	    if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<mutexinoutset%> kind in %<depend%> clause on a "
			  "%<taskwait%> construct");
		*expr_p = NULL_TREE;
		return;
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
	  nowait = true;
      if (nowait && !has_depend)
	{
	  error_at (EXPR_LOCATION (expr),
		    "%<taskwait%> construct with %<nowait%> clause but no "
		    "%<depend%> clauses");
	  *expr_p = NULL_TREE;
	  return;
	}
    }

  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
			     omp_find_clause (OMP_TASK_CLAUSES (expr),
					      OMP_CLAUSE_UNTIED)
			     ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);

  if (OMP_TASK_BODY (expr))
    {
      push_gimplify_context ();

      g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
    }

  gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
			       OMP_TASK);

  g = gimple_build_omp_task (body,
			     OMP_TASK_CLAUSES (expr),
			     NULL_TREE, NULL_TREE,
			     NULL_TREE, NULL_TREE, NULL_TREE);
  if (OMP_TASK_BODY (expr) == NULL_TREE)
    gimple_omp_task_set_taskwait_p (g, true);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
/* Helper function for gimplify_omp_for.  If *TP is not a gimple constant,
   force it into a temporary initialized in PRE_P and add firstprivate clause
   to ORIG_FOR_STMT.  */

static void
gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
			    tree orig_for_stmt)
{
  if (*tp == NULL || is_gimple_constant (*tp))
    return;

  *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
  /* Reference to pointer conversion is considered useless,
     but is significant for firstprivate clause.  Force it
     happen here.  */
  if (type
      && TREE_CODE (type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
    {
      tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
      tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
      gimplify_and_add (m, pre_p);
      *tp = v;
    }

  tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
  OMP_CLAUSE_DECL (c) = *tp;
  OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
  OMP_FOR_CLAUSES (orig_for_stmt) = c;
}
/* Helper function of gimplify_omp_for, find OMP_ORDERED with
   null OMP_ORDERED_BODY inside of OMP_FOR's body.  */

static tree
find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
{
  switch (TREE_CODE (*tp))
    {
    case OMP_ORDERED:
      if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
	return *tp;
      break;
    case OMP_SIMD:
    case OMP_PARALLEL:
    case OMP_TARGET:
      *walk_subtrees = 0;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
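
/* A stand-alone ordered construct here is the doacross-loop shape,
   e.g. (an illustrative sketch):

     #pragma omp for ordered(2)
     for (...)
       for (...)
	 {
	   #pragma omp ordered depend(sink: i - 1, j)
	   ...
	   #pragma omp ordered depend(source)
	 }

   gimplify_omp_for uses this walker to detect that shape and set
   OMP_CLAUSE_ORDERED_DOACROSS on the ordered clause.  */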
/* Gimplify the gross structure of an OMP_FOR statement.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gomp_for *gfor;
  gimple_seq for_body, for_pre_body;
  int i;
  bitmap has_decl_expr = NULL;
  enum omp_region_type ort = ORT_WORKSHARE;
  bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;

  orig_for_stmt = for_stmt = *expr_p;

  bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
		 != NULL_TREE);
  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    {
      tree *data[4] = { NULL, NULL, NULL, NULL };
      gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
      inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
				  find_combined_omp_for, data, NULL);
      if (inner_for_stmt == NULL_TREE)
	{
	  gcc_assert (seen_error ());
	  *expr_p = NULL_TREE;
	  return GS_ERROR;
	}
      if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
	{
	  append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
					  &OMP_FOR_PRE_BODY (for_stmt));
	  OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
	}
      if (OMP_FOR_PRE_BODY (inner_for_stmt))
	{
	  append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
					  &OMP_FOR_PRE_BODY (for_stmt));
	  OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
	}

      if (data[0])
	{
	  /* We have some statements or variable declarations in between
	     the composite construct directives.  Move them around the
	     inner_for_stmt.  */
	  for (i = 0; i < 3; i++)
	    if (data[i])
	      {
		tree t = *data[i];
		if (i < 2 && data[i + 1] == &OMP_BODY (t))
		  data[i + 1] = data[i];
		*data[i] = OMP_BODY (t);
		tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
				    NULL_TREE, make_node (BLOCK));
		OMP_BODY (t) = body;
		append_to_statement_list_force (inner_for_stmt,
						&BIND_EXPR_BODY (body));
		data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
		gcc_assert (*data[3] == inner_for_stmt);
	      }
	}

      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
	if (!loop_p
	    && OMP_FOR_ORIG_DECLS (inner_for_stmt)
	    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					i)) == TREE_LIST
	    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					   i)))
	  {
	    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
	    /* Class iterators aren't allowed on OMP_SIMD, so the only
	       case we need to solve is distribute parallel for.  They are
	       allowed on the loop construct, but that is already handled
	       in gimplify_omp_loop.  */
	    gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
			&& TREE_CODE (for_stmt) == OMP_DISTRIBUTE
			&& data[1]);
	    tree orig_decl = TREE_PURPOSE (orig);
	    tree last = TREE_VALUE (orig);
	    tree *pc;
	    for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
		 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
	      if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
		   || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
		  && OMP_CLAUSE_DECL (*pc) == orig_decl)
		break;
	    if (*pc == NULL_TREE)
	      {
		tree *spc;
		for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
		     *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
		  if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
		      && OMP_CLAUSE_DECL (*spc) == orig_decl)
		    break;
		if (*spc)
		  {
		    tree c = *spc;
		    *spc = OMP_CLAUSE_CHAIN (c);
		    OMP_CLAUSE_CHAIN (c) = NULL_TREE;
		    *pc = c;
		  }
	      }
	    if (*pc == NULL_TREE)
	      ;
	    else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
	      {
		/* private clause will appear only on inner_for_stmt.
		   Change it into firstprivate, and add private clause
		   on for_stmt.  */
		tree c = copy_node (*pc);
		OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
		OMP_FOR_CLAUSES (for_stmt) = c;
		OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
		lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
	      }
	    else
	      {
		/* lastprivate clause will appear on both inner_for_stmt
		   and for_stmt.  Add firstprivate clause to
		   inner_for_stmt.  */
		tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
					   OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
		OMP_CLAUSE_CHAIN (c) = *pc;
		*pc = c;
		lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
	      }
	    tree c = build_omp_clause (UNKNOWN_LOCATION,
				       OMP_CLAUSE_FIRSTPRIVATE);
	    OMP_CLAUSE_DECL (c) = last;
	    OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
	    OMP_PARALLEL_CLAUSES (*data[1]) = c;
	    c = build_omp_clause (UNKNOWN_LOCATION,
				  *pc ? OMP_CLAUSE_SHARED
				      : OMP_CLAUSE_FIRSTPRIVATE);
	    OMP_CLAUSE_DECL (c) = orig_decl;
	    OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
	    OMP_PARALLEL_CLAUSES (*data[1]) = c;
	  }
      /* Similarly, take care of C++ range for temporaries, those should
	 be firstprivate on OMP_PARALLEL if any.  */
      if (data[1])
	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
	  if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
	      && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					  i)) == TREE_LIST
	      && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					   i)))
	    {
	      tree orig
		= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
	      tree v = TREE_CHAIN (orig);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE_FIRSTPRIVATE);
	      /* First add firstprivate clause for the __for_end artificial
		 decl.  */
	      OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
	      if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
		  == REFERENCE_TYPE)
		OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
	      OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
	      OMP_PARALLEL_CLAUSES (*data[1]) = c;
	      if (TREE_VEC_ELT (v, 0))
		{
		  /* And now the same for __for_range artificial decl if it
		     exists.  */
		  c = build_omp_clause (UNKNOWN_LOCATION,
					OMP_CLAUSE_FIRSTPRIVATE);
		  OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
		  if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
		      == REFERENCE_TYPE)
		    OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
		  OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
		  OMP_PARALLEL_CLAUSES (*data[1]) = c;
		}
	    }
    }

  switch (TREE_CODE (for_stmt))
    {
    case OMP_FOR:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
	{
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			       OMP_CLAUSE_SCHEDULE))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "schedule", lang_GNU_Fortran () ? "do" : "for");
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "ordered", lang_GNU_Fortran () ? "do" : "for");
	}
      break;
    case OMP_DISTRIBUTE:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
	  && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			      OMP_CLAUSE_DIST_SCHEDULE))
	error_at (EXPR_LOCATION (for_stmt),
		  "%qs clause may not appear on non-rectangular %qs",
		  "dist_schedule", "distribute");
      break;
    case OACC_LOOP:
      ort = ORT_ACC;
      break;
    case OMP_TASKLOOP:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
	{
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			       OMP_CLAUSE_GRAINSIZE))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "grainsize", "taskloop");
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			       OMP_CLAUSE_NUM_TASKS))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "num_tasks", "taskloop");
	}
      if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
	ort = ORT_UNTIED_TASKLOOP;
      else
	ort = ORT_TASKLOOP;
      break;
    case OMP_SIMD:
      ort = ORT_SIMD;
      break;
    default:
      gcc_unreachable ();
    }

  /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
     clause for the IV.  */
  if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	    && OMP_CLAUSE_DECL (c) == decl)
	  {
	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	    break;
	  }
    }

  if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
    gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
			       loop_p && TREE_CODE (for_stmt) != OMP_SIMD
			       ? OMP_LOOP : TREE_CODE (for_stmt));

  if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
    gimplify_omp_ctxp->distribute = true;

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  if ((ort == ORT_SIMD
       || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
      && OMP_FOR_PRE_BODY (for_stmt))
    {
      has_decl_expr = BITMAP_ALLOC (NULL);
      if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
	     == VAR_DECL)
	{
	  t = OMP_FOR_PRE_BODY (for_stmt);
	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	}
      else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
	{
	  tree_stmt_iterator si;
	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
	       tsi_next (&si))
	    {
	      t = tsi_stmt (si);
	      if (TREE_CODE (t) == DECL_EXPR
		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	    }
	}
    }
  if (OMP_FOR_PRE_BODY (for_stmt))
    {
      if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
	gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
      else
	{
	  struct gimplify_omp_ctx ctx;
	  memset (&ctx, 0, sizeof (ctx));
	  ctx.region_type = ORT_NONE;
	  gimplify_omp_ctxp = &ctx;
	  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
	  gimplify_omp_ctxp = NULL;
	}
    }
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    for_stmt = inner_for_stmt;

  /* For taskloop, need to gimplify the start, end and step before the
     taskloop, outside of the taskloop omp context.  */
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	{
	  t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	  gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
				   ? pre_p : &for_pre_body);
	  tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	  if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	    {
	      tree v = TREE_OPERAND (t, 1);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
					  for_pre_p, orig_for_stmt);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
					  for_pre_p, orig_for_stmt);
	    }
	  else
	    gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
					orig_for_stmt);

	  /* Handle OMP_FOR_COND.  */
	  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
	  if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	    {
	      tree v = TREE_OPERAND (t, 1);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
					  for_pre_p, orig_for_stmt);
	      gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
					  for_pre_p, orig_for_stmt);
	    }
	  else
	    gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
					orig_for_stmt);

	  /* Handle OMP_FOR_INCR.  */
	  t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	  if (TREE_CODE (t) == MODIFY_EXPR)
	    {
	      decl = TREE_OPERAND (t, 0);
	      t = TREE_OPERAND (t, 1);
	      tree *tp = &TREE_OPERAND (t, 1);
	      if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
		tp = &TREE_OPERAND (t, 0);

	      gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
					  orig_for_stmt);
	    }
	}

      gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
				 OMP_TASKLOOP);
    }

  if (orig_for_stmt != for_stmt)
    gimplify_omp_ctxp->combined_loop = true;

  for_body = NULL;
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));

  tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
  bool is_doacross = false;
  if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
					 find_standalone_omp_ordered, NULL))
    {
      OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
      is_doacross = true;
      int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
      gimplify_omp_ctxp->loop_iter_var.create (len * 2);
      for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
	if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<linear%> clause may not be specified together "
		      "with %<ordered%> clause if stand-alone %<ordered%> "
		      "construct is nested in it");
	    *pc = OMP_CLAUSE_CHAIN (*pc);
	  }
	else
	  pc = &OMP_CLAUSE_CHAIN (*pc);
    }
  int collapse = 1, tile = 0;
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
  if (c)
    collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
  if (c)
    tile = list_length (OMP_CLAUSE_TILE_LIST (c));
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
  hash_set<tree> *allocate_uids = NULL;
  if (c)
    {
      allocate_uids = new hash_set<tree>;
      for (; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
	  allocate_uids->add (OMP_CLAUSE_DECL (c));
    }
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));
      if (is_doacross)
	{
	  if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
	    {
	      tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
	      if (TREE_CODE (orig_decl) == TREE_LIST)
		{
		  orig_decl = TREE_PURPOSE (orig_decl);
		  if (!orig_decl)
		    orig_decl = decl;
		}
	      gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
	    }
	  else
	    gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	  gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	}

      if (for_stmt == orig_for_stmt)
	{
	  tree orig_decl = decl;
	  if (OMP_FOR_ORIG_DECLS (for_stmt))
	    {
	      tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
	      if (TREE_CODE (orig_decl) == TREE_LIST)
		{
		  orig_decl = TREE_PURPOSE (orig_decl);
		  if (!orig_decl)
		    orig_decl = decl;
		}
	    }
	  if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
	    error_at (EXPR_LOCATION (for_stmt),
		      "threadprivate iteration variable %qD", orig_decl);
	}

      /* Make sure the iteration variable is private.  */
      tree c = NULL_TREE;
      tree c2 = NULL_TREE;
      if (orig_for_stmt != for_stmt)
	{
	  /* Preserve this information until we gimplify the inner simd.  */
	  if (has_decl_expr
	      && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
	    TREE_PRIVATE (t) = 1;
	}
      else if (ort == ORT_SIMD)
	{
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key) decl);
	  omp_is_private (gimplify_omp_ctxp, decl,
			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
			       > 1));
	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
	    {
	      omp_notice_variable (gimplify_omp_ctxp, decl, true);
	      if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
		for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
						OMP_CLAUSE_LASTPRIVATE);
		     c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
					       OMP_CLAUSE_LASTPRIVATE))
		  if (OMP_CLAUSE_DECL (c3) == decl)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c3), 0,
				  "conditional %<lastprivate%> on loop "
				  "iterator %qD ignored", decl);
		      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
		      n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
		    }
	    }
	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
	    {
	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
	      if ((has_decl_expr
		   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
		  || TREE_PRIVATE (t))
		{
		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
		}
	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		{
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
			{
			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
			}
		      else
			{
			  struct gimplify_omp_ctx *octx
			    = outer->outer_context;
			  if (octx
			      && octx->region_type == ORT_COMBINED_PARALLEL
			      && octx->outer_context
			      && (octx->outer_context->region_type
				  == ORT_WORKSHARE)
			      && octx->outer_context->combined_loop)
			    {
			      octx = octx->outer_context;
			      n = splay_tree_lookup (octx->variables,
						     (splay_tree_key)decl);
			      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
				{
				  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
				  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
				}
			    }
			}
		    }
		}

	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		omp_lastprivate_for_combined_outer_constructs (outer, decl,
							       true);
	    }
	  else
	    {
	      bool lastprivate
		= (!has_decl_expr
		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
	      if (TREE_PRIVATE (t))
		lastprivate = false;
	      if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
		{
		  tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		  if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
		    lastprivate = false;
		}

	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && lastprivate)
		omp_lastprivate_for_combined_outer_constructs (outer, decl,
							       true);

	      c = build_omp_clause (input_location,
				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
						: OMP_CLAUSE_PRIVATE);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl,
				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
				| GOVD_EXPLICIT | GOVD_SEEN);
	      c = NULL_TREE;
	    }
	}
      else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
	{
	  omp_notice_variable (gimplify_omp_ctxp, decl, true);
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key) decl);
	  if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
	    for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
					    OMP_CLAUSE_LASTPRIVATE);
		 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
					   OMP_CLAUSE_LASTPRIVATE))
	      if (OMP_CLAUSE_DECL (c3) == decl)
		{
		  warning_at (OMP_CLAUSE_LOCATION (c3), 0,
			      "conditional %<lastprivate%> on loop "
			      "iterator %qD ignored", decl
);
14142 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
14143 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
14147 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
14149 /* If DECL is not a gimple register, create a temporary variable to act
14150 as an iteration counter. This is valid, since DECL cannot be
14151 modified in the body of the loop. Similarly for any iteration vars
14152 in simd with collapse > 1 where the iterator vars must be
14153 lastprivate. And similarly for vars mentioned in allocate clauses. */
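	  /* Illustrative sketch (not verbatim compiler output): for
	     "#pragma omp simd collapse(2)" over iterators i and j, the
	     lowered loops run on fresh temporaries i.0 and j.0 and the loop
	     body is prefixed with "i = i.0; j = j.0;" -- that is the
	     gimple_build_assign added to FOR_BODY below -- so the original
	     decls are only stored to, never used as the actual counters.  */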
	  if (orig_for_stmt != for_stmt)
	    var = decl;
	  else if (!is_gimple_reg (decl)
		   || (ort == ORT_SIMD
		       && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
		   || (allocate_uids && allocate_uids->contains (decl)))
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	      /* Make sure omp_add_variable is not called on it prematurely.
		 We call it ourselves a few lines later.  */
	      gimplify_omp_ctxp = NULL;
	      var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	      gimplify_omp_ctxp = ctx;
	      TREE_OPERAND (t, 0) = var;

	      gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	      if (ort == ORT_SIMD
		  && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
		{
		  c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
		  OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
		  OMP_CLAUSE_DECL (c2) = var;
		  OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
		  OMP_FOR_CLAUSES (for_stmt) = c2;
		  omp_add_variable (gimplify_omp_ctxp, var,
				    GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
		  if (c == NULL_TREE)
		    {
		      c = c2;
		      c2 = NULL_TREE;
		    }
		}
	      else
		omp_add_variable (gimplify_omp_ctxp, var,
				  GOVD_PRIVATE | GOVD_SEEN);
	    }
	  else
	    var = decl;

	  gimplify_omp_ctxp->in_for_exprs = true;
	  if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	    {
	      tree lb = TREE_OPERAND (t, 1);
	      tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
				    is_gimple_val, fb_rvalue, false);
	      ret = MIN (ret, tret);
	      tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
				    is_gimple_val, fb_rvalue, false);
	    }
	  else
	    tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				  is_gimple_val, fb_rvalue, false);
	  gimplify_omp_ctxp->in_for_exprs = false;
	  ret = MIN (ret, tret);
	  if (ret == GS_ERROR)
	    return ret;

	  /* Handle OMP_FOR_COND.  */
	  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
	  gcc_assert (COMPARISON_CLASS_P (t));
	  gcc_assert (TREE_OPERAND (t, 0) == decl);

	  gimplify_omp_ctxp->in_for_exprs = true;
	  if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	    {
	      tree ub = TREE_OPERAND (t, 1);
	      tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
				    is_gimple_val, fb_rvalue, false);
	      ret = MIN (ret, tret);
	      tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
				    is_gimple_val, fb_rvalue, false);
	    }
	  else
	    tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				  is_gimple_val, fb_rvalue, false);
	  gimplify_omp_ctxp->in_for_exprs = false;
	  ret = MIN (ret, tret);

	  /* Handle OMP_FOR_INCR.  */
	  t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	  switch (TREE_CODE (t))
	    {
	    case PREINCREMENT_EXPR:
	    case POSTINCREMENT_EXPR:
	      {
		tree decl = TREE_OPERAND (t, 0);
		/* c_omp_for_incr_canonicalize_ptr() should have been
		   called to massage things appropriately.  */
		gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));

		if (orig_for_stmt != for_stmt)
		  break;
		t = build_int_cst (TREE_TYPE (decl), 1);
		if (c)
		  OMP_CLAUSE_LINEAR_STEP (c) = t;
		t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
		t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
		TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
		break;
	      }

	    case PREDECREMENT_EXPR:
	    case POSTDECREMENT_EXPR:
	      /* c_omp_for_incr_canonicalize_ptr() should have been
		 called to massage things appropriately.  */
	      gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
	      if (orig_for_stmt != for_stmt)
		break;
	      t = build_int_cst (TREE_TYPE (decl), -1);
	      if (c)
		OMP_CLAUSE_LINEAR_STEP (c) = t;
	      t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	      t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	      TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	      break;

	    case MODIFY_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;

	      t = TREE_OPERAND (t, 1);
	      switch (TREE_CODE (t))
		{
		case PLUS_EXPR:
		  if (TREE_OPERAND (t, 1) == decl)
		    {
		      TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		      TREE_OPERAND (t, 0) = var;
		      break;
		    }

		  /* Fallthru.  */
		case MINUS_EXPR:
		case POINTER_PLUS_EXPR:
		  gcc_assert (TREE_OPERAND (t, 0) == decl);
		  TREE_OPERAND (t, 0) = var;
		  break;
		default:
		  gcc_unreachable ();
		}

	      gimplify_omp_ctxp->in_for_exprs = true;
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				    is_gimple_val, fb_rvalue, false);
	      ret = MIN (ret, tret);
	      if (c)
		{
		  tree step = TREE_OPERAND (t, 1);
		  tree stept = TREE_TYPE (decl);
		  if (POINTER_TYPE_P (stept))
		    stept = sizetype;
		  step = fold_convert (stept, step);
		  if (TREE_CODE (t) == MINUS_EXPR)
		    step = fold_build1 (NEGATE_EXPR, stept, step);
		  OMP_CLAUSE_LINEAR_STEP (c) = step;
		  if (step != TREE_OPERAND (t, 1))
		    {
		      tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
					    &for_pre_body, NULL,
					    is_gimple_val, fb_rvalue, false);
		      ret = MIN (ret, tret);
		    }
		}
	      gimplify_omp_ctxp->in_for_exprs = false;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  if (c2)
	    {
	      gcc_assert (c);
	      OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
	    }

	  if ((var != decl || collapse > 1 || tile)
	      && orig_for_stmt == for_stmt)
	    for (c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	      if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
		   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
		       && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
		  && OMP_CLAUSE_DECL (c) == decl)
		{
		  if (is_doacross && (collapse == 1 || i >= collapse))
		    t = var;
		  else
		    {
		      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		      gcc_assert (TREE_OPERAND (t, 0) == var);
		      t = TREE_OPERAND (t, 1);
		      gcc_assert (TREE_CODE (t) == PLUS_EXPR
				  || TREE_CODE (t) == MINUS_EXPR
				  || TREE_CODE (t) == POINTER_PLUS_EXPR);
		      gcc_assert (TREE_OPERAND (t, 0) == var);
		      t = build2 (TREE_CODE (t), TREE_TYPE (decl),
				  is_doacross ? var : decl,
				  TREE_OPERAND (t, 1));
		    }
		  gimple_seq *seq;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		    seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
		  else
		    seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
		  push_gimplify_context ();
		  gimplify_assign (decl, t, seq);
		  gimple *bind = NULL;
		  if (gimplify_ctxp->temps)
		    {
		      bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
		      *seq = NULL;
		      gimplify_seq_add_stmt (seq, bind);
		    }
		  pop_gimplify_context (bind);
		}

	  if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
	    for (int j = i + 1;
		 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
	      {
		t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		    && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
		  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
		t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
		gcc_assert (COMPARISON_CLASS_P (t));
		if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		    && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
		  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
	      }
	}

      BITMAP_FREE (has_decl_expr);
      delete allocate_uids;

      if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
	  || (loop_p && orig_for_stmt == for_stmt))
	{
	  push_gimplify_context ();
	  if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
	    {
	      OMP_FOR_BODY (orig_for_stmt)
		= build3 (BIND_EXPR, void_type_node, NULL,
			  OMP_FOR_BODY (orig_for_stmt), NULL);
	      TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
	    }
	}

      gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
					     &for_body);

      if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
	  || (loop_p && orig_for_stmt == for_stmt))
	{
	  if (gimple_code (g) == GIMPLE_BIND)
	    pop_gimplify_context (g);
	  else
	    pop_gimplify_context (NULL);
	}

      if (orig_for_stmt != for_stmt)
	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	  {
	    t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	    decl = TREE_OPERAND (t, 0);
	    struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	    if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
	      gimplify_omp_ctxp = ctx->outer_context;
	    var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	    gimplify_omp_ctxp = ctx;
	    omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	    TREE_OPERAND (t, 0) = var;
	    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	    TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
	    TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
	    if (OMP_FOR_NON_RECTANGULAR (for_stmt))
	      for (int j = i + 1;
		   j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
		{
		  t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
		  gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		  if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		      && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
		    {
		      TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
		      TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
		    }
		  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
		  gcc_assert (COMPARISON_CLASS_P (t));
		  if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		      && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
		    {
		      TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
		      TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
		    }
		}
	  }

      gimplify_adjust_omp_clauses (pre_p, for_body,
				   &OMP_FOR_CLAUSES (orig_for_stmt),
				   TREE_CODE (orig_for_stmt));

      int kind;
      switch (TREE_CODE (orig_for_stmt))
	{
	case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
	case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
	case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
	case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
	case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
	default:
	  gcc_unreachable ();
	}
      if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
	{
	  gimplify_seq_add_seq (pre_p, for_pre_body);
	  for_pre_body = NULL;
	}
      gfor = gimple_build_omp_for (for_body, kind,
				   OMP_FOR_CLAUSES (orig_for_stmt),
				   TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
				   for_pre_body);
      if (orig_for_stmt != for_stmt)
	gimple_omp_for_set_combined_p (gfor, true);
      if (gimplify_omp_ctxp
	  && (gimplify_omp_ctxp->combined_loop
	      || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context
		  && gimplify_omp_ctxp->outer_context->combined_loop)))
	{
	  gimple_omp_for_set_combined_into_p (gfor, true);
	  if (gimplify_omp_ctxp->combined_loop)
	    gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
	  else
	    gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
	}

      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	{
	  t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	  gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
	  gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
	  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
	  gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
	  gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
	  t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	  gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
	}

      /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
	 constructs with GIMPLE_OMP_TASK sandwiched in between them.
	 The outer taskloop stands for computing the number of iterations,
	 counts for collapsed loops and holding taskloop specific clauses.
	 The task construct stands for the effect of data sharing on the
	 explicit task it creates and the inner taskloop stands for expansion
	 of the static loop inside of the explicit task construct.  */
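      /* Illustrative sketch of that shape (not verbatim compiler output):

	   #pragma omp taskloop grainsize(g) lastprivate(l)
	   for (i = 0; i < n; i++) ...

	 becomes roughly

	   GIMPLE_OMP_FOR (taskloop, grainsize(g), ...)    <- outer taskloop
	     GIMPLE_OMP_TASK (shared(l), ...)              <- data sharing
	       GIMPLE_OMP_FOR (taskloop, lastprivate(l))   <- inner taskloop
		 ...

	 with the clause redistribution done by the switch below.  */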
      if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
	{
	  tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
	  tree task_clauses = NULL_TREE;
	  tree c = *gfor_clauses_ptr;
	  tree *gtask_clauses_ptr = &task_clauses;
	  tree outer_for_clauses = NULL_TREE;
	  tree *gforo_clauses_ptr = &outer_for_clauses;
	  bitmap lastprivate_uids = NULL;
	  if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
	    {
	      c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
	      if (c)
		{
		  lastprivate_uids = BITMAP_ALLOC (NULL);
		  for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
						 OMP_CLAUSE_LASTPRIVATE))
		    bitmap_set_bit (lastprivate_uids,
				    DECL_UID (OMP_CLAUSE_DECL (c)));
		}
	      c = *gfor_clauses_ptr;
	    }
	  for (; c; c = OMP_CLAUSE_CHAIN (c))
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      /* These clauses are allowed on task, move them there.  */
	      case OMP_CLAUSE_SHARED:
	      case OMP_CLAUSE_FIRSTPRIVATE:
	      case OMP_CLAUSE_DEFAULT:
	      case OMP_CLAUSE_IF:
	      case OMP_CLAUSE_UNTIED:
	      case OMP_CLAUSE_FINAL:
	      case OMP_CLAUSE_MERGEABLE:
	      case OMP_CLAUSE_PRIORITY:
	      case OMP_CLAUSE_REDUCTION:
	      case OMP_CLAUSE_IN_REDUCTION:
		*gtask_clauses_ptr = c;
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
		break;
	      case OMP_CLAUSE_PRIVATE:
		if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
		  {
		    /* We want private on outer for and firstprivate
		       on task.  */
		    *gtask_clauses_ptr
		      = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					  OMP_CLAUSE_FIRSTPRIVATE);
		    OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		    lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr,
							NULL, false);
		    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		    *gforo_clauses_ptr = c;
		    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
		  }
		else
		  {
		    *gtask_clauses_ptr = c;
		    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
		  }
		break;
	      /* These clauses go into outer taskloop clauses.  */
	      case OMP_CLAUSE_GRAINSIZE:
	      case OMP_CLAUSE_NUM_TASKS:
	      case OMP_CLAUSE_NOGROUP:
		*gforo_clauses_ptr = c;
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
		break;
	      /* Collapse clause we duplicate on both taskloops.  */
	      case OMP_CLAUSE_COLLAPSE:
		*gfor_clauses_ptr = c;
		gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
		*gforo_clauses_ptr = copy_node (c);
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
		break;
	      /* For lastprivate, keep the clause on inner taskloop, and add
		 a shared clause on task.  If the same decl is also
		 firstprivate, add also firstprivate clause on the inner
		 taskloop.  */
	      case OMP_CLAUSE_LASTPRIVATE:
		if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
		  {
		    /* For taskloop C++ lastprivate IVs, we want:
		       1) private on outer taskloop
		       2) firstprivate and shared on task
		       3) lastprivate on inner taskloop  */
		    *gtask_clauses_ptr
		      = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					  OMP_CLAUSE_FIRSTPRIVATE);
		    OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		    lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr,
							NULL, false);
		    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		    OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
		    *gforo_clauses_ptr
		      = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					  OMP_CLAUSE_PRIVATE);
		    OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
		    OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
		    TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
		    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
		  }
		*gfor_clauses_ptr = c;
		gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		  OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
		gtask_clauses_ptr
		  = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		break;
	      /* Allocate clause we duplicate on task and inner taskloop
		 if the decl is lastprivate, otherwise just put on task.  */
	      case OMP_CLAUSE_ALLOCATE:
		if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
		    && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
		  {
		    /* Additionally, put firstprivate clause on task
		       for the allocator if it is not constant.  */
		    *gtask_clauses_ptr
		      = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					  OMP_CLAUSE_FIRSTPRIVATE);
		    OMP_CLAUSE_DECL (*gtask_clauses_ptr)
		      = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
		    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		  }
		if (lastprivate_uids
		    && bitmap_bit_p (lastprivate_uids,
				     DECL_UID (OMP_CLAUSE_DECL (c))))
		  {
		    *gfor_clauses_ptr = c;
		    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
		    *gtask_clauses_ptr = copy_node (c);
		    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		  }
		else
		  {
		    *gtask_clauses_ptr = c;
		    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
		  }
		break;
	      default:
		gcc_unreachable ();
	      }
	  *gfor_clauses_ptr = NULL_TREE;
	  *gtask_clauses_ptr = NULL_TREE;
	  *gforo_clauses_ptr = NULL_TREE;
	  BITMAP_FREE (lastprivate_uids);
	  gimple_set_location (gfor, input_location);
	  g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
	  g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
				     NULL_TREE, NULL_TREE, NULL_TREE);
	  gimple_set_location (g, input_location);
	  gimple_omp_task_set_taskloop_p (g, true);
	  g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
	  gomp_for *gforo
	    = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP,
				    outer_for_clauses,
				    gimple_omp_for_collapse (gfor),
				    gimple_omp_for_pre_body (gfor));
	  gimple_omp_for_set_pre_body (gfor, NULL);
	  gimple_omp_for_set_combined_p (gforo, true);
	  gimple_omp_for_set_combined_into_p (gfor, true);
	  for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
	    {
	      tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
	      tree v = create_tmp_var (type);
	      gimple_omp_for_set_index (gforo, i, v);
	      t = unshare_expr (gimple_omp_for_initial (gfor, i));
	      gimple_omp_for_set_initial (gforo, i, t);
	      gimple_omp_for_set_cond (gforo, i,
				       gimple_omp_for_cond (gfor, i));
	      t = unshare_expr (gimple_omp_for_final (gfor, i));
	      gimple_omp_for_set_final (gforo, i, t);
	      t = unshare_expr (gimple_omp_for_incr (gfor, i));
	      gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
	      TREE_OPERAND (t, 0) = v;
	      gimple_omp_for_set_incr (gforo, i, t);
	      t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	      OMP_CLAUSE_DECL (t) = v;
	      OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
	      gimple_omp_for_set_clauses (gforo, t);
	      if (OMP_FOR_NON_RECTANGULAR (for_stmt))
		{
		  tree *p1 = NULL, *p2 = NULL;
		  t = gimple_omp_for_initial (gforo, i);
		  if (TREE_CODE (t) == TREE_VEC)
		    p1 = &TREE_VEC_ELT (t, 0);
		  t = gimple_omp_for_final (gforo, i);
		  if (TREE_CODE (t) == TREE_VEC)
		    {
		      if (p1)
			p2 = &TREE_VEC_ELT (t, 0);
		      else
			p1 = &TREE_VEC_ELT (t, 0);
		    }
		  if (p1)
		    {
		      int j;
		      for (j = 0; j < i; j++)
			if (*p1 == gimple_omp_for_index (gfor, j))
			  {
			    *p1 = gimple_omp_for_index (gforo, j);
			    if (p2)
			      *p2 = *p1;
			    break;
			  }
		      gcc_assert (j < i);
		    }
		}
	    }
	  gimplify_seq_add_stmt (pre_p, gforo);
	}
      else
	gimplify_seq_add_stmt (pre_p, gfor);

      if (TREE_CODE (orig_for_stmt) == OMP_FOR)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  unsigned lastprivate_conditional = 0;
	  while (ctx
		 && (ctx->region_type == ORT_TARGET_DATA
		     || ctx->region_type == ORT_TASKGROUP))
	    ctx = ctx->outer_context;
	  if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
	    for (tree c = gimple_omp_for_clauses (gfor);
		 c; c = OMP_CLAUSE_CHAIN (c))
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
		++lastprivate_conditional;
	  if (lastprivate_conditional)
	    {
	      struct omp_for_data fd;
	      omp_extract_for_data (gfor, &fd, NULL);
	      tree type
		= build_array_type_nelts (unsigned_type_for (fd.iter_type),
					  lastprivate_conditional);
	      tree var = create_tmp_var_raw (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__CONDTEMP_);
	      OMP_CLAUSE_DECL (c) = var;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
	      gimple_omp_for_set_clauses (gfor, c);
	      omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
	    }
	}
      else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
	{
	  unsigned lastprivate_conditional = 0;
	  for (tree c = gimple_omp_for_clauses (gfor); c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	      ++lastprivate_conditional;
	  if (lastprivate_conditional)
	    {
	      struct omp_for_data fd;
	      omp_extract_for_data (gfor, &fd, NULL);
	      tree type = unsigned_type_for (fd.iter_type);
	      while (lastprivate_conditional--)
		{
		  tree c = build_omp_clause (UNKNOWN_LOCATION,
					     OMP_CLAUSE__CONDTEMP_);
		  OMP_CLAUSE_DECL (c) = create_tmp_var (type);
		  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
		  gimple_omp_for_set_clauses (gfor, c);
		}
	    }
	}

      if (ret != GS_ALL_DONE)
	return GS_ERROR;
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

/* Helper for gimplify_omp_loop, called through walk_tree.  */

static tree
note_no_context_vars (tree *tp, int *, void *data)
{
  if (VAR_P (*tp)
      && DECL_CONTEXT (*tp) == NULL_TREE
      && !is_global_var (*tp))
    {
      vec<tree> *d = (vec<tree> *) data;
      d->safe_push (*tp);
      DECL_CONTEXT (*tp) = current_function_decl;
    }
  return NULL_TREE;
}

/* Gimplify the gross structure of an OMP_LOOP statement.  */
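/* A rough correspondence (an interpretation of the passes below, not a
   quote from the standard): after the bind clause is resolved, the loop
   construct is turned into an OMP_SIMD, which for bind(parallel) is then
   wrapped in an OMP_FOR, and for bind(teams) additionally in a combined
   OMP_PARALLEL + OMP_DISTRIBUTE pair, so e.g. "loop bind(teams)" lowers
   roughly like "distribute parallel for simd".  */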
static enum gimplify_status
gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree clauses = OMP_FOR_CLAUSES (for_stmt);
  struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
  enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
  int i;

  /* If order is not present, the behavior is as if order(concurrent)
     appeared.  */
  tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
  if (order == NULL_TREE)
    {
      order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
      OMP_CLAUSE_CHAIN (order) = clauses;
      OMP_FOR_CLAUSES (for_stmt) = clauses = order;
    }

  tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
  if (bind == NULL_TREE)
    {
      if (!flag_openmp) /* flag_openmp_simd */
	;
      else if (octx && (octx->region_type & ORT_TEAMS) != 0)
	kind = OMP_CLAUSE_BIND_TEAMS;
      else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
	kind = OMP_CLAUSE_BIND_PARALLEL;
      else
	{
	  for (; octx; octx = octx->outer_context)
	    {
	      if ((octx->region_type & ORT_ACC) != 0
		  || octx->region_type == ORT_NONE
		  || octx->region_type == ORT_IMPLICIT_TARGET)
		continue;
	      break;
	    }
	  if (octx == NULL && !in_omp_construct)
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind%> clause not specified on a %<loop%> "
		      "construct not nested inside another OpenMP construct");
	}
      bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
      OMP_CLAUSE_CHAIN (bind) = clauses;
      OMP_CLAUSE_BIND_KIND (bind) = kind;
      OMP_FOR_CLAUSES (for_stmt) = bind;
    }
  else
    switch (OMP_CLAUSE_BIND_KIND (bind))
      {
      case OMP_CLAUSE_BIND_THREAD:
	break;
      case OMP_CLAUSE_BIND_PARALLEL:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	for (; octx; octx = octx->outer_context)
	  if (octx->region_type == ORT_SIMD
	      && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
	    {
	      error_at (EXPR_LOCATION (for_stmt),
			"%<bind(parallel)%> on a %<loop%> construct nested "
			"inside %<simd%> construct");
	      OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	      break;
	    }
	kind = OMP_CLAUSE_BIND_PARALLEL;
	break;
      case OMP_CLAUSE_BIND_TEAMS:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	if ((octx
	     && octx->region_type != ORT_IMPLICIT_TARGET
	     && octx->region_type != ORT_NONE
	     && (octx->region_type & ORT_TEAMS) == 0)
	    || in_omp_construct)
	  {
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind(teams)%> on a %<loop%> region not strictly "
		      "nested inside of a %<teams%> region");
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	kind = OMP_CLAUSE_BIND_TEAMS;
	break;
      default:
	gcc_unreachable ();
      }

  for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
    switch (OMP_CLAUSE_CODE (*pc))
      {
      case OMP_CLAUSE_REDUCTION:
	if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<inscan%> %<reduction%> clause on "
		      "%qs construct", "loop");
	    OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
	  }
	if (OMP_CLAUSE_REDUCTION_TASK (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "invalid %<task%> reduction modifier on construct "
		      "other than %<parallel%>, %qs or %<sections%>",
		      lang_GNU_Fortran () ? "do" : "for");
	    OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      case OMP_CLAUSE_LASTPRIVATE:
	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	  {
	    tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	    if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
	      break;
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					    i)) == TREE_LIST
		&& TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					       i)))
	      {
		tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
		  break;
	      }
	  }
	if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<lastprivate%> clause on a %<loop%> construct refers "
		      "to a variable %qD which is not the loop iterator",
		      OMP_CLAUSE_DECL (*pc));
	    *pc = OMP_CLAUSE_CHAIN (*pc);
	    break;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      default:
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      }

  TREE_SET_CODE (for_stmt, OMP_SIMD);

  int last;
  switch (kind)
    {
    case OMP_CLAUSE_BIND_THREAD: last = 0; break;
    case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
    case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
    }
  for (int pass = 1; pass <= last; pass++)
    {
      if (pass == 2)
	{
	  tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
			      make_node (BLOCK));
	  append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
	  *expr_p = make_node (OMP_PARALLEL);
	  TREE_TYPE (*expr_p) = void_type_node;
	  OMP_PARALLEL_BODY (*expr_p) = bind;
	  OMP_PARALLEL_COMBINED (*expr_p) = 1;
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
	  tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
	  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
		    == TREE_LIST))
	      {
		tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
		  {
		    *pc = build_omp_clause (UNKNOWN_LOCATION,
					    OMP_CLAUSE_FIRSTPRIVATE);
		    OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
		    pc = &OMP_CLAUSE_CHAIN (*pc);
		  }
	      }
	}
      tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
      tree *pc = &OMP_FOR_CLAUSES (t);
      TREE_TYPE (t) = void_type_node;
      OMP_FOR_BODY (t) = *expr_p;
      SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_BIND:
	  case OMP_CLAUSE_ORDER:
	  case OMP_CLAUSE_COLLAPSE:
	    *pc = copy_node (c);
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    /* Only needed on innermost.  */
	    break;
	  case OMP_CLAUSE_LASTPRIVATE:
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
	      {
		*pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		pc = &OMP_CLAUSE_CHAIN (*pc);
	      }
	    *pc = copy_node (c);
	    OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
	      {
		if (pass != last)
		  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
		else
		  lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_REDUCTION:
	    *pc = copy_node (c);
	    OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
	      {
		auto_vec<tree> no_context_vars;
		int walk_subtrees = 0;
		note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				      &walk_subtrees, &no_context_vars);
		if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
		  note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
					      note_no_context_vars,
					      &no_context_vars);
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
					      note_no_context_vars,
					      &no_context_vars);

		OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
		  = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
		    = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));

		hash_map<tree, tree> decl_map;
		decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
		decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
			      OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
		  decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));

		copy_body_data id;
		memset (&id, 0, sizeof (id));
		id.src_fn = current_function_decl;
		id.dst_fn = current_function_decl;
		id.src_cfun = cfun;
		id.decl_map = &decl_map;
		id.copy_decl = copy_decl_no_change;
		id.transform_call_graph_edges = CB_CGE_DUPLICATE;
		id.transform_new_cfg = true;
		id.transform_return_to_modify = false;

		walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
			   &id, NULL);
		walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
			   &id, NULL);

		for (tree d : no_context_vars)
		  {
		    DECL_CONTEXT (d) = NULL_TREE;
		    DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
		  }
	      }
	    else
	      {
		OMP_CLAUSE_REDUCTION_INIT (*pc)
		  = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
		OMP_CLAUSE_REDUCTION_MERGE (*pc)
		  = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *pc = NULL_TREE;
      *expr_p = t;
    }
  return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
}

/* Helper function of optimize_target_teams, find OMP_TEAMS inside
   of OMP_TARGET's body.  */

static tree
find_omp_teams (tree *tp, int *walk_subtrees, void *)
{
  *walk_subtrees = 0;
  switch (TREE_CODE (*tp))
    {
    case OMP_TEAMS:
      return *tp;
    case BIND_EXPR:
    case STATEMENT_LIST:
      *walk_subtrees = 1;
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}

/* Try to determine if the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially are,
   integral decls that are firstprivate (explicitly or implicitly)
   or explicitly map(always, to:) or map(always, tofrom:) on the target
   region too, and expressions involving simple arithmetics on those
   too, function calls are not ok, dereferencing something neither etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default
   -1 stands for value that can't be determined easily before entering
      the target construct.
   -2 means that no explicit teams construct was specified
   If teams construct is not present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined.  */
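/* E.g. (illustrative sketch of the rules above):

     #pragma omp target
     #pragma omp teams num_teams(4) thread_limit(n)

   yields NUM_TEAMS_UPPER 4, and THREAD_LIMIT either n (when n is
   computable on the host by the rules above) or -1; with no teams
   construct in the body at all, NUM_TEAMS_UPPER would be -2.  */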
static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  tree num_teams_lower = NULL_TREE;
  tree num_teams_upper = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    num_teams_upper = build_int_cst (integer_type_node, -2);
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams_upper;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	    if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
	      {
		expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
		if (TREE_CODE (expr) == INTEGER_CST)
		  num_teams_lower = expr;
		else if (walk_tree (&expr, computable_teams_clause,
				    NULL, NULL))
		  num_teams_lower = integer_minus_one_node;
		else
		  {
		    num_teams_lower = expr;
		    gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
		    if (gimplify_expr (&num_teams_lower, pre_p, NULL,
				       is_gimple_val, fb_rvalue, false)
			== GS_ERROR)
		      {
			gimplify_omp_ctxp = target_ctx;
			num_teams_lower = integer_minus_one_node;
		      }
		    else
		      {
			gimplify_omp_ctxp = target_ctx;
			if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
			  OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
			    = num_teams_lower;
		      }
		  }
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
    {
      c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
      OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
      OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
      OMP_TARGET_CLAUSES (target) = c;
    }
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
  OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}

/* Gimplify the gross structure of several OMP constructs.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_SCOPE:
      ort = ORT_TASKGROUP;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_SERIAL:
      ort = ORT_ACC_SERIAL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      if (gimplify_omp_ctxp == NULL
	  || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
	ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  bool save_in_omp_construct = in_omp_construct;
  if ((ort & ORT_ACC) == 0)
    in_omp_construct = false;
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
      || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
    {
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));
  in_omp_construct = save_in_omp_construct;

  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
	{
	  for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
	}
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OACC_SERIAL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_SCOPE:
      stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
	 to be evaluated before the use_device_{ptr,addr} clauses if they
	 refer to the same variables.  */
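      /* E.g. (illustrative): for
	   #pragma omp target data map(tofrom: p[:n]) use_device_ptr(p)
	 the map of p must be processed first so that use_device_ptr sees
	 the already-mapped device address; the loop below therefore moves
	 all use_device_{ptr,addr} clauses to the end of the list.  */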
      {
	tree use_device_clauses;
	tree *pc, *uc = &use_device_clauses;
	for (pc = &OMP_CLAUSES (expr); *pc; )
	  if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
	      || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
	    {
	      *uc = *pc;
	      *pc = OMP_CLAUSE_CHAIN (*pc);
	      uc = &OMP_CLAUSE_CHAIN (*uc);
	    }
	  else
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	*uc = NULL_TREE;
	*pc = use_device_clauses;
	stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
					OMP_CLAUSES (expr));
      }
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
	gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}

/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
   target update constructs.  */

static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  int kind;
  gomp_target *stmt;
  enum omp_region_type ort = ORT_WORKSHARE;

  switch (TREE_CODE (expr))
    {
    case OACC_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
      ort = ORT_ACC;
      break;
    case OACC_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
      ort = ORT_ACC;
      break;
    case OACC_UPDATE:
      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
      ort = ORT_ACC;
      break;
    case OMP_TARGET_UPDATE:
      kind = GF_OMP_TARGET_KIND_UPDATE;
      break;
    case OMP_TARGET_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_ENTER_DATA;
      break;
    case OMP_TARGET_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_EXIT_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
			     ort, TREE_CODE (expr));
  gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
			       TREE_CODE (expr));
  if (TREE_CODE (expr) == OACC_UPDATE
      && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			  OMP_CLAUSE_IF_PRESENT))
    {
      /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
	 clauses.  */
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FORCE_TO:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	      break;
	    case GOMP_MAP_FORCE_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
	      break;
	    default:
	      break;
	    }
    }
  else if (TREE_CODE (expr) == OACC_EXIT_DATA
	   && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			       OMP_CLAUSE_FINALIZE))
    {
      /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
	 semantics.  */
      bool have_clause = false;
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
	      have_clause = true;
	      break;
	    case GOMP_MAP_RELEASE:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
	      have_clause = true;
	      break;
	    case GOMP_MAP_TO_PSET:
	      /* Fortran arrays with descriptors must map that descriptor when
		 doing standalone "attach" operations (in OpenACC).  In that
		 case GOMP_MAP_TO_PSET appears by itself with no preceding
		 clause (see trans-openmp.cc:gfc_trans_omp_clauses).  */
	      break;
	    case GOMP_MAP_POINTER:
	      /* TODO PR92929: we may see these here, but they'll always follow
		 one of the clauses above, and will be handled by libgomp as
		 one group, so no handling required here.  */
	      gcc_assert (have_clause);
	      break;
	    case GOMP_MAP_DETACH:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
	      have_clause = false;
	      break;
	    case GOMP_MAP_STRUCT:
	      have_clause = false;
	      break;
	    default:
	      gcc_unreachable ();
	    }
    }
  stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}

/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */
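/* E.g. (illustrative): for "#pragma omp atomic" on "x = x + 1" the front
   end stabilizes the lhs as *&x, so ADDR here is &x and occurrences of x
   in the rhs are recognized through that ADDR_EXPR, modulo useless type
   conversions and type-variant casts.  */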
static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (TREE_CODE (expr) == INDIRECT_REF)
    {
      expr = TREE_OPERAND (expr, 0);
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}

/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */
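/* E.g. (illustrative): stabilizing "x = x + foo ()" with lhs x first
   evaluates the call foo (), which does not involve the lhs, into a
   temporary in *PRE_P, so the atomic region itself only needs to compute
   LHS_VAR + tmp.  */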
static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var, tree &target_expr, bool rhs, int depth)
{
  tree expr = *expr_p;
  int saw_lhs = 0;

  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      if (pre_p)
	*expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  /* Maximum depth of lhs in expression is for the
     __builtin_clear_padding (...), __builtin_clear_padding (...),
     __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs;  */
  if (++depth > 7)
    goto finish;

  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case MODIFY_EXPR:
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;
	case ADDR_EXPR:
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    {
	      /* Special-case __builtin_clear_padding call before
		 __builtin_memcmp.  */
	      if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
		{
		  tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
		  if (fndecl
		      && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
		      && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
		      && (!pre_p
			  || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
						 lhs_addr, lhs_var,
						 target_expr, true, depth)))
		    {
		      if (pre_p)
			*expr_p = expr;
		      saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
						    pre_p, lhs_addr, lhs_var,
						    target_expr, true, depth);
		      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
						     pre_p, lhs_addr, lhs_var,
						     target_expr, rhs, depth);
		      return saw_lhs;
		    }
		}

	      if (pre_p)
		gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	    }
	  if (!pre_p)
	    return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
				       target_expr, rhs, depth);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
				     target_expr, rhs, depth);
	case COND_EXPR:
	  if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
				   lhs_var, target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case TARGET_EXPR:
	  if (TARGET_EXPR_INITIAL (expr))
	    {
	      if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
						lhs_var, target_expr, true,
						depth))
		break;
	      if (expr == target_expr)
		saw_lhs = 1;
	      else
		{
		  saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
						pre_p, lhs_addr, lhs_var,
						target_expr, true, depth);
		  if (saw_lhs && target_expr == NULL_TREE && pre_p)
		    target_expr = expr;
		}
	    }
	  break;
	default:
	  break;
	}
      break;
    case tcc_reference:
      if (TREE_CODE (expr) == BIT_FIELD_REF
	  || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var, target_expr, true,
				       depth);
      break;
    case tcc_vl_exp:
      if (TREE_CODE (expr) == CALL_EXPR)
	{
	  if (tree fndecl = get_callee_fndecl (expr))
	    if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
		|| fndecl_built_in_p (fndecl, BUILT_IN_MEMCMP))
	      {
		int nargs = call_expr_nargs (expr);
		for (int i = 0; i < nargs; i++)
		  saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
						 pre_p, lhs_addr, lhs_var,
						 target_expr, true, depth);
	      }
	}
      break;
    default:
      break;
    }

 finish:
  if (saw_lhs == 0 && pre_p)
    {
      enum gimplify_status gs;
      if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
	{
	  gimplify_stmt (&expr, pre_p);
	  return saw_lhs;
	}
      else if (rhs)
	gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      else
	gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}

/* Gimplify an OMP_ATOMIC statement.  */
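/* Illustrative sketch (not verbatim compiler output): "#pragma omp atomic"
   on "x = x + 1" is lowered to roughly

     GIMPLE_OMP_ATOMIC_LOAD <tmp, &x>
     GIMPLE_OMP_ATOMIC_STORE <tmp + 1>

   after goa_stabilize_expr has rewritten occurrences of x in the rhs
   into the temporary produced by the atomic load.  */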
static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;
  tree target_expr = NULL_TREE;

  tmp_load = create_tmp_reg (type);
  if (rhs
      && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
			     true, 0) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
					   OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      tree rhsarg = rhs;
      if (TREE_CODE (rhs) == COND_EXPR)
	rhsarg = TREE_OPERAND (rhs, 1);
      if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhsarg, 2);
	  tree op1 = TREE_OPERAND (rhsarg, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
	  tree t = build2_loc (EXPR_LOCATION (rhsarg),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhsarg),
					   BIT_FIELD_REF, TREE_TYPE (op1),
					   tmp_store, bitsize, bitpos), op1);
	  if (TREE_CODE (rhs) == COND_EXPR)
	    t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
			    TREE_OPERAND (rhs, 0), t, void_node);
	  gimplify_and_add (t, pre_p);
	  rhs = tmp_store;
	}
      bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
      if (TREE_CODE (rhs) == COND_EXPR)
	gimplify_ctxp->allow_rhs_cond_expr = true;
      enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
					       is_gimple_val, fb_rvalue);
      gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
      if (gs != GS_ALL_DONE)
	return GS_ERROR;
    }

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt
    = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
    {
      gimple_omp_atomic_set_weak (loadstmt);
      gimple_omp_atomic_set_weak (storestmt);
    }
  gimplify_seq_add_stmt (pre_p, storestmt);
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
   body, and adding some EH bits.  */
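/* Illustrative sketch: a `__transaction_atomic { body }` statement is
   lowered to a GIMPLE_TRANSACTION tuple wrapping the gimplified body;
   the subcode bits (GTMA_IS_OUTER, GTMA_IS_RELAXED) record the flavor
   for the later transactional-memory lowering passes.  */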
static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  gimple *body_stmt;
  gtransaction *trans_stmt;
  gimple_seq body = NULL;
  int subcode = 0;

  /* Wrap the transaction body in a BIND_EXPR so we have a context
     where to put decls for OMP.  */
  if (TREE_CODE (tbody) != BIND_EXPR)
    {
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
      TRANSACTION_EXPR_BODY (expr) = bind;
    }

  push_gimplify_context ();
  temp = voidify_wrapper_expr (*expr_p, NULL);

  body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  pop_gimplify_context (body_stmt);

  trans_stmt = gimple_build_transaction (body);
  if (TRANSACTION_EXPR_OUTER (expr))
    subcode = GTMA_IS_OUTER;
  else if (TRANSACTION_EXPR_RELAXED (expr))
    subcode = GTMA_IS_RELAXED;
  gimple_transaction_set_subcode (trans_stmt, subcode);

  gimplify_seq_add_stmt (pre_p, trans_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Return the gimplified GIMPLE_OMP_ORDERED tuple.  */
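/* Illustrative sketch: for

       #pragma omp for ordered(1)
       for (i = 0; i < n; i++)
	 {
	   #pragma omp ordered depend (sink: i - 1)
	   ...
	 }

   the sink clause is validated below against the loop iteration
   variables recorded in gimplify_omp_ctxp before the
   GIMPLE_OMP_ORDERED tuple is built.  */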
static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ())
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %qs clause must be "
		      "closely nested inside a loop with %<ordered%> clause",
		      OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
	  {
	    bool fail = false;
	    sink_c = c;
	    if (OMP_CLAUSE_DECL (c) == NULL_TREE)
	      continue;  /* omp_cur_iteration - 1 */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %qs clause with "
			  "%<sink%> modifier does not match number of "
			  "iteration variables",
			  OMP_CLAUSE_DOACROSS_DEPEND (c)
			  ? "depend" : "doacross");
		failures++;
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
	  {
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %qs clause with %<source%> "
			  "modifier on an %<ordered%> construct",
			  OMP_CLAUSE_DOACROSS_DEPEND (source_c)
			  ? "depend" : "doacross");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }

  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%qs clause with %<source%> modifier specified "
		"together with %qs clauses with %<sink%> modifier "
		"on the same construct",
		OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
		OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
      failures++;
    }

  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
/* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
   expression produces a value to be used as an operand inside a GIMPLE
   statement, the value will be stored back in *EXPR_P.  This value will
   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
   emitted in PRE_P and POST_P.

   Additionally, this process may overwrite parts of the input
   expression during gimplification.  Ideally, it should be
   possible to do non-destructive gimplification.

   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
   the expression needs to evaluate to a value to be used as
   an operand in a GIMPLE statement, this value will be stored in
   *EXPR_P on exit.  This happens when the caller specifies one
   of fb_lvalue or fb_rvalue fallback flags.

   PRE_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of EXPR and all the side-effects that must
   be executed before the main expression.  On exit, the last
   statement of PRE_P is the core statement being gimplified.  For
   instance, when gimplifying 'if (++a)' the last statement in
   PRE_P will be 'if (t.1)' where t.1 is the result of
   pre-incrementing 'a'.

   POST_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of all the side-effects that must be executed
   after the main expression.  If this is NULL, the post
   side-effects are stored at the end of PRE_P.

   The reason why the output is split in two is to handle post
   side-effects explicitly.  In some cases, an expression may have
   inner and outer post side-effects which need to be emitted in
   an order different from the one given by the recursive
   traversal.  For instance, for the expression (*p--)++ the post
   side-effects of '--' must actually occur *after* the post
   side-effects of '++'.  However, gimplification will first visit
   the inner expression, so if a separate POST sequence was not
   used, the resulting sequence would be:

	1	t.1 = *p
	2	p = p - 1
	3	t.2 = t.1 + 1
	4	*p = t.2

   However, the post-decrement operation in line #2 must not be
   evaluated until after the store to *p at line #4, so the
   correct sequence should be:

	1	t.1 = *p
	2	t.2 = t.1 + 1
	3	*p = t.2
	4	p = p - 1

   So, by specifying a separate post queue, it is possible
   to emit the post side-effects in the correct order.
   If POST_P is NULL, an internal queue will be used.  Before
   returning to the caller, the sequence POST_P is appended to
   the main output sequence PRE_P.

   GIMPLE_TEST_F points to a function that takes a tree T and
   returns nonzero if T is in the GIMPLE form requested by the
   caller.  The GIMPLE predicates are in gimple.cc.

   FALLBACK tells the function what sort of a temporary we want if
   gimplification cannot produce an expression that complies with
   GIMPLE_TEST_F.

   fb_none means that no temporary should be generated
   fb_rvalue means that an rvalue is OK to generate
   fb_lvalue means that an lvalue is OK to generate
   fb_either means that either is OK, but an lvalue is preferable.
   fb_mayfail means that gimplification may fail (in which case
   GS_ERROR will be returned)

   The return value is either GS_ERROR or GS_ALL_DONE, since this
   function iterates until EXPR is completely gimplified or an error
   occurs.  */
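/* As a small illustrative example, gimplifying the statement

       a = b + c * d;

   with GIMPLE_TEST_F == is_gimple_stmt emits into PRE_P

       t.1 = c * d;
       a = b + t.1;

   and clears *EXPR_P, since the value was consumed as a statement.  */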
enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback)
{
  tree tmp;
  gimple_seq internal_pre = NULL;
  gimple_seq internal_post = NULL;
  tree save_expr;
  bool is_statement;
  location_t saved_location;
  enum gimplify_status ret;
  gimple_stmt_iterator pre_last_gsi, post_last_gsi;
  tree label;

  save_expr = *expr_p;
  if (save_expr == NULL_TREE)
    return GS_ALL_DONE;

  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
  is_statement = gimple_test_f == is_gimple_stmt;
  if (is_statement)
    gcc_assert (pre_p);

  /* Consistency checks.  */
  if (gimple_test_f == is_gimple_reg)
    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
  else if (gimple_test_f == is_gimple_val
	   || gimple_test_f == is_gimple_call_addr
	   || gimple_test_f == is_gimple_condexpr_for_cond
	   || gimple_test_f == is_gimple_mem_rhs
	   || gimple_test_f == is_gimple_mem_rhs_or_call
	   || gimple_test_f == is_gimple_reg_rhs
	   || gimple_test_f == is_gimple_reg_rhs_or_call
	   || gimple_test_f == is_gimple_asm_val
	   || gimple_test_f == is_gimple_mem_ref_addr)
    gcc_assert (fallback & fb_rvalue);
  else if (gimple_test_f == is_gimple_min_lval
	   || gimple_test_f == is_gimple_lvalue)
    gcc_assert (fallback & fb_lvalue);
  else if (gimple_test_f == is_gimple_addressable)
    gcc_assert (fallback & fb_either);
  else if (gimple_test_f == is_gimple_stmt)
    gcc_assert (fallback == fb_none);
  else
    {
      /* We should have recognized the GIMPLE_TEST_F predicate to
	 know what kind of fallback to use in case a temporary is
	 needed to hold the value or address of *EXPR_P.  */
      gcc_unreachable ();
    }

  /* We used to check the predicate here and return immediately if it
     succeeds.  This is wrong; the design is for gimplification to be
     idempotent, and for the predicates to only test for valid forms, not
     whether they are fully simplified.  */
  if (pre_p == NULL)
    pre_p = &internal_pre;

  if (post_p == NULL)
    post_p = &internal_post;

  /* Remember the last statements added to PRE_P and POST_P.  Every
     new statement added by the gimplification helpers needs to be
     annotated with location information.  To centralize the
     responsibility, we remember the last statement that had been
     added to both queues before gimplifying *EXPR_P.  If
     gimplification produces new statements in PRE_P and POST_P, those
     statements will be annotated with the same location information
     as *EXPR_P.  */
  pre_last_gsi = gsi_last (*pre_p);
  post_last_gsi = gsi_last (*post_p);

  saved_location = input_location;
  if (save_expr != error_mark_node
      && EXPR_HAS_LOCATION (*expr_p))
    input_location = EXPR_LOCATION (*expr_p);

  /* Loop over the specific gimplifiers until the toplevel node
     remains the same.  */
  do
    {
      /* Strip away as many useless type conversions as possible
	 at the toplevel.  */
      STRIP_USELESS_TYPE_CONVERSION (*expr_p);

      /* Remember the expr.  */
      save_expr = *expr_p;

      /* Die, die, die, my darling.  */
      if (error_operand_p (save_expr))
	{
	  ret = GS_ERROR;
	  break;
	}

      /* Do any language-specific gimplification.  */
      ret = ((enum gimplify_status)
	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
      if (ret == GS_OK)
	{
	  if (*expr_p == NULL_TREE)
	    break;
	  if (*expr_p != save_expr)
	    continue;
	}
      else if (ret != GS_UNHANDLED)
	break;

      /* Make sure that all the cases set 'ret' appropriately.  */
      ret = GS_UNHANDLED;
      switch (TREE_CODE (*expr_p))
	{
	  /* First deal with the special cases.  */

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
					fallback != fb_none,
					TREE_TYPE (*expr_p));
	  break;

	case VIEW_CONVERT_EXPR:
	  if ((fallback & fb_rvalue)
	      && is_gimple_reg_type (TREE_TYPE (*expr_p))
	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      recalculate_side_effects (*expr_p);
	      break;
	    }
	  /* Fallthru.  */

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case COMPONENT_REF:
	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
					fallback ? fallback : fb_rvalue);
	  break;

	case COND_EXPR:
	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);

	  /* C99 code may assign to an array in a structure value of a
	     conditional expression, and this has undefined behavior
	     only on execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case CALL_EXPR:
	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);

	  /* C99 code may assign to an array in a structure returned
	     from a function, and this has undefined behavior only on
	     execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case TREE_LIST:
	  gcc_unreachable ();

	case COMPOUND_EXPR:
	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
	  break;

	case COMPOUND_LITERAL_EXPR:
	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
						gimple_test_f, fallback);
	  break;

	case MODIFY_EXPR:
	case INIT_EXPR:
	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
				      fallback != fb_none);
	  break;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  {
	    /* Preserve the original type of the expression and the
	       source location of the outer expression.  */
	    tree org_type = TREE_TYPE (*expr_p);
	    *expr_p = gimple_boolify (*expr_p);
	    *expr_p = build3_loc (input_location, COND_EXPR,
				  org_type, *expr_p,
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_true_node),
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_false_node));
	    ret = GS_OK;
	    break;
	  }
	case TRUTH_NOT_EXPR:
	  {
	    tree type = TREE_TYPE (*expr_p);
	    /* The parsers are careful to generate TRUTH_NOT_EXPR
	       only with operands that are always zero or one.
	       We do not fold here but handle the only interesting case
	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
	    *expr_p = gimple_boolify (*expr_p);
	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0));
	    else
	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0),
				    build_int_cst (TREE_TYPE (*expr_p), 1));
	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
	    ret = GS_OK;
	    break;
	  }

	case ADDR_EXPR:
	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
	  break;

	case ANNOTATE_EXPR:
	  {
	    tree cond = TREE_OPERAND (*expr_p, 0);
	    tree kind = TREE_OPERAND (*expr_p, 1);
	    tree data = TREE_OPERAND (*expr_p, 2);
	    tree type = TREE_TYPE (cond);
	    if (!INTEGRAL_TYPE_P (type))
	      {
		*expr_p = cond;
		ret = GS_OK;
		break;
	      }
	    tree tmp = create_tmp_var (type);
	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
	    gcall *call
	      = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
	    gimple_call_set_lhs (call, tmp);
	    gimplify_seq_add_stmt (pre_p, call);
	    *expr_p = tmp;
	    ret = GS_ALL_DONE;
	    break;
	  }

	case VA_ARG_EXPR:
	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
	  break;

	CASE_CONVERT:
	  if (IS_EMPTY_STMT (*expr_p))
	    {
	      ret = GS_ALL_DONE;
	      break;
	    }

	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
	      || fallback == fb_none)
	    {
	      /* Just strip a conversion to void (or in void context) and
		 try again.  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	      ret = GS_OK;
	      break;
	    }

	  ret = gimplify_conversion (expr_p);
	  if (ret == GS_ERROR)
	    break;
	  if (*expr_p != save_expr)
	    break;
	  /* FALLTHRU */

	case FIX_TRUNC_EXPR:
	  /* unary_expr: ... | '(' cast ')' val | ...  */
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_val, fb_rvalue);
	  recalculate_side_effects (*expr_p);
	  break;

	case INDIRECT_REF:
	  {
	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));

	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
	    if (*expr_p != save_expr)
	      {
		ret = GS_OK;
		break;
	      }

	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				 is_gimple_reg, fb_rvalue);
	    if (ret == GS_ERROR)
	      break;

	    recalculate_side_effects (*expr_p);
	    *expr_p = fold_build2_loc (input_location, MEM_REF,
				       TREE_TYPE (*expr_p),
				       TREE_OPERAND (*expr_p, 0),
				       build_int_cst (saved_ptr_type, 0));
	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
	    TREE_THIS_NOTRAP (*expr_p) = notrap;
	    ret = GS_OK;
	    break;
	  }
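	/* E.g. (illustrative) a dereference `*p` that cannot be folded
	   away is rebuilt above as `MEM_REF <p, 0>`, the canonical GIMPLE
	   form of an indirection, with the volatility and trapping bits
	   preserved.  */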
	  /* We arrive here through the various re-gimplification paths.  */
	case MEM_REF:
	  /* First try re-folding the whole thing.  */
	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
			     TREE_OPERAND (*expr_p, 0),
			     TREE_OPERAND (*expr_p, 1));
	  if (tmp)
	    {
	      REF_REVERSE_STORAGE_ORDER (tmp)
		= REF_REVERSE_STORAGE_ORDER (*expr_p);
	      *expr_p = tmp;
	      recalculate_side_effects (*expr_p);
	      ret = GS_OK;
	      break;
	    }

	  /* Avoid re-gimplifying the address operand if it is already
	     in suitable form.  Re-gimplifying would mark the address
	     operand addressable.  Always gimplify when not in SSA form
	     as we still may have to gimplify decls with value-exprs.  */
	  if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				   is_gimple_mem_ref_addr, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  recalculate_side_effects (*expr_p);
	  ret = GS_ALL_DONE;
	  break;
	  /* Constants need not be gimplified.  */
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case STRING_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	  /* Drop the overflow flag on constants, we do not want
	     that in the GIMPLE IL.  */
	  if (TREE_OVERFLOW_P (*expr_p))
	    *expr_p = drop_tree_overflow (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	case CONST_DECL:
	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
	     CONST_DECL node.  Otherwise the decl is replaceable by its
	     value.  */
	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
	  if (fallback & fb_lvalue)
	    ret = GS_ALL_DONE;
	  else
	    {
	      *expr_p = DECL_INITIAL (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case DECL_EXPR:
	  ret = gimplify_decl_expr (expr_p, pre_p);
	  break;

	case BIND_EXPR:
	  ret = gimplify_bind_expr (expr_p, pre_p);
	  break;

	case LOOP_EXPR:
	  ret = gimplify_loop_expr (expr_p, pre_p);
	  break;

	case SWITCH_EXPR:
	  ret = gimplify_switch_expr (expr_p, pre_p);
	  break;

	case EXIT_EXPR:
	  ret = gimplify_exit_expr (expr_p);
	  break;

	case GOTO_EXPR:
	  /* If the target is not LABEL, then it is a computed jump
	     and the target needs to be gimplified.  */
	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
	    {
	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
				   NULL, is_gimple_val, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case PREDICT_EXPR:
	  gimplify_seq_add_stmt (pre_p,
			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
					      PREDICT_EXPR_OUTCOME (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case LABEL_EXPR:
	  ret = gimplify_label_expr (expr_p, pre_p);
	  label = LABEL_EXPR_LABEL (*expr_p);
	  gcc_assert (decl_function_context (label) == current_function_decl);

	  /* If the label is used in a goto statement, or address of the label
	     is taken, we need to unpoison all variables that were seen so far.
	     Doing so would prevent us from reporting false positives.  */
	  if (asan_poisoned_variables
	      && asan_used_labels != NULL
	      && asan_used_labels->contains (label)
	      && !gimplify_omp_ctxp)
	    asan_poison_variables (asan_poisoned_variables, false, pre_p);
	  break;

	case CASE_LABEL_EXPR:
	  ret = gimplify_case_label_expr (expr_p, pre_p);

	  if (gimplify_ctxp->live_switch_vars)
	    asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
				   pre_p);
	  break;

	case RETURN_EXPR:
	  ret = gimplify_return_expr (*expr_p, pre_p);
	  break;

	case CONSTRUCTOR:
	  /* Don't reduce this in place; let gimplify_init_constructor work its
	     magic.  But if we're just elaborating this for side effects, just
	     gimplify any element that has side-effects.  */
	  if (fallback == fb_none)
	    {
	      unsigned HOST_WIDE_INT ix;
	      tree val;
	      tree temp = NULL_TREE;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
		if (TREE_SIDE_EFFECTS (val))
		  append_to_statement_list (val, &temp);

	      *expr_p = temp;
	      ret = temp ? GS_OK : GS_ALL_DONE;
	    }
	  /* C99 code may assign to an array in a constructed
	     structure or union, and this has undefined behavior only
	     on execution, so create a temporary if an lvalue is
	     required.  */
	  else if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  else
	    ret = GS_ALL_DONE;
	  break;
	  /* The following are special cases that are not handled by the
	     original GIMPLE grammar.  */

	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
	     eliminated.  */
	case SAVE_EXPR:
	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
	  break;

	case BIT_FIELD_REF:
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			       post_p, is_gimple_lvalue, fb_either);
	  recalculate_side_effects (*expr_p);
	  break;

	case TARGET_MEM_REF:
	  {
	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;

	    if (TMR_BASE (*expr_p))
	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
				  post_p, is_gimple_mem_ref_addr, fb_either);
	    if (TMR_INDEX (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    if (TMR_INDEX2 (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
	    ret = MIN (r0, r1);
	  }
	  break;

	case NON_LVALUE_EXPR:
	  /* This should have been stripped above.  */
	  gcc_unreachable ();

	case ASM_EXPR:
	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
	  break;

	case TRY_FINALLY_EXPR:
	case TRY_CATCH_EXPR:
	  {
	    gimple_seq eval, cleanup;
	    gtry *try_;

	    /* Calls to destructors are generated automatically in FINALLY/CATCH
	       block.  They should have location as UNKNOWN_LOCATION.  However,
	       gimplify_call_expr will reset these call stmts to input_location
	       if it finds stmt's location is unknown.  To prevent resetting for
	       destructors, we set the input_location to unknown.
	       Note that this only affects the destructor calls in FINALLY/CATCH
	       block, and will automatically reset to its original value by the
	       end of gimplify_expr.  */
	    input_location = UNKNOWN_LOCATION;
	    eval = cleanup = NULL;
	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
	    if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
		&& TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
	      {
		gimple_seq n = NULL, e = NULL;
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						0), &n);
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						1), &e);
		if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
		  {
		    geh_else *stmt = gimple_build_eh_else (n, e);
		    gimple_seq_add_stmt (&cleanup, stmt);
		  }
	      }
	    else
	      gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
	    if (gimple_seq_empty_p (cleanup))
	      {
		gimple_seq_add_seq (pre_p, eval);
		ret = GS_ALL_DONE;
		break;
	      }
	    try_ = gimple_build_try (eval, cleanup,
				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
				     ? GIMPLE_TRY_FINALLY
				     : GIMPLE_TRY_CATCH);
	    if (EXPR_HAS_LOCATION (save_expr))
	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
	      gimple_set_location (try_, saved_location);
	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
	      gimple_try_set_catch_is_cleanup (try_,
					       TRY_CATCH_IS_CLEANUP (*expr_p));
	    gimplify_seq_add_stmt (pre_p, try_);
	    ret = GS_ALL_DONE;
	    break;
	  }
	case CLEANUP_POINT_EXPR:
	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
	  break;

	case TARGET_EXPR:
	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
	  break;

	case CATCH_EXPR:
	  {
	    gimple *c;
	    gimple_seq handler = NULL;
	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
	    gimplify_seq_add_stmt (pre_p, c);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case EH_FILTER_EXPR:
	  {
	    gimple *ehf;
	    gimple_seq failure = NULL;

	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
	    copy_warning (ehf, *expr_p);
	    gimplify_seq_add_stmt (pre_p, ehf);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OBJ_TYPE_REF:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    TREE_SIDE_EFFECTS (*expr_p) = 0;
	    ret = MIN (r0, r1);
	  }
	  break;

	case LABEL_DECL:
	  /* We get here when taking the address of a label.  We mark
	     the label as "forced"; meaning it can never be removed and
	     it is a potential target for any computed goto.  */
	  FORCED_LABEL (*expr_p) = 1;
	  ret = GS_ALL_DONE;
	  break;

	case STATEMENT_LIST:
	  ret = gimplify_statement_list (expr_p, pre_p);
	  break;

	case WITH_SIZE_EXPR:
	  {
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			   post_p == &internal_post ? NULL : post_p,
			   gimple_test_f, fallback);
	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			   is_gimple_val, fb_rvalue);
	    ret = GS_ALL_DONE;
	  }
	  break;

	case VAR_DECL:
	case PARM_DECL:
	  ret = gimplify_var_or_parm_decl (expr_p);
	  break;

	case RESULT_DECL:
	  /* When within an OMP context, notice uses of variables.  */
	  if (gimplify_omp_ctxp)
	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
	  ret = GS_ALL_DONE;
	  break;

	case DEBUG_EXPR_DECL:
	  gcc_unreachable ();

	case DEBUG_BEGIN_STMT:
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_debug_begin_stmt
				 (TREE_BLOCK (*expr_p),
				  EXPR_LOCATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  *expr_p = NULL;
	  break;

	case SSA_NAME:
	  /* Allow callbacks into the gimplifier during optimization.  */
	  ret = GS_ALL_DONE;
	  break;

	case OMP_PARALLEL:
	  gimplify_omp_parallel (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_TASK:
	  gimplify_omp_task (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SIMD:
	  {
	    /* Temporarily disable into_ssa, as scan_omp_simd
	       which calls copy_gimple_seq_and_replace_locals can't deal
	       with SSA_NAMEs defined outside of the body properly.  */
	    bool saved_into_ssa = gimplify_ctxp->into_ssa;
	    gimplify_ctxp->into_ssa = false;
	    ret = gimplify_omp_for (expr_p, pre_p);
	    gimplify_ctxp->into_ssa = saved_into_ssa;
	    break;
	  }

	case OMP_FOR:
	case OMP_DISTRIBUTE:
	case OMP_TASKLOOP:
	case OACC_LOOP:
	  ret = gimplify_omp_for (expr_p, pre_p);
	  break;

	case OMP_LOOP:
	  ret = gimplify_omp_loop (expr_p, pre_p);
	  break;

	case OACC_CACHE:
	  gimplify_oacc_cache (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_DECLARE:
	  gimplify_oacc_declare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_HOST_DATA:
	case OACC_DATA:
	case OACC_KERNELS:
	case OACC_PARALLEL:
	case OACC_SERIAL:
	case OMP_SCOPE:
	case OMP_SECTIONS:
	case OMP_SINGLE:
	case OMP_TARGET:
	case OMP_TARGET_DATA:
	case OMP_TEAMS:
	  gimplify_omp_workshare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_ENTER_DATA:
	case OACC_EXIT_DATA:
	case OACC_UPDATE:
	case OMP_TARGET_UPDATE:
	case OMP_TARGET_ENTER_DATA:
	case OMP_TARGET_EXIT_DATA:
	  gimplify_omp_target_update (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SECTION:
	case OMP_MASTER:
	case OMP_MASKED:
	case OMP_ORDERED:
	case OMP_CRITICAL:
	case OMP_SCAN:
	  {
	    gimple_seq body = NULL;
	    gimple *g;
	    bool saved_in_omp_construct = in_omp_construct;

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    switch (TREE_CODE (*expr_p))
	      {
	      case OMP_SECTION:
		g = gimple_build_omp_section (body);
		break;
	      case OMP_MASTER:
		g = gimple_build_omp_master (body);
		break;
	      case OMP_ORDERED:
		g = gimplify_omp_ordered (*expr_p, body);
		if (OMP_BODY (*expr_p) == NULL_TREE
		    && gimple_code (g) == GIMPLE_OMP_ORDERED)
		  gimple_omp_ordered_standalone (g);
		break;
	      case OMP_MASKED:
		gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_MASKED);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_MASKED_CLAUSES (*expr_p),
					     OMP_MASKED);
		g = gimple_build_omp_masked (body,
					     OMP_MASKED_CLAUSES (*expr_p));
		break;
	      case OMP_CRITICAL:
		gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_CRITICAL);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_CRITICAL_CLAUSES (*expr_p),
					     OMP_CRITICAL);
		g = gimple_build_omp_critical (body,
					       OMP_CRITICAL_NAME (*expr_p),
					       OMP_CRITICAL_CLAUSES (*expr_p));
		break;
	      case OMP_SCAN:
		gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_SCAN);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_SCAN_CLAUSES (*expr_p),
					     OMP_SCAN);
		g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
		break;
	      default:
		gcc_unreachable ();
	      }
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OMP_TASKGROUP:
	  {
	    gimple_seq body = NULL;

	    tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
	    bool saved_in_omp_construct = in_omp_construct;
	    gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
				       OMP_TASKGROUP);
	    gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    gimple_seq cleanup = NULL;
	    tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
	    gimple *g = gimple_build_call (fn, 0);
	    gimple_seq_add_stmt (&cleanup, g);
	    g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	    body = NULL;
	    gimple_seq_add_stmt (&body, g);
	    g = gimple_build_omp_taskgroup (body, *pclauses);
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }
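	/* Illustrative sketch: `#pragma omp taskgroup { body }` becomes
	   roughly

	       GIMPLE_OMP_TASKGROUP
		 try { body } finally { GOMP_taskgroup_end (); }

	   so the end-of-taskgroup wait runs even on abnormal exits from
	   the body.  */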
	case OMP_ATOMIC:
	case OMP_ATOMIC_READ:
	case OMP_ATOMIC_CAPTURE_OLD:
	case OMP_ATOMIC_CAPTURE_NEW:
	  ret = gimplify_omp_atomic (expr_p, pre_p);
	  break;

	case TRANSACTION_EXPR:
	  ret = gimplify_transaction (expr_p, pre_p);
	  break;

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  {
	    tree orig_type = TREE_TYPE (*expr_p);
	    tree new_type, xop0, xop1;
	    *expr_p = gimple_boolify (*expr_p);
	    new_type = TREE_TYPE (*expr_p);
	    if (!useless_type_conversion_p (orig_type, new_type))
	      {
		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
		ret = GS_OK;
		break;
	      }

	    /* Boolified binary truth expressions are semantically equivalent
	       to bitwise binary expressions.  Canonicalize them to the
	       bitwise variant.  */
	    switch (TREE_CODE (*expr_p))
	      {
	      case TRUTH_AND_EXPR:
		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
		break;
	      case TRUTH_OR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
		break;
	      case TRUTH_XOR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
		break;
	      default:
		break;
	      }
	    /* Now make sure that operands have compatible type to
	       expression's new_type.  */
	    xop0 = TREE_OPERAND (*expr_p, 0);
	    xop1 = TREE_OPERAND (*expr_p, 1);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
							    new_type,
							    xop0);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
							    new_type,
							    xop1);
	    /* Continue classified as tcc_binary.  */
	    goto expr_2;
	  }

	case VEC_COND_EXPR:
	  goto expr_3;

	case VEC_PERM_EXPR:
	  /* Classified as tcc_expression.  */
	  goto expr_3;

	case BIT_INSERT_EXPR:
	  /* Argument 3 is a constant.  */
	  goto expr_2;

	case POINTER_PLUS_EXPR:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    recalculate_side_effects (*expr_p);
	    ret = MIN (r0, r1);
	    break;
	  }

	default:
	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
	    {
	    case tcc_comparison:
	      /* Handle comparison of objects of non scalar mode aggregates
		 with a call to memcmp.  It would be nice to only have to do
		 this for variable-sized objects, but then we'd have to allow
		 the same nest of reference nodes we allow for MODIFY_EXPR and
		 that's too complex.

		 Compare scalar mode aggregates as scalar mode values.  Using
		 memcmp for them would be very inefficient at best, and is
		 plain wrong if bitfields are involved.  */
	      if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
		ret = GS_ERROR;
	      else
		{
		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));

		  /* Vector comparisons need no boolification.  */
		  if (TREE_CODE (type) == VECTOR_TYPE)
		    goto expr_2;
		  else if (!AGGREGATE_TYPE_P (type))
		    {
		      tree org_type = TREE_TYPE (*expr_p);
		      *expr_p = gimple_boolify (*expr_p);
		      if (!useless_type_conversion_p (org_type,
						      TREE_TYPE (*expr_p)))
			{
			  *expr_p = fold_convert_loc (input_location,
						      org_type, *expr_p);
			  ret = GS_OK;
			}
		      else
			goto expr_2;
		    }
		  else if (TYPE_MODE (type) != BLKmode)
		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
		  else
		    ret = gimplify_variable_sized_compare (expr_p);
		}
	      break;
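	    /* E.g. (illustrative) the tcc_comparison case above lowers an
	       equality test of two BLKmode aggregates `s1 == s2` via
	       gimplify_variable_sized_compare to something like
	       `memcmp (&s1, &s2, sizeof (s1)) == 0`.  */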
	    /* If *EXPR_P does not need to be special-cased, handle it
	       according to its class.  */
	    case tcc_unary:
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      break;

	    case tcc_binary:
	    expr_2:
	      {
		enum gimplify_status r0, r1;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (r0, r1);
		break;
	      }

	    expr_3:
	      {
		enum gimplify_status r0, r1, r2;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (MIN (r0, r1), r2);
		break;
	      }

	    case tcc_declaration:
	    case tcc_constant:
	      ret = GS_ALL_DONE;
	      goto dont_recalculate;

	    default:
	      gcc_unreachable ();
	    }

	  recalculate_side_effects (*expr_p);

	dont_recalculate:
	  break;
	}

      gcc_assert (*expr_p || ret != GS_OK);
    }
  while (ret == GS_OK);
  /* If we encountered an error_mark somewhere nested inside, either
     stub out the statement or propagate the error back out.  */
  if (ret == GS_ERROR)
    {
      if (is_statement)
	*expr_p = NULL;
      goto out;
    }

  /* This was only valid as a return value from the langhook, which
     we handled.  Make sure it doesn't escape from any other context.  */
  gcc_assert (ret != GS_UNHANDLED);

  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
    {
      /* We aren't looking for a value, and we don't have a valid
	 statement.  If it doesn't have side-effects, throw it away.
	 We can also get here with code such as "*&&L;", where L is
	 a LABEL_DECL that is marked as FORCED_LABEL.  */
      if (TREE_CODE (*expr_p) == LABEL_DECL
	  || !TREE_SIDE_EFFECTS (*expr_p))
	*expr_p = NULL;
      else if (!TREE_THIS_VOLATILE (*expr_p))
	{
	  /* This is probably a _REF that contains something nested that
	     has side effects.  Recurse through the operands to find it.  */
	  enum tree_code code = TREE_CODE (*expr_p);

	  switch (code)
	    {
	    case COMPONENT_REF:
	    case REALPART_EXPR:
	    case IMAGPART_EXPR:
	    case VIEW_CONVERT_EXPR:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    default:
	      /* Anything else with side-effects must be converted to
		 a valid statement before we get here.  */
	      gcc_unreachable ();
	    }

	  *expr_p = NULL;
	}
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
	       && !is_empty_type (TREE_TYPE (*expr_p)))
	{
	  /* Historically, the compiler has treated a bare reference
	     to a non-BLKmode volatile lvalue as forcing a load.  */
	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));

	  /* Normally, we do not want to create a temporary for a
	     TREE_ADDRESSABLE type because such a type should not be
	     copied by bitwise-assignment.  However, we make an
	     exception here, as all we are doing here is ensuring that
	     we read the bytes that make up the type.  We use
	     create_tmp_var_raw because create_tmp_var will abort when
	     given a TREE_ADDRESSABLE type.  */
	  tree tmp = create_tmp_var_raw (type, "vol");
	  gimple_add_tmp_var (tmp);
	  gimplify_assign (tmp, *expr_p, pre_p);
	  *expr_p = NULL;
	}
      else
	/* We can't do anything useful with a volatile reference to
	   an incomplete type, so just throw it away.  Likewise for
	   a BLKmode type, since any implicit inner load should
	   already have been turned into an explicit one by the
	   gimplification process.  */
	*expr_p = NULL;
    }

  /* If we are gimplifying at the statement level, we're done.  Tack
     everything together and return.  */
  if (fallback == fb_none || is_statement)
    {
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
	 it out for GC to reclaim it.  */
      *expr_p = NULL_TREE;

      if (!gimple_seq_empty_p (internal_pre)
	  || !gimple_seq_empty_p (internal_post))
	{
	  gimplify_seq_add_seq (&internal_pre, internal_post);
	  gimplify_seq_add_seq (pre_p, internal_pre);
	}

      /* The result of gimplifying *EXPR_P is going to be the last few
	 statements in *PRE_P and *POST_P.  Add location information
	 to all the statements that were added by the gimplification
	 helpers.  */
      if (!gimple_seq_empty_p (*pre_p))
	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);

      if (!gimple_seq_empty_p (*post_p))
	annotate_all_with_location_after (*post_p, post_last_gsi,
					  input_location);

      goto out;
    }
#ifdef ENABLE_GIMPLE_CHECKING
  if (*expr_p)
    {
      enum tree_code code = TREE_CODE (*expr_p);
      /* These expressions should already be in gimple IR form.  */
      gcc_assert (code != MODIFY_EXPR
		  && code != ASM_EXPR
		  && code != BIND_EXPR
		  && code != CATCH_EXPR
		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
		  && code != EH_FILTER_EXPR
		  && code != GOTO_EXPR
		  && code != LABEL_EXPR
		  && code != LOOP_EXPR
		  && code != SWITCH_EXPR
		  && code != TRY_FINALLY_EXPR
		  && code != EH_ELSE_EXPR
		  && code != OACC_PARALLEL
		  && code != OACC_KERNELS
		  && code != OACC_SERIAL
		  && code != OACC_DATA
		  && code != OACC_HOST_DATA
		  && code != OACC_DECLARE
		  && code != OACC_UPDATE
		  && code != OACC_ENTER_DATA
		  && code != OACC_EXIT_DATA
		  && code != OACC_CACHE
		  && code != OMP_CRITICAL
		  && code != OMP_FOR
		  && code != OACC_LOOP
		  && code != OMP_MASTER
		  && code != OMP_MASKED
		  && code != OMP_TASKGROUP
		  && code != OMP_ORDERED
		  && code != OMP_PARALLEL
		  && code != OMP_SCAN
		  && code != OMP_SECTIONS
		  && code != OMP_SECTION
		  && code != OMP_SINGLE
		  && code != OMP_SCOPE);
    }
#endif
  /* Otherwise we're gimplifying a subexpression, so the resulting
     value is interesting.  If it's a valid operand that matches
     GIMPLE_TEST_F, we're done.  Unless we are handling some
     post-effects internally; if that's the case, we need to copy into
     a temporary before adding the post-effects to POST_P.  */
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
    goto out;

  /* Otherwise, we need to create a new temporary for the gimplified
     expression.  */

  /* We can't return an lvalue if we have an internal postqueue.  The
     object the lvalue refers to would (probably) be modified by the
     postqueue; we need to copy the value out first, which means an
     rvalue.  */
  if ((fallback & fb_lvalue)
      && gimple_seq_empty_p (internal_post)
      && is_gimple_addressable (*expr_p))
    {
      /* An lvalue will do.  Take the address of the expression, store it
	 in a temporary, and replace the expression with an INDIRECT_REF of
	 that temporary.  */
      tree ref_alias_type = reference_alias_ptr_type (*expr_p);
      unsigned int ref_align = get_object_alignment (*expr_p);
      tree ref_type = TREE_TYPE (*expr_p);
      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
      if (TYPE_ALIGN (ref_type) != ref_align)
	ref_type = build_aligned_type (ref_type, ref_align);
      *expr_p = build2 (MEM_REF, ref_type,
			tmp, build_zero_cst (ref_alias_type));
    }
  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
    {
      /* An rvalue will do.  Assign the gimplified expression into a
	 new temporary TMP and replace the original expression with
	 TMP.  First, make sure that the expression has a type so that
	 it can be assigned into a temporary.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
      *expr_p = get_formal_tmp_var (*expr_p, pre_p);
    }
  else
    {
#ifdef ENABLE_GIMPLE_CHECKING
      if (!(fallback & fb_mayfail))
	{
	  fprintf (stderr, "gimplification failed:\n");
	  print_generic_expr (stderr, *expr_p);
	  debug_tree (*expr_p);
	  internal_error ("gimplification failed");
	}
#endif
      gcc_assert (fallback & fb_mayfail);

      /* If this is an asm statement, and the user asked for the
	 impossible, don't die.  Fail and let gimplify_asm_expr
	 issue an error.  */
      ret = GS_ERROR;
      goto out;
    }

  /* Make sure the temporary matches our predicate.  */
  gcc_assert ((*gimple_test_f) (*expr_p));

  if (!gimple_seq_empty_p (internal_post))
    {
      annotate_all_with_location (internal_post, input_location);
      gimplify_seq_add_seq (pre_p, internal_post);
    }

 out:
  input_location = saved_location;
  return ret;
}
/* Like gimplify_expr but make sure the gimplified result is not itself
   a SSA name (but a decl if it were).  Temporaries required by
   evaluating *EXPR_P may be still SSA names.  */

static enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback,
	       bool allow_ssa)
{
  enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
					    gimple_test_f, fallback);
  if (! allow_ssa
      && TREE_CODE (*expr_p) == SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
  return ret;
}
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */
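/* E.g. (illustrative) for a C99 VLA type such as `int a[n]`, the
   computed TYPE_SIZE expressions (roughly `n * 32` in bits and `n * 4`
   in bytes) are gimplified into temporaries emitted on LIST_P, so that
   later uses of the size are plain GIMPLE values.  */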
static void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  if (type == NULL || type == error_mark_node)
    return;

  const bool ignored_p
    = TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_IGNORED_P (TYPE_NAME (type));
  tree t;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!ignored_p
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    /* Likewise, ensure variable offsets aren't removed.  */
	    if (!ignored_p
		&& (t = DECL_FIELD_OFFSET (field))
		&& VAR_P (t)
		&& DECL_ARTIFICIAL (t))
	      DECL_IGNORED_P (t) = 0;
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

static void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (expr == NULL_TREE
      || is_gimple_constant (expr)
      || TREE_CODE (expr) == VAR_DECL
      || CONTAINS_PLACEHOLDER_P (expr))
    return;

  *expr_p = unshare_expr (expr);

  /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
     if the def vanishes.  */
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);

  /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
     FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
     as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs.  */
  if (is_gimple_constant (*expr_p))
    *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
}
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

static gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_nondebug_stmt (seq);
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && (gimple_seq_first_nondebug_stmt (seq)
	  == gimple_seq_last_nondebug_stmt (seq)))
    {
      outer_bind = as_a <gbind *> (outer_stmt);
      if (gimple_seq_first_stmt (seq) != outer_stmt
	  || gimple_seq_last_stmt (seq) != outer_stmt)
	{
	  /* If there are debug stmts before or after outer_stmt, move them
	     inside of outer_bind body.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
	  gimple_seq second_seq = NULL;
	  if (gimple_seq_first_stmt (seq) != outer_stmt
	      && gimple_seq_last_stmt (seq) != outer_stmt)
	    {
	      second_seq = gsi_split_seq_after (gsi);
	      gsi_remove (&gsi, false);
	    }
	  else if (gimple_seq_first_stmt (seq) != outer_stmt)
	    gsi_remove (&gsi, false);
	  else
	    {
	      gsi_remove (&gsi, false);
	      second_seq = seq;
	      seq = NULL;
	    }
	  gimple_seq_add_seq_without_update (&seq,
					     gimple_bind_body (outer_bind));
	  gimple_seq_add_seq_without_update (&seq, second_seq);
	  gimple_bind_set_body (outer_bind, seq);
	}
    }
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
typedef char *char_p; /* For DEF_VEC_P.  */

/* Return whether we should exclude FNDECL from instrumentation.  */

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  vec<char_p> *v;

  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 1);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  return false;
}
/* Build a call to the instrumentation function FNCODE and add it to SEQ.
   If COND_VAR is not NULL, it is a boolean variable guarding the call to
   the instrumentation function.  If STMT is not NULL, it is a statement
   to be executed just before the call to the instrumentation function.  */
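/* Illustrative sketch of the sequence this emits for the entry hook
   (hypothetical temporary names):

       return_addr = __builtin_return_address (0);
       __cyg_profile_func_enter (this_fn_addr, return_addr);

   optionally guarded by COND_VAR when -finstrument-functions-once
   is in use.  */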
static void
build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
			    tree cond_var, gimple *stmt)
{
  /* The instrumentation hooks aren't going to call the instrumented
     function and the address they receive is expected to be matchable
     against symbol addresses.  Make sure we don't create a trampoline,
     in case the current function is nested.  */
  tree this_fn_addr = build_fold_addr_expr (current_function_decl);
  TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

  tree label_true, label_false;
  if (cond_var)
    {
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      label_false = create_artificial_label (UNKNOWN_LOCATION);
      gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
				       label_true, label_false);
      gimplify_seq_add_stmt (seq, cond);
      gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
      gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
							NOT_TAKEN));
    }

  if (stmt)
    gimplify_seq_add_stmt (seq, stmt);

  tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  gcall *call = gimple_build_call (x, 1, integer_zero_node);
  tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
  gimple_call_set_lhs (call, tmp_var);
  gimplify_seq_add_stmt (seq, call);
  x = builtin_decl_implicit (fncode);
  call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
  gimplify_seq_add_stmt (seq, call);

  if (cond_var)
    gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
}
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */
17804 gimplify_function_tree (tree fndecl
)
17809 gcc_assert (!gimple_body (fndecl
));
17811 if (DECL_STRUCT_FUNCTION (fndecl
))
17812 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
17814 push_struct_function (fndecl
);
17816 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
17818 cfun
->curr_properties
|= PROP_gimple_lva
;
17820 if (asan_sanitize_use_after_scope ())
17821 asan_poisoned_variables
= new hash_set
<tree
> ();
17822 bind
= gimplify_body (fndecl
, true);
17823 if (asan_poisoned_variables
)
17825 delete asan_poisoned_variables
;
17826 asan_poisoned_variables
= NULL
;
17829 /* The tree body of the function is no longer needed, replace it
17830 with the new GIMPLE body. */
17832 gimple_seq_add_stmt (&seq
, bind
);
17833 gimple_set_body (fndecl
, seq
);
17835 /* If we're instrumenting function entry/exit, then prepend the call to
17836 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
17837 catch the exit hook. */
17838 /* ??? Add some way to ignore exceptions for this TFE. */
17839 if (flag_instrument_function_entry_exit
17840 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
17841 /* Do not instrument extern inline functions. */
17842 && !(DECL_DECLARED_INLINE_P (fndecl
)
17843 && DECL_EXTERNAL (fndecl
)
17844 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
17845 && !flag_instrument_functions_exclude_p (fndecl
))
17847 gimple_seq body
= NULL
, cleanup
= NULL
;
17851 /* If -finstrument-functions-once is specified, generate:
17853 static volatile bool C.0 = false;
17860 [call profiling enter function]
17863 without specific protection for data races. */
17864 if (flag_instrument_function_entry_exit
> 1)
17867 = build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
17869 create_tmp_var_name ("C"),
17870 boolean_type_node
);
17871 DECL_ARTIFICIAL (first_var
) = 1;
17872 DECL_IGNORED_P (first_var
) = 1;
17873 TREE_STATIC (first_var
) = 1;
17874 TREE_THIS_VOLATILE (first_var
) = 1;
17875 TREE_USED (first_var
) = 1;
17876 DECL_INITIAL (first_var
) = boolean_false_node
;
17877 varpool_node::add (first_var
);
17879 cond_var
= create_tmp_var (boolean_type_node
, "tmp_called");
17880 assign
= gimple_build_assign (cond_var
, first_var
);
17881 gimplify_seq_add_stmt (&body
, assign
);
17883 assign
= gimple_build_assign (first_var
, boolean_true_node
);
17888 cond_var
= NULL_TREE
;
17892 build_instrumentation_call (&body
, BUILT_IN_PROFILE_FUNC_ENTER
,
17895 /* If -finstrument-functions-once is specified, generate:
17898 [call profiling exit function]
17900 without specific protection for data races. */
17901 build_instrumentation_call (&cleanup
, BUILT_IN_PROFILE_FUNC_EXIT
,
17904 gimple
*tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
17905 gimplify_seq_add_stmt (&body
, tf
);
17906 gbind
*new_bind
= gimple_build_bind (NULL
, body
, NULL
);
17908 /* Replace the current function body with the body
17909 wrapped in the try/finally TF. */
17911 gimple_seq_add_stmt (&seq
, new_bind
);
17912 gimple_set_body (fndecl
, seq
);
  if (sanitize_flags_p (SANITIZE_THREAD)
      && param_tsan_instrument_func_entry_exit)
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
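/* A minimal usage sketch (illustrative, not a quote of any front end):
   given FNDECL, a FUNCTION_DECL whose DECL_SAVED_TREE is a GENERIC body,

     gimplify_function_tree (fndecl);

   leaves DECL_SAVED_TREE (fndecl) cleared and gimple_body (fndecl) set to
   the GIMPLE_BIND that now wraps the lowered statements.  */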
/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}
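/* For instance, dummy_object (integer_type_node) builds the equivalent of
   the C expression *(int *) 0: a MEM_REF whose base and offset operands are
   both a null (int *) constant.  It has the right type and mode for the
   caller below, but is only emitted on paths already known to be dead.  */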
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
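  /* For example (an illustration, not code from this file):

	 char c = va_arg (ap, char);

     reaches the branch above, because 'char' promotes to 'int' when passed
     through '...'; the user gets the warning plus the notes, and the
     expression is replaced by a trap followed by a dummy 'char' object.  */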
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
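/* Concretely, a use such as

     int i = va_arg (ap, int);

   leaves this function as (roughly) the internal call

     i = VA_ARG (&ap, 0B, 0B);

   where the two null-pointer operands carry the requested type and the
   va_list type.  The call survives until the stdarg expansion pass lowers
   it, which is why PROP_gimple_lva is cleared above.  */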
/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);

  return gimple_seq_last_stmt (*seq_p);
}
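/* A typical use (illustrative values):

     gimple_seq seq = NULL;
     tree tmp = create_tmp_var (integer_type_node, "sum");
     gimplify_assign (tmp, build_int_cst (integer_type_node, 42), &seq);

   appends the statement 'sum = 42' to SEQ and returns that new
   GIMPLE_ASSIGN so the caller can inspect or reuse it.  */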
hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}
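/* Sketch of how this hasher is consumed (mirroring lookup_tmp_var earlier
   in this file; the ELT and SLOT names here are illustrative):

     hash_table<gimplify_hasher> *htab
       = new hash_table<gimplify_hasher> (1000);
     elt_t elt, **slot;
     elt.val = val;
     slot = htab->find_slot (&elt, INSERT);

   find_slot relies on hash () to pick the bucket and on equal () to
   confirm a match, hence the checking assert tying the two together.  */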