/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "stringpool.h"
#include "omp-offload.h"
#include "tree-nested.h"
/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;
enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;
/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *,
						    bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);
/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;
/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx *c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}
/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}
/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();

  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}
/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

static void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}
/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}
/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}
/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);

  return t;
}
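
/* For illustration, a minimal sketch of what the helper above emits (the
   temporary name D.1234 is hypothetical): given an expression such as
   'a + b' in a context that needs a temporary, something like

     D.1234 = a + b;

   is appended to *PRE_P, and D.1234 (or an SSA name when gimplifying
   directly into SSA) is returned so the caller can use the temporary in
   place of the original expression.  */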
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
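
/* For illustration, a minimal sketch of why unsharing matters: if a
   front-end reuses one tree node, say a single ADDR_EXPR '&x' object, in
   two statements, gimplifying the first statement may rewrite that node
   in-place and corrupt the second statement.  The mark/copy/unmark walk
   below leaves the first reference alone (marking it TREE_VISITED) and
   replaces any later reference with a fresh copy, so each context ends
   up owning its own node.  */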
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
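
/* For illustration, a minimal sketch of the hazard the callback above
   avoids: in 's = SAVE_EXPR(i++); t = s + s;' both uses of 's' must keep
   pointing at the one SAVE_EXPR node so 'i' is incremented exactly once;
   copying the node would duplicate the side effect.  Hence SAVE_EXPR,
   TARGET_EXPR and BIND_EXPR nodes are never copied themselves, and their
   subtrees are unshared at most once.  */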
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}
/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
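
/* For illustration, a minimal sketch of voidification (the temporary name
   retval.0 is hypothetical): for a GNU statement-expression used as a
   value,

     x = ({ foo (); 42; });

   the innermost value becomes an INIT_EXPR into a fresh temporary,

     foo ();
     retval.0 = 42;
     x = retval.0;

   and every enclosing wrapper is given void type along the way.  */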
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
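
/* For illustration, the pair built above expands to GIMPLE of the form

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   bracketing a region (typically one containing VLAs) so that its stack
   space is reclaimed on exit.  The numeric suffix is hypothetical.  */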
/* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
   on POISON flag, shadow memory of a DECL variable.  The call will be
   put on location identified by IT iterator, where BEFORE flag drives
   position where the stmt will be put.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
   either poisons or unpoisons a DECL.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}
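
/* For illustration, a minimal sketch of the emitted internal call: for a
   4-byte variable 'x' going out of scope, the sequence gains

     .ASAN_MARK (POISON, &x, 4);

   and the matching call at the point of declaration uses UNPOISON, so
   use-after-scope accesses hit poisoned shadow memory.  */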
/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}
/* Generate IFN_ASAN_MARK internal call for all VARIABLES
   depending on POISON flag.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison,
		       gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add use_after_scope_memory attribute for the variable in order
	 to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (TREE_CODE (key) == INDIRECT_REF)
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
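
/* For illustration, a minimal sketch of the result (names hypothetical):
   a BIND_EXPR whose body creates a VLA is lowered roughly as

     saved_stack.2 = __builtin_stack_save ();
     try
       {
	 ... body, including the alloca for the VLA ...
       }
     finally
       {
	 x = {CLOBBER(eol)};
	 __builtin_stack_restore (saved_stack.2);
       }

   i.e. the cleanups collected above (clobbers, ASAN poisoning, stack
   restore) are wrapped in a GIMPLE_TRY_FINALLY around the bind body.  */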
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, add a PREDICT statement.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
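
/* For illustration, a minimal sketch of the lowering (temporary name
   hypothetical): 'return a + b;' for a value returned in registers
   becomes

     retval.3 = a + b;
     return retval.3;

   and because the temporary is cached in gimplify_ctxp->return_temp,
   every return statement in the function reuses the same slot.  */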
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
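
/* For illustration, a minimal sketch of VLA lowering (names
   hypothetical): for 'int a[n];' the function above emits roughly

     a.4 = __builtin_alloca_with_align (sz, align);

   and sets DECL_VALUE_EXPR (a) to '*a.4', so later uses of 'a' are
   rewritten into that indirection and debug info can still locate the
   object.  */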
/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}
/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: NAME of the DECL;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL).  */

static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  location_t loc = EXPR_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));

  tree init_type_node
    = build_int_cst (integer_type_node, (int) init_type);

  tree decl_name = NULL_TREE;
  if (DECL_NAME (decl))
    decl_name = build_string_literal (DECL_NAME (decl));
  else
    {
      char *decl_name_anonymous = xasprintf ("D.%u", DECL_UID (decl));
      decl_name = build_string_literal (decl_name_anonymous);
      free (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    decl_name);

  gimplify_assign (decl, call, seq_p);
}
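
/* For illustration, with -ftrivial-auto-var-init=pattern an otherwise
   uninitialized 'int i;' receives

     i = .DEFERRED_INIT (4, 2, &"i"[0]);

   where 4 is the size in bytes, 2 encodes the init type and the third
   argument names the decl for diagnostics; expansion later replaces the
   call with the actual block initialization.  */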
/* Generate padding initialization for automatic variable DECL.
   C guarantees that brace-init with fewer initializers than members
   aggregate will initialize the rest of the aggregate as-if it were
   static initialization.  In turn static initialization guarantees
   that padding is initialized to zero.  So, we always initialize paddings
   to zeroes regardless of INIT_TYPE.
   To do the padding initialization, we insert a call to
   __builtin_clear_padding (&decl, 0, for_auto_init = true).
   Note, we add an additional dummy argument for __builtin_clear_padding,
   'for_auto_init' to distinguish whether this call is for automatic
   variable initialization or not.  */

static void
gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
				      gimple_seq *seq_p)
{
  tree addr_of_decl = NULL_TREE;
  tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);

  if (is_vla)
    {
      /* The temporary address variable for this vla should be
	 created in gimplify_vla_decl.  */
      gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
      gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl)) == INDIRECT_REF);
      addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
    }
  else
    {
      mark_addressable (decl);
      addr_of_decl = build_fold_addr_expr (decl);
    }

  gimple *call = gimple_build_call (fn, 2, addr_of_decl,
				    build_one_cst (TREE_TYPE (addr_of_decl)));
  gimplify_seq_add_stmt (seq_p, call);
}
/* Return true if the DECL needs to be automatically initialized by the
   compiler.  */

static bool
is_var_need_auto_init (tree decl)
{
  if (auto_var_p (decl)
      && (TREE_CODE (decl) != VAR_DECL
	  || !DECL_HARD_REGISTER (decl))
      && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
      && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
      && !OPAQUE_TYPE_P (TREE_TYPE (decl))
      && !is_empty_type (TREE_TYPE (decl)))
    return true;
  return false;
}
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;
      /* Check whether a decl has FE created VALUE_EXPR here BEFORE
	 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
	 If the decl has VALUE_EXPR that was created by FE (usually
	 C++ FE), it's a proxy variable, and FE already initialized
	 the VALUE_EXPR of it, we should not initialize it anymore.  */
      bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);

      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp
	  /* GNAT introduces temporaries to hold return values of calls in
	     initializers of variables defined in other units, so the
	     declaration of the variable is discarded completely.  We do not
	     want to issue poison calls for such dropped variables.  */
	  && (DECL_SEEN_IN_BIND_EXPR_P (decl)
	      || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      /* Clear TREE_READONLY if we really have an initialization.  */
	      if (!DECL_INITIAL (decl)
		  && !omp_privatize_by_reference (decl))
		TREE_READONLY (decl) = 0;
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
      /* When there is no explicit initializer, if the user requested,
	 we should insert an artificial initializer for this automatic
	 variable.  */
      else if (is_var_need_auto_init (decl)
	       && !decl_had_value_expr_p)
	{
	  gimple_add_init_for_auto_var (decl,
					flag_auto_var_init,
					seq_p);
	  /* The expanding of a call to the above .DEFERRED_INIT will apply
	     block initialization to the whole space covered by this variable.
	     As a result, all the paddings will be initialized to zeroes
	     for zero initialization and 0xFE byte-repeatable patterns for
	     pattern initialization.
	     In order to make the paddings as zeroes for pattern init, we
	     should add a call to __builtin_clear_padding to clear the
	     paddings to zero, compatible with CLANG.
	     We cannot insert this call if the variable is a gimple register
	     since __builtin_clear_padding will take the address of the
	     variable.  As a result, if a long double/_Complex long double
	     variable is spilled into stack later, its padding is 0xFE.  */
	  if (flag_auto_var_init == AUTO_INIT_PATTERN
	      && !is_gimple_reg (decl)
	      && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
	    gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
	}
    }

  return GS_ALL_DONE;
}
/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
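
/* For illustration, a LOOP_EXPR is lowered to

     start:
       ... body ...
       goto start;
     exit:		<-- emitted only if the body contained an EXIT_EXPR

   with the exit label threaded through gimplify_ctxp->exit_label so that
   nested loops each get their own label.  */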
/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}
/* Emit warning for the unreachable statement STMT if needed.
   Return the gimple itself when the warning is emitted, otherwise
   return NULL.  */

static gimple *
emit_warn_switch_unreachable (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_GOTO
      && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
      && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
    /* Don't warn for compiler-generated gotos.  These occur
       in Duff's devices, for example.  */
    return NULL;
  else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
	   && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
	       || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
		   && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
	       || (is_gimple_assign (stmt)
		   && gimple_assign_single_p (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
		   && gimple_call_internal_p (
			SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
			IFN_DEFERRED_INIT))))
    /* Don't warn for compiler-generated initializations for
       -ftrivial-auto-var-init.
       There are 3 cases:
	 case 1: a call to .DEFERRED_INIT;
	 case 2: a call to __builtin_clear_padding with the 2nd argument
		 present and non-zero;
	 case 3: a gimple assign store right after the call to .DEFERRED_INIT
		 that has the LHS of .DEFERRED_INIT as the RHS as following:
		   _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
		   i1 = _1.  */
    return NULL;
  else
    warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
		"statement will never be executed");
  return stmt;
}
/* Callback for walk_gimple_seq.  */

static tree
warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
					 bool *handled_ops_p,
					 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  bool unreachable_issued = wi->info != NULL;

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  if (warn_switch_unreachable && !unreachable_issued)
	    wi->info = emit_warn_switch_unreachable (stmt);

	  /* Stop when auto var init warning is not on.  */
	  if (!warn_trivial_auto_var_init)
	    return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_DEBUG:
      /* Ignore these.  We may generate them before declarations that
	 are never executed.  If there's something to warn about,
	 there will be non-debug stmts too, and we'll catch those.  */
      break;

    case GIMPLE_LABEL:
      /* Stop till the first Label.  */
      return integer_zero_node;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      if (warn_trivial_auto_var_init
	  && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
	  && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
	{
	  /* Get the variable name from the 3rd argument of call.  */
	  tree var_name = gimple_call_arg (stmt, 2);
	  var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
	  const char *var_name_str = TREE_STRING_POINTER (var_name);

	  warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
		      "%qs cannot be initialized with"
		      " %<-ftrivial-auto-var-init%>",
		      var_name_str);
	  break;
	}
      /* Fall through.  */

    default:
      /* Check the first "real" statement (not a decl/lexical scope/...),
	 issue a warning if needed.  */
      if (warn_switch_unreachable && !unreachable_issued)
	wi->info = emit_warn_switch_unreachable (stmt);
      /* Stop when auto var init warning is not on.  */
      if (!warn_trivial_auto_var_init)
	return integer_zero_node;
      break;
    }
  return NULL_TREE;
}
/* Possibly warn about unreachable statements between switch's controlling
   expression and the first case.  Also warn when -ftrivial-auto-var-init
   cannot initialize an automatic variable in that situation.
   SEQ is the body of a switch expression.  */

static void
maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
{
  if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
      /* This warning doesn't play well with Fortran when optimizations
         are on.  */
      || lang_GNU_Fortran ()
      || seq == NULL)
    return;

  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
}

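/* As an illustration, the walk above diagnoses e.g.

     switch (i)
       {
         i = i + 1;   <- -Wswitch-unreachable: "statement will never
                         be executed"
       case 0:
         break;
       }

   where the assignment sits between the controlling expression and the
   first case label.  */
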
/* A label entry that pairs a label with a location.  */

struct label_entry
{
  tree label;
  location_t loc;
};

/* Find LABEL in the vector of label entries VEC.  */

static struct label_entry *
find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
{
  unsigned int i;
  struct label_entry *l;

  FOR_EACH_VEC_ELT (*vec, i, l)
    if (l->label == label)
      return l;

  return NULL;
}

/* Return true if LABEL, a LABEL_DECL, represents a case label
   in a vector of labels CASES.  */

static bool
case_label_p (const vec<tree> *cases, tree label)
{
  unsigned int i;
  tree l;

  FOR_EACH_VEC_ELT (*cases, i, l)
    if (CASE_LABEL (l) == label)
      return true;

  return false;
}

/* Find the last nondebug statement in a scope STMT.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
        gbind *bind = as_a <gbind *> (stmt);
        stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
        return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
        gtry *try_stmt = as_a <gtry *> (stmt);
        stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
        gimple *last_eval = last_stmt_in_scope (stmt);
        if (gimple_stmt_may_fallthru (last_eval)
            && (last_eval == NULL
                || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
            && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
          {
            stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
            return last_stmt_in_scope (stmt);
          }
        else
          return last_eval;
      }

    case GIMPLE_DEBUG:
      gcc_unreachable ();

    default:
      return stmt;
    }
}

/* Collect labels that may fall through into LABELS and return the statement
   preceding another case label, or a user-defined label.  Store a location
   useful to give warnings at *PREVLOC (usually the location of the returned
   statement or of its surrounding scope).  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
                            auto_vec <struct label_entry> *labels,
                            location_t *prevloc)
{
  gimple *prev = NULL;

  *prevloc = UNKNOWN_LOCATION;
  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
        {
          /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
             which starts on a GIMPLE_SWITCH and ends with a break label.
             Handle that as a single statement that can fall through.  */
          gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
          gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
          gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
          if (last
              && gimple_code (first) == GIMPLE_SWITCH
              && gimple_code (last) == GIMPLE_LABEL)
            {
              tree label = gimple_label_label (as_a <glabel *> (last));
              if (SWITCH_BREAK_LABEL_P (label))
                {
                  prev = bind;
                  gsi_next (gsi_p);
                  continue;
                }
            }
        }
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
          || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
        {
          /* Nested scope.  Only look at the last statement of
             the innermost scope.  */
          location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
          gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
          if (last)
            {
              prev = last;
              /* It might be a label without a location.  Use the
                 location of the scope then.  */
              if (!gimple_has_location (prev))
                *prevloc = bind_loc;
            }
          gsi_next (gsi_p);
          continue;
        }

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
        {
          gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
          tree false_lab = gimple_cond_false_label (cond_stmt);
          location_t if_loc = gimple_location (cond_stmt);

          /* If we have e.g.
               if (i > 1) goto <D.2259>; else goto D;
             we can't do much with the else-branch.  */
          if (!DECL_ARTIFICIAL (false_lab))
            break;

          /* Go on until the false label, then one step back.  */
          for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
            {
              gimple *stmt = gsi_stmt (*gsi_p);
              if (gimple_code (stmt) == GIMPLE_LABEL
                  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
                break;
            }

          /* Not found?  Oops.  */
          if (gsi_end_p (*gsi_p))
            break;

          /* A dead label can't fall through.  */
          if (!UNUSED_LABEL_P (false_lab))
            {
              struct label_entry l = { false_lab, if_loc };
              labels->safe_push (l);
            }

          /* Go to the last statement of the then branch.  */
          gsi_prev (gsi_p);

          /* if (i != 0) goto <D.1759>; else goto <D.1760>;
             <D.1759>:
             <stmt>;
             goto <D.1761>;
             <D.1760>:
           */
          if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
              && !gimple_has_location (gsi_stmt (*gsi_p)))
            {
              /* Look at the statement before, it might be
                 attribute fallthrough, in which case don't warn.  */
              gsi_prev (gsi_p);
              bool fallthru_before_dest
                = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
              gsi_next (gsi_p);
              tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
              if (!fallthru_before_dest)
                {
                  struct label_entry l = { goto_dest, if_loc };
                  labels->safe_push (l);
                }
            }
          /* This case is about
               if (1 != 0) goto <D.2022>; else goto <D.2023>;
               <D.2022>:
               n = n + 1; // #1
               <D.2023>:  // #2
               <D.1988>:  // #3
             where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
             through to #3.  So set PREV to #1.  */
          else if (UNUSED_LABEL_P (false_lab))
            prev = gsi_stmt (*gsi_p);

          /* And move back.  */
          gsi_next (gsi_p);
        }

      /* Remember the last statement.  Skip labels that are of no interest
         to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
          if (find_label_entry (labels, label))
            prev = gsi_stmt (*gsi_p);
        }
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
        ;
      else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
        ;
      else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
        prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
         /* Stop if we find a case or a user-defined label.  */
         && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
             || !gimple_has_location (gsi_stmt (*gsi_p))));

  if (prev && gimple_has_location (prev))
    *prevloc = gimple_location (prev);
  return prev;
}

/* Return true if the switch fallthrough warning should occur.  LABEL is
   the label statement that we're falling through to.  */

static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
         foo ();
       label:
         bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      tree l;
      while (!gsi_end_p (gsi)
             && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
             && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
             && !case_label_p (&gimplify_ctxp->case_labels, l))
        gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
        return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case label
     immediately breaks.  */
  gsi_next_nondebug (&gsi);

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi)
         && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
             || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
    gsi_next_nondebug (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
         { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}

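/* So, for instance, no warning is wanted for the terminated branch in

     case 0:
       x = 1;
     default:
       break;

   because the fall-through target does nothing before leaving the
   switch.  */
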
/* Callback for walk_gimple_seq.  */

static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
                             struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
        /* Found a label.  Skip all immediately following labels.  */
        while (!gsi_end_p (*gsi_p)
               && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
          gsi_next_nondebug (gsi_p);

        /* There might be no more statements.  */
        if (gsi_end_p (*gsi_p))
          return integer_zero_node;

        /* Vector of labels that fall through.  */
        auto_vec <struct label_entry> labels;
        location_t prevloc;
        gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);

        /* There might be no more statements.  */
        if (gsi_end_p (*gsi_p))
          return integer_zero_node;

        gimple *next = gsi_stmt (*gsi_p);
        tree label;
        /* If what follows is a label, then we may have a fallthrough.  */
        if (gimple_code (next) == GIMPLE_LABEL
            && gimple_has_location (next)
            && (label = gimple_label_label (as_a <glabel *> (next)))
            && prev != NULL)
          {
            struct label_entry *l;
            bool warned_p = false;
            auto_diagnostic_group d;
            if (!should_warn_for_implicit_fallthrough (gsi_p, label))
              ;  /* Quiet.  */
            else if (gimple_code (prev) == GIMPLE_LABEL
                     && (label = gimple_label_label (as_a <glabel *> (prev)))
                     && (l = find_label_entry (&labels, label)))
              warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
                                     "this statement may fall through");
            else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
                     /* Try to be clever and don't warn when the statement
                        can't actually fall through.  */
                     && gimple_stmt_may_fallthru (prev)
                     && prevloc != UNKNOWN_LOCATION)
              warned_p = warning_at (prevloc,
                                     OPT_Wimplicit_fallthrough_,
                                     "this statement may fall through");
            if (warned_p)
              inform (gimple_location (next), "here");

            /* Mark this label as processed so as to prevent multiple
               warnings in nested switches.  */
            FALLTHROUGH_LABEL_P (label) = true;

            /* So that next warn_implicit_fallthrough_r will start looking for
               a new sequence starting with this label.  */
            gsi_prev (gsi_p);
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}

/* Warn when a switch case falls through.  */

static void
maybe_warn_implicit_fallthrough (gimple_seq seq)
{
  if (!warn_implicit_fallthrough)
    return;

  /* This warning is meant for C/C++/ObjC/ObjC++ only.  */
  if (!(lang_GNU_C ()
        || lang_GNU_CXX ()
        || lang_GNU_OBJC ()))
    return;

  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
}

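/* A typical hit for the walk above:

     switch (i)
       {
       case 0:
         x++;           <- warning: this statement may fall through
       case 1:          <- note: here
         break;
       }
*/
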
/* Callback for walk_gimple_seq.  */

static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
                      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
        {
          gsi_remove (gsi_p, true);
          if (gsi_end_p (*gsi_p))
            {
              *static_cast<location_t *>(wi->info) = gimple_location (stmt);
              return integer_zero_node;
            }

          bool found = false;
          location_t loc = gimple_location (stmt);

          gimple_stmt_iterator gsi2 = *gsi_p;
          stmt = gsi_stmt (gsi2);
          if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
            {
              /* Go on until the artificial label.  */
              tree goto_dest = gimple_goto_dest (stmt);
              for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
                {
                  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
                      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
                           == goto_dest)
                    break;
                }

              /* Not found?  Stop.  */
              if (gsi_end_p (gsi2))
                break;

              /* Look one past it.  */
              gsi_next (&gsi2);
            }

          /* We're looking for a case label or default label here.  */
          while (!gsi_end_p (gsi2))
            {
              stmt = gsi_stmt (gsi2);
              if (gimple_code (stmt) == GIMPLE_LABEL)
                {
                  tree label = gimple_label_label (as_a <glabel *> (stmt));
                  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
                    {
                      found = true;
                      break;
                    }
                }
              else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
                ;
              else if (!is_gimple_debug (stmt))
                /* Anything else is not expected.  */
                break;
              gsi_next (&gsi2);
            }
          if (!found)
            pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
                     "a case label or default label");
        }
      break;

    default:
      break;
    }
  return NULL_TREE;
}

/* Expand all FALLTHROUGH () calls in SEQ.  */

static void
expand_FALLTHROUGH (gimple_seq *seq_p)
{
  struct walk_stmt_info wi;
  location_t loc;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &loc;
  walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
  if (wi.callback_result == integer_zero_node)
    /* We've found [[fallthrough]]; at the end of a switch, which the C++
       standard says is ill-formed; see [dcl.attr.fallthrough].  */
    pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
             "a case label or default label");
}

/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* Save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
        gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
        gimplify_ctxp->live_switch_vars = NULL;

      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
        expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
        {
          gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
          delete gimplify_ctxp->live_switch_vars;
        }
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
                                            &default_case);

      bool add_bind = false;
      if (!default_case)
        {
          glabel *new_default;

          default_case
            = build_case_label (NULL_TREE, NULL_TREE,
                                create_artificial_label (UNKNOWN_LOCATION));
          if (old_in_switch_expr)
            {
              SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
              add_bind = true;
            }
          new_default = gimple_build_label (CASE_LABEL (default_case));
          gimplify_seq_add_stmt (&switch_body_seq, new_default);
        }
      else if (old_in_switch_expr)
        {
          gimple *last = gimple_seq_last_stmt (switch_body_seq);
          if (last && gimple_code (last) == GIMPLE_LABEL)
            {
              tree label = gimple_label_label (as_a <glabel *> (last));
              if (SWITCH_BREAK_LABEL_P (label))
                add_bind = true;
            }
        }

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
                                         default_case, labels);
      /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
         ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
         wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
         so that we can easily find the start and end of the switch
         statement.  */
      if (add_bind)
        {
          gimple_seq bind_body = NULL;
          gimplify_seq_add_stmt (&bind_body, switch_stmt);
          gimple_seq_add_seq (&bind_body, switch_body_seq);
          gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
          gimple_set_location (bind, EXPR_LOCATION (switch_expr));
          gimplify_seq_add_stmt (pre_p, bind);
        }
      else
        {
          gimplify_seq_add_stmt (pre_p, switch_stmt);
          gimple_seq_add_seq (pre_p, switch_body_seq);
        }
      labels.release ();
    }
  else
    gcc_unreachable ();

  return GS_ALL_DONE;
}

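/* The GIMPLE produced for a simple switch therefore has the shape

     switch (D.1234) <default: <D.2>, case 1: <D.3>>
     <D.3>:
     ...case bodies...
     <D.2>:

   optionally wrapped in a GIMPLE_BIND (when ADD_BIND above is true) so
   that -Wimplicit-fallthrough can find the switch's extent.  */
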
/* Gimplify the LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
              == current_function_decl);

  tree label = LABEL_EXPR_LABEL (*expr_p);
  glabel *label_stmt = gimple_build_label (label);
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  gimplify_seq_add_stmt (pre_p, label_stmt);

  if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
                                                      NOT_TAKEN));
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
                                                      TAKEN));

  return GS_ALL_DONE;
}

/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification, in the
     diagnose_omp_blocks pass.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  tree label = CASE_LABEL (*expr_p);
  label_stmt = gimple_build_label (label);
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, label_stmt);

  if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
                                                      NOT_TAKEN));
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
                                                      TAKEN));

  return GS_ALL_DONE;
}

/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}

/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}

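/* E.g. an EXIT_EXPR with condition COND simply becomes

     if (COND) goto exit_label;

   and gimplify_loop_expr emits exit_label right after the loop's
   back-edge goto.  */
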
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
         operand 0.  */
      type_quals = TYPE_QUALS (type)
        | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
        type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
         type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}

/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *) &array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
                                  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
                    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
                    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}

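/* Concretely: given "int a[5];", the conversion "(int *) &a" is folded
   here into "&a[0]", whose type is already int *, so the NOP_EXPR
   disappears.  */
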
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
         expression, then canonicalize its type now in order to expose more
         redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
        canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
         to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
        canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
                               TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}

/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.
     Another possible culprit is size expressions for variably modified
     types which are lost in the FE or not gimplified correctly.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
      return GS_OK;
    }

  return GS_ALL_DONE;
}

/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
        {
        case INIT_EXPR:
        case MODIFY_EXPR:
        case VA_ARG_EXPR:
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          return;

        default:
          break;
        }
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
        {
          tree op = TREE_OPERAND (t, i);
          if (op && TREE_SIDE_EFFECTS (op))
            TREE_SIDE_EFFECTS (t) = 1;
        }
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}

/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
              : min_lval '[' val ']'
              | min_lval '.' ID
              | compound_lval '[' val ']'
              | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
        *p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
        ;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
         additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
               && gimplify_var_or_parm_decl (p) == GS_OK)
        goto restart;
      else
        break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborate nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     The base expression may contain a statement expression that
     has declarations used in size expressions, so has to be
     gimplified before gimplifying the size expressions.

     So we do this in three steps.  First we deal with variable
     bounds, sizes, and positions, then we gimplify the base and
     ensure it is memory if needed, then we deal with the annotations
     for any variables in the components and any indices, from left
     to right.  */

  bool need_non_reg = false;
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          /* Deal with the low bound and element type size and put them into
             the ARRAY_REF.  If these values are set, they have already been
             gimplified.  */
          if (TREE_OPERAND (t, 2) == NULL_TREE)
            {
              tree low = unshare_expr (array_ref_low_bound (t));
              if (!is_gimple_min_invariant (low))
                TREE_OPERAND (t, 2) = low;
            }

          if (TREE_OPERAND (t, 3) == NULL_TREE)
            {
              tree elmt_size = array_ref_element_size (t);
              if (!is_gimple_min_invariant (elmt_size))
                {
                  elmt_size = unshare_expr (elmt_size);
                  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
                  tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

                  /* Divide the element size by the alignment of the element
                     type (above).  */
                  elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
                                              elmt_size, factor);

                  TREE_OPERAND (t, 3) = elmt_size;
                }
            }
          need_non_reg = true;
        }
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          /* Set the field offset into T and gimplify it.  */
          if (TREE_OPERAND (t, 2) == NULL_TREE)
            {
              tree offset = component_ref_field_offset (t);
              if (!is_gimple_min_invariant (offset))
                {
                  offset = unshare_expr (offset);
                  tree field = TREE_OPERAND (t, 1);
                  tree factor
                    = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

                  /* Divide the offset by its alignment.  */
                  offset = size_binop_loc (loc, EXACT_DIV_EXPR,
                                           offset, factor);

                  TREE_OPERAND (t, 2) = offset;
                }
            }
          need_non_reg = true;
        }
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
                        fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* Step 2a: if we have component references we do not support on
     registers then make sure the base isn't a register.  Of course
     we can only do so if an rvalue is OK.  */
  if (need_non_reg && (fallback & fb_rvalue))
    prepare_gimple_addressable (p, pre_p);

  /* Step 3: gimplify size expressions and the indices and operands of
     ARRAY_REF.  During this loop we also remove any useless conversions.  */

  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          /* Gimplify the low bound and element type size.  */
          tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
                                is_gimple_reg, fb_rvalue);
          ret = MIN (ret, tret);

          tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
                                is_gimple_reg, fb_rvalue);
          ret = MIN (ret, tret);

          /* Gimplify the dimension.  */
          tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
                                is_gimple_val, fb_rvalue);
          ret = MIN (ret, tret);
        }
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
                                is_gimple_reg, fb_rvalue);
          ret = MIN (ret, tret);
        }

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
         TREE_SIDE_EFFECTS set which would have caused all the outer
         expressions in *EXPR_P leading to P to also have had
         TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
      ret = MIN (ret, GS_OK);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}

/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
              || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
        return ret;

      lhs = get_initialized_tmp_var (lhs, pre_p);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
        rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
                       fold_build2 (arith_code, arith_type,
                                    fold_convert (arith_type, lhs),
                                    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}

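/* For instance, "y = x++;" (postfix, value wanted) is lowered roughly as

     x.0 = x;
     x = x.0 + 1;
     y = x.0;

   with x.0 the temporary from get_initialized_tmp_var above, whereas
   prefix "++x" stays a simple MODIFY_EXPR "x = x + 1".  */
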
/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */

static void
maybe_with_size_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  tree size;

  /* If we've already wrapped this or the type is error_mark_node, we can't do
     anything.  */
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
      || type == error_mark_node)
    return;

  /* If the size isn't known or is a constant, we have nothing to do.  */
  size = TYPE_SIZE_UNIT (type);
  if (!size || poly_int_tree_p (size))
    return;

  /* Otherwise, make a WITH_SIZE_EXPR.  */
  size = unshare_expr (size);
  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
}

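/* E.g. for an object of variably modified type such as "char buf[n]",
   uses of BUF become WITH_SIZE_EXPR <buf, SIZE> where SIZE is the type's
   non-constant TYPE_SIZE_UNIT, so later passes still know how many bytes
   the object occupies.  */
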
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
   gimplified to an SSA name.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
              bool allow_ssa)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
        {
          tree init = TARGET_EXPR_INITIAL (*arg_p);
          if (init
              && !VOID_TYPE_P (TREE_TYPE (init)))
            *arg_p = init;
        }
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
}

/* Don't fold inside offloading or taskreg regions: it can break code by
   adding decl references that weren't in the source.  We'll do it during
   the omplower pass instead.  */

static bool
maybe_fold_stmt (gimple_stmt_iterator *gsi)
{
  struct gimplify_omp_ctx *ctx;
  for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
    if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
      return false;
    else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
      return false;
  /* Delay folding of builtins until the IL is in consistent state
     so the diagnostic machinery can do a better job.  */
  if (gimple_call_builtin_p (gsi_stmt (*gsi)))
    return false;
  return fold_stmt (gsi);
}

/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      if (ifn == IFN_ASSUME)
        {
          if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
            {
              /* If the [[assume (cond)]]; condition is simple
                 enough and can be evaluated unconditionally
                 without side-effects, expand it as
                 if (!cond) __builtin_unreachable ();  */
              tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
              *expr_p = build3 (COND_EXPR, void_type_node,
                                CALL_EXPR_ARG (*expr_p, 0), void_node,
                                build_call_expr_loc (EXPR_LOCATION (*expr_p),
                                                     fndecl, 0));
              return GS_OK;
            }
          /* If not optimizing, ignore the assumptions.  */
          if (!optimize || seen_error ())
            {
              *expr_p = NULL_TREE;
              return GS_ALL_DONE;
            }
          /* Temporarily, until gimple lowering, transform
               .ASSUME (cond);
             into:
               [[assume (guard)]]
               {
                 guard = cond;
               }
             such that gimple lowering can outline the condition into
             a separate function easily.  */
          tree guard = create_tmp_var (boolean_type_node);
          *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
                            CALL_EXPR_ARG (*expr_p, 0));
          *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
          push_gimplify_context ();
          gimple_seq body = NULL;
          gimple *g = gimplify_and_return_first (*expr_p, &body);
          pop_gimplify_context (g);
          g = gimple_build_assume (guard, body);
          gimple_set_location (g, loc);
          gimplify_seq_add_stmt (pre_p, g);
          *expr_p = NULL_TREE;
          return GS_ALL_DONE;
        }

      for (i = 0; i < nargs; i++)
        {
          gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                        EXPR_LOCATION (*expr_p));
          vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
        }

      gcall *call = gimple_build_call_internal_vec (ifn, vargs);
      gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        /* If the call has been built for a variable-sized object, then we
           want to restore the stack level when the enclosing BIND_EXPR is
           exited to reclaim the allocated space; otherwise, we precisely
           need to do the opposite and preserve the latest stack level.  */
        if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
          gimplify_ctxp->save_stack = true;
        else
          gimplify_ctxp->keep_stack = true;
        break;

      case BUILT_IN_VA_START:
        {
          builtin_va_start_p = TRUE;
          if (call_expr_nargs (*expr_p) < 2)
            {
              error ("too few arguments to function %<va_start%>");
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }

          if (fold_builtin_next_arg (*expr_p, true))
            {
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }
          break;
        }

      case BUILT_IN_EH_RETURN:
        cfun->calls_eh_return = true;
        break;

      case BUILT_IN_CLEAR_PADDING:
        if (call_expr_nargs (*expr_p) == 1)
          {
            /* Remember the original type of the argument in an internal
               dummy second argument, as in GIMPLE pointer conversions are
               useless.  Also mark this call as not for automatic
               initialization in the internal dummy third argument.  */
            p = CALL_EXPR_ARG (*expr_p, 0);
            *expr_p
              = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
                                     build_zero_cst (TREE_TYPE (p)));
            return GS_OK;
          }
        break;

      default:
        ;
      }
  if (fndecl && fndecl_built_in_p (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  if (flag_openmp
      && fndecl
      && cfun
      && (cfun->curr_properties & PROP_gimple_any) == 0)
    {
      tree variant = omp_resolve_declare_variant (fndecl);
      if (variant != fndecl)
        CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                       is_gimple_call_addr, fb_rvalue);

  if (ret == GS_ERROR)
    return GS_ERROR;

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
          && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
        {
          tree call = *expr_p;

          --nargs;
          *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
                                          CALL_EXPR_FN (call),
                                          nargs, CALL_EXPR_ARGP (call));

          /* Copy all CALL_EXPR flags, location and block, except
             CALL_EXPR_VA_ARG_PACK flag.  */
          CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
          CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
          CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
            = CALL_EXPR_RETURN_SLOT_OPT (call);
          CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
          SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

          /* Set CALL_EXPR_VA_ARG_PACK.  */
          CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
        }
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     vars there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                                EXPR_LOCATION (*expr_p), ! returns_twice);

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
        CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
        {
          enum gimplify_status t;
          t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
                            EXPR_LOCATION (*expr_p), ! returns_twice);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
          /* An infinite loop is considered a side effect.  */
          && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
        TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
         have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p, fnptrtype);
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
                                     CALL_EXPR_FN (*expr_p));

  return ret;
}

/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
                 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

         if (a); else goto no;
         if (b) goto yes; else goto no;
         (no:)  */

      if (false_label_p == NULL)
        false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

         if (a) goto yes;
         if (b) goto yes; else goto no;
         (yes:)  */

      if (true_label_p == NULL)
        true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
         if (a)
           if (b) goto yes; else goto no;
         else
           if (c) goto yes; else goto no;

         Don't do this if one of the arms has void type, which can happen
         in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
         location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
                     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
                                      false_label_p, locus),
                     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
                                      false_label_p, new_locus));
    }
  else
    {
      expr = build3 (COND_EXPR, void_type_node, pred,
                     build_and_jump (true_label_p),
                     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}

/* If EXPR is a GOTO_EXPR, return it.  If it is a STATEMENT_LIST, skip
   any of its leading DEBUG_BEGIN_STMTs and recurse on the subsequent
   statement, if it is the last one.  Otherwise, return NULL.  */

static tree
find_goto (tree expr)
{
  if (!expr)
    return NULL;

  if (TREE_CODE (expr) == GOTO_EXPR)
    return expr;

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return NULL;

  tree_stmt_iterator i = tsi_start (expr);

  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    tsi_next (&i);

  if (!tsi_one_before_end_p (i))
    return NULL;

  return find_goto (tsi_stmt (i));
}

/* Same as find_goto, except that it returns NULL if the destination
   is not a LABEL_DECL.  */

static inline tree
find_goto_label (tree expr)
{
  tree dest = find_goto (expr);
  if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
    return dest;
  return NULL;
}


/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (rexpr_has_location (last))
	    SET_EXPR_LOCATION (t, rexpr_location (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
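
/* Illustrative example (not from the original sources): on input of the
   form

       if (a && b)
	 f ();
       else
	 g ();

   the function above yields GENERIC roughly equivalent to

       if (a); else goto no;
       if (b); else goto no;
       f (); goto end;
     no:
       g ();
     end:

   Label names are artificial; the real labels are created by
   create_artificial_label or hijacked from goto arms.  */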

/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_unroll_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	case annot_expr_parallel_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
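
/* Illustrative example (not from the original sources): for a comparison
   such as "x < y" whose type the front end left as int, gimple_boolify
   rewrites TREE_TYPE to boolean_type_node in place; for a plain non-boolean
   operand "x" used in a boolean context, it instead returns roughly
   fold_convert_loc (loc, boolean_type_node, x).  */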

/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}

/* Return true if evaluating EXPR could trap.
   EXPR is GENERIC, while tree_could_trap_p can be called
   only on GIMPLE.  */

bool
generic_expr_could_trap_p (tree expr)
{
  unsigned i, n;

  if (!expr || is_gimple_val (expr))
    return false;

  if (!EXPR_P (expr) || tree_could_trap_p (expr))
    return true;

  n = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < n; i++)
    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
      return true;

  return false;
}
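
/* Illustrative example (not from the original sources): an expression like
   "b / c" or "*p" may trap on division or on the memory access, so for a
   GENERIC tree such as "a ? b / c : 0" the recursive walk above finds the
   trapping subexpression and returns true, which in turn disables the
   unconditional-evaluation shortcut in gimplify_cond_expr below.  */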

/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else		or		else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
		       is_gimple_condexpr_for_cond, fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_true)
	  || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_false)
	  || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
  copy_warning (cond_stmt, COND_EXPR_COND (expr));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	{
	  /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
	     handling that label_cont == label_true can be only reached
	     through fallthrough from { code; }.  */
	  if (integer_zerop (COND_EXPR_COND (expr)))
	    UNUSED_LABEL_P (label_true) = 1;
	  label_cont = label_true;
	}
      else
	{
	  bool then_side_effects
	    = (TREE_OPERAND (expr, 1)
	       && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      /* For if (0) { non-side-effect-code } else { code }
		 tell -Wimplicit-fallthrough handling that label_cont can
		 be only reached through fallthrough from { code }.  */
	      if (integer_zerop (COND_EXPR_COND (expr)))
		{
		  UNUSED_LABEL_P (label_true) = 1;
		  if (!then_side_effects)
		    UNUSED_LABEL_P (label_cont) = 1;
		}

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
	 tell -Wimplicit-fallthrough handling that label_false can be only
	 reached through fallthrough from { code }.  */
      if (integer_nonzerop (COND_EXPR_COND (expr))
	  && (TREE_OPERAND (expr, 2) == NULL_TREE
	      || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
	UNUSED_LABEL_P (label_false) = 1;
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
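
/* Illustrative example (not from the original sources): a value-producing
   COND_EXPR such as

       tmp = p ? a : b;

   is lowered by the function above into GIMPLE roughly of the form

       if (p) goto L_true; else goto L_false;
     L_true:
       iftmp = a;
       goto L_cont;
     L_false:
       iftmp = b;
     L_cont:
       tmp = iftmp;

   where iftmp is the "iftmp" temporary created by create_tmp_var; the
   exact labels and temporaries differ in practice.  */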

/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
   to be marked addressable.

   We cannot rely on such an expression being directly markable if a temporary
   has been created by the gimplification.  In this case, we create another
   temporary and initialize it with a copy, which will become a store after we
   mark it addressable.  This can happen if the front-end passed us something
   that it could not mark addressable yet, like a Fortran pass-by-reference
   parameter (int) floatvar.  */

static void
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
{
  while (handled_component_p (*expr_p))
    expr_p = &TREE_OPERAND (*expr_p, 0);

  /* Do not allow an SSA name as the temporary.  */
  if (is_gimple_reg (*expr_p))
    *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
}

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
  gimple_call_set_alloca_for_var (gs, true);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
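
/* Illustrative example (not from the original sources): an aggregate
   assignment of variable size

       to = from;

   becomes roughly

       __builtin_memcpy (&to, &from, size);

   and when WANT_VALUE is set the call's return value (a pointer to the
   destination) is re-dereferenced to stand in for the assignment's value.  */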

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
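
/* Illustrative example (not from the original sources): zero-initialization
   from an empty CONSTRUCTOR of variable size, e.g.

       to = (struct S) {};

   becomes roughly

       __builtin_memset (&to, 0, size);  */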

/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}

/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p,
			    gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
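
/* Illustrative example (not from the original sources): in an assignment
   such as

       a = (struct S) { .x = f (), .y = a.x };

   the rhs reference to a.x overlaps the lhs, so the walk above detects the
   overlap and the value is pre-evaluated into a temporary, giving roughly

       D.1 = a.x;
       a.x = f ();
       a.y = D.1;

   The temporary name D.1 is artificial.  */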

/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    {
      if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
	  != GS_ERROR)
	gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
    }

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}

/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed beforehand.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning an empty type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of empty type fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (!TREE_SIDE_EFFECTS (value)
	  && TREE_CODE (purpose) == FIELD_DECL
	  && is_empty_type (TREE_TYPE (purpose)))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}

/* Return the appropriate RHS predicate for this LHS.  */

gimple_predicate
rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg (lhs))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Return the initial guess for an appropriate RHS predicate for this LHS,
   before the LHS has been gimplified.  */

static gimple_predicate
initial_rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg_type (TREE_TYPE (lhs)))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   artificial variable.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && !TREE_THIS_VOLATILE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
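
/* Illustrative example (not from the original sources): for

       int *p = (int []) { 1, 2, 3 };

   the compound literal's DECL_EXPR is emitted first and the anonymous
   artificial variable then replaces the literal, roughly

       int D.1[3] = { 1, 2, 3 };
       int *p = D.1;

   The name D.1 is artificial.  */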

/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}

/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;
  bool cleared = false;
  bool is_empty_ctor = false;
  bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	/* Use readonly data for initializers of this or smaller size
	   regardless of the num_nonzero_elements / num_unique_nonzero_elements
	   ratio.  */
	const HOST_WIDE_INT min_unique_size = 64;
	/* If num_nonzero_elements / num_unique_nonzero_elements ratio
	   is smaller than this, use readonly data.  */
	const int unique_nonzero_ratio = 8;
	/* True if a single access of the object must be ensured.  This is the
	   case if the target is volatile, the type is non-addressable and more
	   than one field need to be assigned.  */
	const bool ensure_single_access
	  = TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && vec_safe_length (elts) > 1;
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	HOST_WIDE_INT num_unique_nonzero_elements;
	bool complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;

	    /* The var will be initialized and so appear on lhs of
	       assignment, it can't be TREE_READONLY anymore.  */
	    if (VAR_P (object))
	      TREE_READONLY (object) = 0;

	    is_empty_ctor = true;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_unique_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a loss to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && VAR_P (object)
	    && !DECL_REGISTER (object)
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
	    /* For ctors that have many repeated nonzero elements
	       represented through RANGE_EXPRs, prefer initializing
	       those through runtime loops over copies of large amounts
	       of data from readonly data section.  */
	    && (num_unique_nonzero_elements
		> num_nonzero_elements / unique_nonzero_ratio
		|| ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
		    <= (unsigned HOST_WIDE_INT) min_unique_size)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;

	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* The var will be initialized and so appear on lhs of
	   assignment, it can't be TREE_READONLY anymore.  */
	if (VAR_P (object) && !notify_temp_creation)
	  TREE_READONLY (object) = 0;

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.cc to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p)
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else if (ensure_single_access && num_nonzero_elements == 0)
	  /* If a single access to the target must be ensured and all elements
	     are zero, then it's optimal to clear whatever their number.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */
	if (valid_const_initializer
	    && complete_p
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		/* For ctors that have many repeated nonzero elements
		   represented through RANGE_EXPRs, prefer initializing
		   those through runtime loops over copies of large amounts
		   of data from readonly data section.  */
		&& (num_unique_nonzero_elements
		    > num_nonzero_elements / unique_nonzero_ratio
		    || size <= min_unique_size)
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If a single access to the target must be ensured and there are
	   nonzero elements or the zero elements are not assigned en masse,
	   initialize the target from a temporary.  */
	if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;

	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    break;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	recompute_constructor_flags (ctor);

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }
	  }

	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p
      /* If the type is an empty type, we don't need to emit the
	 assignment.  */
      && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
	lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      ret = GS_OK;
    }
  else
    {
      *expr_p = NULL;
      ret = GS_ALL_DONE;
    }

  /* If the user requests to initialize automatic variables, we
     should initialize paddings inside the variable.  Add a call to
     __builtin_clear_padding (&object, 0, for_auto_init = true) to
     initialize paddings of object always to zero regardless of
     INIT_TYPE.  Note, we will not insert this call if the aggregate
     variable has been completely cleared already or it's initialized
     with an empty constructor.  We cannot insert this call if the
     variable is a gimple register since __builtin_clear_padding will take
     the address of the variable.  As a result, if a long double/_Complex long
     double variable will be spilled into stack later, its padding cannot
     be cleared with __builtin_clear_padding.  We should clear its padding
     when it is spilled into memory.  */
  if (is_init_expr
      && !is_gimple_reg (object)
      && clear_padding_type_may_have_padding_p (type)
      && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
	  || !AGGREGATE_TYPE_P (type))
      && is_var_need_auto_init (object))
    gimple_add_padding_init_for_auto_var (object, false, pre_p);

  return ret;
}
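
/* Illustrative example (not from the original sources): the strategies
   above mean that, schematically,

       struct S s = { 0, 0, ..., 0, 1 };

   with many zeros may become "memset (&s, 0, sizeof s); s.last = 1;",
   while a large constant initializer of a read-only local may instead be
   promoted to a static object and block-copied.  The exact choice depends
   on CLEAR_RATIO, can_move_by_pieces and the size/uniqueness ratios
   computed above.  */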

/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  return gimple_fold_indirect_ref (t);
}

/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor and not volatile, do the direct assignment from
	     the constructor, but only if the target is not volatile either
	     since this latter assignment might end up being done on a per
	     field basis.  However, if the target is volatile and the type
	     is aggregate and non-addressable, gimplify_init_constructor
	     knows that it needs to ensure a single access to the target
	     and it will return GS_OK only in this case.  */
	  if (TREE_READONLY (*from_p)
	      && DECL_INITIAL (*from_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
	      && !TREE_THIS_VOLATILE (*from_p)
	      && (!TREE_THIS_VOLATILE (*to_p)
		  || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
		      && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
	    /* If we have code like

		 *(const A*)(A*)&x

	       where the type of "x" is a (possibly cv-qualified variant
	       of "A"), treat the entire expression as identical to "x".
	       This kind of code arises in C++ when an object is bound
	       to a const reference, and if "x" is a TARGET_EXPR we want
	       to take advantage of the optimization below.  But not if
	       the type is TREE_ADDRESSABLE; then C++17 says that the
	       TARGET_EXPR needs to be a temporary.  */
	    if (tree t
		= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
	      {
		bool volatile_p = TREE_THIS_VOLATILE (*from_p);
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	  break;

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& (TREE_CODE (*expr_p) != MODIFY_EXPR
		    || !TARGET_EXPR_NO_ELIDE (*from_p))
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;

	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					   false);
	  return ret;

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV.  */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		       && (!is_gimple_variable (*to_p)
			   || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case NOP_EXPR:
	  /* Pull out compound literal expressions from a NOP_EXPR.
	     Those are created in the C FE to drop qualifiers during
	     lvalue conversion.  */
	  if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
	      && tree_ssa_useless_type_conversion (*from_p))
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      ret = GS_OK;
	      changed = true;
	    }
	  break;

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  break;

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}

/* Return true if T looks like a valid GIMPLE statement.  */

static bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OACC_PARALLEL:
    case OACC_KERNELS:
    case OACC_SERIAL:
    case OACC_DATA:
    case OACC_HOST_DATA:
    case OACC_DECLARE:
    case OACC_UPDATE:
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OACC_CACHE:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OACC_LOOP:
    case OMP_SCAN:
    case OMP_SCOPE:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_MASKED:
    case OMP_TASKGROUP:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
    case OMP_TASKLOOP:
    case OMP_TEAMS:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}
/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.

   IMPORTANT NOTE: This promotion is performed by introducing a load of the
   other, unmodified part of the complex object just before the total store.
   As a consequence, if the object is still uninitialized, an undefined value
   will be loaded into a register, which may result in a spurious exception
   if the register is floating-point and the value happens to be a signaling
   NaN for example.  Then the fully-fledged complex operations lowering pass
   followed by a DCE pass are necessary in order to fix things up.  */

static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
				   bool want_value)
{
  enum tree_code code, ocode;
  tree lhs, rhs, new_rhs, other, realpart, imagpart;

  lhs = TREE_OPERAND (*expr_p, 0);
  rhs = TREE_OPERAND (*expr_p, 1);
  code = TREE_CODE (lhs);
  lhs = TREE_OPERAND (lhs, 0);

  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
  other = build1 (ocode, TREE_TYPE (rhs), lhs);
  suppress_warning (other);
  other = get_formal_tmp_var (other, pre_p);

  realpart = code == REALPART_EXPR ? rhs : other;
  imagpart = code == REALPART_EXPR ? other : rhs;

  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
  else
    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);

  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
  *expr_p = (want_value) ? rhs : NULL_TREE;

  return GS_ALL_DONE;
}
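
/* Editor's illustrative sketch (temporary names are made up): for a complex
   float C kept in a register, the partial store
       __real__ C = x;
   is promoted to the total store
       D.1 = IMAGPART_EXPR <C>;
       C = COMPLEX_EXPR <x, D.1>;
   so that C can remain a gimple register instead of living in memory.  */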
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value);
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
	{
	  tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
					       pre_p, post_p);
	  *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
	}
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Convert initialization from an empty variable-size CONSTRUCTOR to
     memset.  */
  if (TREE_TYPE (*from_p) != error_mark_node
      && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
      && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
      && TREE_CODE (*from_p) == CONSTRUCTOR
      && CONSTRUCTOR_NELTS (*from_p) == 0)
    {
      maybe_with_size_expr (from_p);
      gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
      return gimplify_modify_expr_to_memset (expr_p,
					     TREE_OPERAND (*from_p, 1),
					     want_value, pre_p);
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for,
     for example, gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For empty types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (is_empty_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ??? Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  if (fndecl
	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ??? This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	copy_warning (assign, *from_p);
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
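
/* Editor's illustrative sketch (approximate GIMPLE; temporary names are
   made up):
       a = foo (b + c);
   gimplifies into
       t1 = b + c;
       a = foo (t1);   <-- a GIMPLE_CALL with 'a' as its LHS
   i.e. the call becomes a toplevel GIMPLE_CALL rather than a GIMPLE_ASSIGN
   carrying a CALL_EXPR on its RHS.  */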
/* Gimplify a comparison between two variable-sized objects.  Do this
   with a call to BUILT_IN_MEMCMP.  */

static enum gimplify_status
gimplify_variable_sized_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree t, arg, dest, src, expr;

  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
  arg = unshare_expr (arg);
  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
  src = build_fold_addr_expr_loc (loc, op1);
  dest = build_fold_addr_expr_loc (loc, op0);
  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
  t = build_call_expr_loc (loc, t, 3, dest, src, arg);

  expr
    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
  SET_EXPR_LOCATION (expr, loc);
  *expr_p = expr;

  return GS_OK;
}
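
/* Editor's illustrative sketch: for two variable-sized objects X and Y,
   the comparison 'X == Y' is rewritten roughly as
       __builtin_memcmp (&X, &Y, size) == 0
   where SIZE is the unshared TYPE_SIZE_UNIT of X with any PLACEHOLDER_EXPRs
   substituted.  */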
/* Gimplify a comparison between two aggregate objects of integral scalar
   mode as a comparison between the bitwise equivalent scalar values.  */

static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);

  tree type = TREE_TYPE (op0);
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);

  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);

  *expr_p
    = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);

  return GS_OK;
}
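
/* Editor's illustrative sketch (assumed 4-byte struct): for
   'struct S { short a, b; } x, y;' whose TYPE_MODE is SImode, 'x == y'
   becomes roughly
       VIEW_CONVERT_EXPR <unsigned int> (x) == VIEW_CONVERT_EXPR <unsigned int> (y)
   i.e. a single scalar comparison of the bitwise images.  */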
/* Gimplify an expression sequence.  This function gimplifies each
   expression and rewrites the original expression with the last
   expression of the sequence in GIMPLE form.

   PRE_P points to the list where the side effects for all the
   expressions in the sequence will be emitted.

   WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */

static enum gimplify_status
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree t = *expr_p;

  do
    {
      tree *sub_p = &TREE_OPERAND (t, 0);

      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
	gimplify_compound_expr (sub_p, pre_p, false);
      else
	gimplify_stmt (sub_p, pre_p);

      t = TREE_OPERAND (t, 1);
    }
  while (TREE_CODE (t) == COMPOUND_EXPR);

  *expr_p = t;
  if (want_value)
    return GS_OK;
  else
    {
      gimplify_stmt (expr_p, pre_p);
      return GS_ALL_DONE;
    }
}
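
/* Editor's illustrative sketch: 'x = (a (), b (), c)' emits the inner
   operands as statements,
       a ();
       b ();
   and rewrites *EXPR_P to the final operand 'c', which the caller then
   uses as the RHS of the assignment.  */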
/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
   gimplify.  After gimplification, EXPR_P will point to a new temporary
   that holds the original value of the SAVE_EXPR node.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  enum gimplify_status ret = GS_ALL_DONE;
  tree val;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  if (TREE_TYPE (val) == error_mark_node)
    return GS_ERROR;

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
    {
      /* The operand may be a void-valued expression.  It is
	 being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
	{
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_stmt, fb_none);
	  val = NULL;
	}
      else
	/* The temporary may not be an SSA name as later abnormal and EH
	   control flow may invalidate use/def domination.  When in SSA
	   form then assume there are no such issues and SAVE_EXPRs only
	   appear via GENERIC foldings.  */
	val = get_initialized_tmp_var (val, pre_p, post_p,
				       gimple_in_ssa_p (cfun));

      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
    }

  *expr_p = val;

  return ret;
}
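
/* Editor's illustrative sketch (made-up temporary name): the first
   occurrence of SAVE_EXPR <n * 4> emits 't1 = n * 4;' and rewrites the
   operand to t1; every later occurrence of the same, now resolved,
   SAVE_EXPR reuses t1 instead of re-evaluating the expression.  */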
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

   PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF
	  || (TREE_CODE (op0) == MEM_REF
	      && integer_zerop (TREE_OPERAND (op0, 1))))
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
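
/* Editor's illustrative sketches: '&*p' collapses back to 'p' (modulo a
   possible cv-qualification conversion), and '&VIEW_CONVERT_EXPR <T> (x)'
   becomes '(T *) &x', i.e. the address of the underlying object with the
   conversion moved to the pointer.  */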
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* If we can't make copies, we can only accept memory.
	 Similarly for VLAs.  */
      tree outtype = TREE_TYPE (TREE_VALUE (link));
      if (outtype != error_mark_node
	  && (TREE_ADDRESSABLE (outtype)
	      || !COMPLETE_TYPE_P (outtype)
	      || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory output %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tree orig = TREE_VALUE (link);
      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  if (orig != error_mark_node)
	    error ("invalid lvalue in %<asm%> output %d", i);
	  ret = tret;
	}

      /* If the constraint does not allow memory make sure we gimplify
	 it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      else if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  size_t len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      tree intype = TREE_TYPE (TREE_VALUE (link));
      if (intype != error_mark_node
	  && (TREE_ADDRESSABLE (intype)
	      || !COMPLETE_TYPE_P (intype)
	      || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (inputv != error_mark_node)
		error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			  "memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
      gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
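
/* Editor's illustrative sketch of the in/out splitting above (assumed
   operand numbering): an output operand with constraint "+r" is rewritten
   as a plain output "=r" plus a matching input whose constraint is the
   operand's own number, e.g. "0", so the optimizers see separate input and
   output operands.  */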
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
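
/* Editor's illustrative sketch: a body sequence of
       stmt1;
       WCE <cleanup>;   (not the last statement)
       stmt2;
   is rewritten into
       stmt1;
       try { stmt2; } finally { cleanup; }
   while a WCE that ends the sequence simply emits its cleanup inline.  */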
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   If FORCE_UNCOND is true perform the cleanup unconditionally; this is
   only valid for clobbers.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups, which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f (A ()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A (temp); flag = 1; val = f (temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A (temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimple_wce_set_cleanup_eh_only (wce, eh_only);

	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  suppress_warning (var, OPT_Wuninitialized);
	}
    }
  else
    {
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      gimple_seq init_pre_p = NULL;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (!poly_int_tree_p (DECL_SIZE (temp)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
	  /* FIXME: this is correct only when the size of the type does
	     not depend on expressions evaluated in init.  */
	  gimplify_vla_decl (temp, &init_pre_p);
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			     fb_none);
      else
	{
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			       fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once.  */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, &init_pre_p);

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables
	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
	      && !TREE_STATIC (temp)
	      && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}

      gimple_seq_add_seq (pre_p, init_pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
			     CLEANUP_EH_ONLY (targ), pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
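
/* Editor's illustrative sketch (made-up slot name): a TARGET_EXPR <D.1, f ()>
   used as a statement adds D.1 to the temporaries, gimplifies the
   initialization as 'D.1 = f ();' into *PRE_P, pushes any
   TARGET_EXPR_CLEANUP for D.1, and replaces the expression by D.1.  */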
/* Gimplification of expression trees.  */

/* Gimplify an expression which appears at statement context.  The
   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
   NULL, a new sequence is allocated.

   Return true if we actually added a statement to the queue.  */

bool
gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
{
  gimple_seq_node last;

  last = gimple_seq_last (*seq_p);
  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
  return last != gimple_seq_last (*seq_p);
}
/* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
   to CTX.  If entries already exist, force them to be some flavor of private.
   If there is no enclosing parallel, do nothing.  */

void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  if (n->value & GOVD_SHARED)
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else if (n->value & GOVD_MAP)
	    n->value |= GOVD_MAP_TO_ONLY;
	  else
	    return;
	}
      else if ((ctx->region_type & ORT_TARGET) != 0)
	{
	  if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
	}
      else if (ctx->region_type != ORT_WORKSHARE
	       && ctx->region_type != ORT_TASKGROUP
	       && ctx->region_type != ORT_SIMD
	       && ctx->region_type != ORT_ACC
	       && !(ctx->region_type & ORT_TARGET_DATA))
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}
/* Similarly for each of the type sizes of TYPE.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  if (ctx->privatized_types->add (type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}
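
/* Editor's illustrative sketch: for a VLA type such as 'int a[n][m]', the
   gimplified size temporaries (the bounds derived from n and m, plus the
   computed TYPE_SIZE and TYPE_SIZE_UNIT trees) are each run through
   omp_firstprivatize_variable, so the enclosing parallel receives them as
   firstprivate.  */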
/* Add an entry for DECL in the OMP context CTX with FLAGS.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  The exception is a shared
     clause: there is nothing privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		    && (flags & GOVD_FIRSTPRIVATE))
		   || (ctx->region_type == ORT_TARGET_DATA
		       && (flags & GOVD_DATA_SHARE_CLASS) == 0))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (t && DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
	{
	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
	  if (n != NULL)
	    {
	      /* Ignore local variables and explicitly declared clauses.  */
	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
		break;
	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
		{
		  /* According to the OpenACC spec, such a reduction variable
		     should already have a copy map on a kernels construct,
		     verify that here.  */
		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
			      && (n->value & GOVD_MAP));
		}
	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
		{
		  /* Remove firstprivate and make it a copy map.  */
		  n->value &= ~GOVD_FIRSTPRIVATE;
		  n->value |= GOVD_MAP;
		}
	    }
	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
				 GOVD_MAP | GOVD_SEEN);
	      break;
	    }
	  outer_ctx = outer_ctx->outer_context;
	}
    }
}
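
/* Editor's illustrative sketch: privatizing a VLA 'int a[n]' inside a
   parallel adds the pointer replacement variable behind its
   DECL_VALUE_EXPR with a matching PRIVATE/FIRSTPRIVATE class, and
   registers DECL_SIZE, DECL_SIZE_UNIT and the type sizes as
   firstprivate.  */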
/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0
	|| octx->order_concurrent)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    if (octx->order_concurrent)
	      {
		error ("threadprivate variable %qE used in a region with"
		       " %<order(concurrent)%> clause", DECL_NAME (decl));
		inform (octx->location, "enclosing region");
	      }
	    else
	      {
		error ("threadprivate variable %qE used in target region",
		       DECL_NAME (decl));
		inform (octx->location, "enclosing target region");
	      }
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	    if (decl2)
	      splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
	  }
      }

  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      inform (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
/* Return true if global var DECL is device resident.  */

static bool
device_resident_p (tree decl)
{
  tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));

  if (!attr)
    return false;

  for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
    {
      tree c = TREE_VALUE (t);
      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
	return true;
    }

  return false;
}
/* Return true if DECL has an ACC DECLARE attribute.  */

static bool
is_oacc_declared (tree decl)
{
  tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
  tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
  return declared != NULL_TREE;
}
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not
   effectively.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (ctx->region_type & ORT_TASK)
    {
      tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);

      /* The event handle specified by a detach clause should always be
	 firstprivate, regardless of the current default.  */
      if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
	kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
    }
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;
  else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
    default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  /* For C/C++ default({,first}private), variables with static storage duration
     declared in a namespace or global scope and referenced in the construct
     must be explicitly specified, i.e. it acts as default(none).  */
  else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
	    || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	   && VAR_P (decl)
	   && is_global_var (decl)
	   && (DECL_FILE_SCOPE_P (decl)
	       || (DECL_CONTEXT (decl)
		   && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
	   && !lang_GNU_Fortran ())
    default_kind = OMP_CLAUSE_DEFAULT_NONE;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
	  rtype = "taskloop";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	inform (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
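
/* Editor's illustrative sketch: with '#pragma omp parallel default(none)',
   a reference to an unlisted variable 'v' reports
       'v' not specified in enclosing 'parallel'
   whereas default(firstprivate) simply ORs GOVD_FIRSTPRIVATE into the
   returned flags.  */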
/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.  */

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;
  bool is_private = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  if (omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* For Fortran COMMON blocks, only used variables in those blocks are
     transferred and remapped.  The block itself will have a private clause
     to avoid transferring the data twice.
     The hook evaluates to false by default.  For a variable in Fortran's
     COMMON or EQUIVALENCE block, it returns 'true' (as we have shared=false)
     -- as only the variables in such a COMMON/EQUIVALENCE block shall be
     privatized, not the whole block.  For C++ and Fortran, it can also be
     true under certain other conditions, if DECL_HAS_VALUE_EXPR.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl)
      && !is_private)
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      rkind = "kernels";

      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'copy'.  */
	flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
    case ORT_ACC_SERIAL:
      rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";

      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (on_device || declared)
	flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'firstprivate'.  */
	flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
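
/* Editor's illustrative sketch: in an OpenACC parallel region with no
   applicable clause, a scalar 'float s' defaults to firstprivate while an
   array 'float a[100]' defaults to a map ('present_or_copy', or 'present'
   under default(present)).  */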
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  if (ctx->region_type & ORT_ACC)
	    /* For OpenACC, defer expansion of value to avoid transferring
	       privatized common block data instead of implicitly or
	       explicitly transferred variables which are in common blocks.  */
	    ;
	  else
	    {
	      tree value = get_base_address (DECL_VALUE_EXPR (decl));

	      if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
		return omp_notice_threadprivate_variable (ctx, decl, value);
	    }
	}

      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      if (ctx->region_type & ORT_ACC)
	/* For OpenACC, as remarked above, defer expansion.  */
	shared = false;
      else
	shared = true;

      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      if (n == NULL)
	{
	  unsigned nflags = flags;
	  if ((ctx->region_type & ORT_ACC) == 0)
	    {
	      bool is_declare_target = false;
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target)
		{
		  int gdmk;
		  enum omp_clause_defaultmap_kind kind;
		  if (lang_hooks.decls.omp_allocatable_p (decl))
		    gdmk = GDMK_ALLOCATABLE;
		  else if (lang_hooks.decls.omp_scalar_target_p (decl))
		    gdmk = GDMK_SCALAR_TARGET;
		  else if (lang_hooks.decls.omp_scalar_p (decl, false))
		    gdmk = GDMK_SCALAR;
		  else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				   == POINTER_TYPE)))
		    gdmk = GDMK_POINTER;
		  else
		    gdmk = GDMK_AGGREGATE;
		  kind = lang_hooks.decls.omp_predetermined_mapping (decl);
		  if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
		    {
		      if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
			nflags |= GOVD_FIRSTPRIVATE;
		      else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
			nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
		      else
			gcc_unreachable ();
		    }
		  else if (ctx->defaultmap[gdmk] == 0)
		    {
		      tree d = lang_hooks.decls.omp_report_decl (decl);
		      error ("%qE not specified in enclosing %<target%>",
			     DECL_NAME (d));
		      inform (ctx->location, "enclosing %<target%>");
		    }
		  else if (ctx->defaultmap[gdmk]
			   & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
		    nflags |= ctx->defaultmap[gdmk];
		  else
		    {
		      gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
		      nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
		    }
		}
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
			error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      goto found_outer;
		    }
		}
	    }

	  if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
			  | GOVD_MAP_ALLOC_ONLY)) == flags)
	    {
	      tree type = TREE_TYPE (decl);

	      if (gimplify_omp_ctxp->target_firstprivatize_array_bases
		  && omp_privatize_by_reference (decl))
		type = TREE_TYPE (type);
	      if (!omp_mappable_type (type))
		{
		  error ("%qD referenced in target region does not have "
			 "a mappable type", decl);
		  nflags |= GOVD_MAP | GOVD_EXPLICIT;
		}
	      else
		{
		  if ((ctx->region_type & ORT_ACC) != 0)
		    nflags = oacc_default_clause (ctx, decl, flags);
		  else
		    nflags |= GOVD_MAP;
		}
	    }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_TASKGROUP
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
     lb, b or incr expressions, those shouldn't be turned into simd arrays.  */
  if (ctx->region_type == ORT_SIMD
      && ctx->in_for_exprs
      && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
	  == GOVD_PRIVATE))
    flags &= ~GOVD_SEEN;

  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (ctx->region_type & ORT_ACC)
    /* For OpenACC, as remarked above, defer expansion.  */
    shared = false;
  else
    shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_TASKGROUP
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
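
/* For illustration (hypothetical source, not an existing test case): the
   checks above reject conflicting explicit data-sharing clauses on a loop's
   iteration variable, e.g.

     int i;
     #pragma omp parallel for firstprivate (i)
     for (i = 0; i < n; i++)
       ;

   is diagnosed with "iteration variable 'i' should not be firstprivate".  */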
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too,
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return false;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	{
	  if ((ctx->region_type & ORT_TARGET_DATA) != 0
	      || n == NULL
	      || (n->value & GOVD_MAP) == 0)
	    continue;
	  return false;
	}

      if (n != NULL)
	{
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  return (n->value & GOVD_SHARED) == 0;
	}
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_TASKGROUP
	 || ctx->region_type == ORT_SIMD
	 || ctx->region_type == ORT_ACC);

  return false;
}
/* Callback for walk_tree to find a DECL_EXPR for the given DECL.  */

static tree
find_decl_expr (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
    return t;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Gimplify the affinity clause but effectively ignore it.
   Generate:
     var = begin;
     if ((step > 1) ? var <= end : var > end)
       locator_var_expr;  */

static void
gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
{
  tree last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree label = NULL_TREE;
  tree *last_body = NULL;
  for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
      {
	tree t = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (t) == TREE_LIST
	    && TREE_PURPOSE (t)
	    && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
	  {
	    if (TREE_VALUE (t) == null_pointer_node)
	      continue;
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		if (last_bind)
		  {
		    append_to_statement_list (label, last_body);
		    gimplify_and_add (last_bind, pre_p);
		    last_bind = NULL_TREE;
		  }
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
				       is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
					   is_gimple_val, fb_rvalue)
			    == GS_ERROR))
		      return;
		  }
		last_iter = TREE_PURPOSE (t);
		tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
		last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
				    NULL, block);
		last_body = &BIND_EXPR_BODY (last_bind);
		tree cond = NULL_TREE;
		location_t loc = OMP_CLAUSE_LOCATION (c);
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    tree var = TREE_VEC_ELT (it, 0);
		    tree begin = TREE_VEC_ELT (it, 1);
		    tree end = TREE_VEC_ELT (it, 2);
		    tree step = TREE_VEC_ELT (it, 3);
		    loc = DECL_SOURCE_LOCATION (var);
		    tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
					   var, begin);
		    append_to_statement_list_force (tem, last_body);

		    tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
						  step, build_zero_cst (TREE_TYPE (step)));
		    tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
						  var, end);
		    tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
						  var, end);
		    cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
					     cond1, cond2, cond3);
		    if (cond)
		      cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
					      boolean_type_node, cond, cond1);
		    else
		      cond = cond1;
		  }
		tree cont_label = create_artificial_label (loc);
		label = build1 (LABEL_EXPR, void_type_node, cont_label);
		tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
					    void_node,
					    build_and_jump (&cont_label));
		append_to_statement_list_force (tem, last_body);
	      }
	    if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
	      {
		append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
					  last_body);
		TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
	      }
	    if (error_operand_p (TREE_VALUE (t)))
	      return;
	    append_to_statement_list_force (TREE_VALUE (t), last_body);
	    TREE_VALUE (t) = null_pointer_node;
	  }
	else
	  {
	    if (last_bind)
	      {
		append_to_statement_list (label, last_body);
		gimplify_and_add (last_bind, pre_p);
		last_bind = NULL_TREE;
	      }
	    if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	      {
		gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			       NULL, is_gimple_val, fb_rvalue);
		OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      }
	    if (error_operand_p (OMP_CLAUSE_DECL (c)))
	      return;
	    if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			       is_gimple_lvalue, fb_lvalue) == GS_ERROR)
	      return;
	    gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
	  }
      }
  if (last_bind)
    {
      append_to_statement_list (label, last_body);
      gimplify_and_add (last_bind, pre_p);
    }
  return;
}
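
/* For illustration (hypothetical source): an affinity clause with an
   iterator, which the function above gimplifies and then discards, looks
   like

     #pragma omp task affinity (iterator (i = 0 : n : 1) : a[i])

   Each iterator is a TREE_VEC whose first elements are (var, begin, end,
   step), with the enclosing BLOCK in element 5, matching the TREE_VEC_ELT
   indices used above.  */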
/* If *LIST_P contains any OpenMP depend clauses with iterators,
   lower all the depend clauses by populating the corresponding depend
   array.  Returns 0 if there are no such depend clauses, or
   2 if all depend clauses should be removed, 1 otherwise.  */
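
/* For illustration (hypothetical source): a depend clause with an iterator,
   which is what produces the TREE_LIST/TREE_VEC form handled below, looks
   like

     #pragma omp task depend (iterator (i = 0 : n), in : a[i])

   The generated code stores one address per iteration of the iterator into
   the depend array.  */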
static int
gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
{
  tree c;
  gimple *g;
  size_t n[5] = { 0, 0, 0, 0, 0 };
  bool unused[5];
  tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
  tree last_iter = NULL_TREE, last_count = NULL_TREE;
  size_t i, j;
  location_t first_loc = UNKNOWN_LOCATION;

  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
	switch (OMP_CLAUSE_DEPEND_KIND (c))
	  {
	  case OMP_CLAUSE_DEPEND_IN:
	    i = 2;
	    break;
	  case OMP_CLAUSE_DEPEND_OUT:
	  case OMP_CLAUSE_DEPEND_INOUT:
	    i = 0;
	    break;
	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	    i = 1;
	    break;
	  case OMP_CLAUSE_DEPEND_DEPOBJ:
	    i = 3;
	    break;
	  case OMP_CLAUSE_DEPEND_INOUTSET:
	    i = 4;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	tree t = OMP_CLAUSE_DECL (c);
	if (first_loc == UNKNOWN_LOCATION)
	  first_loc = OMP_CLAUSE_LOCATION (c);
	if (TREE_CODE (t) == TREE_LIST
	    && TREE_PURPOSE (t)
	    && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
	  {
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		tree tcnt = size_one_node;
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
				       is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
					   is_gimple_val, fb_rvalue)
			    == GS_ERROR))
		      return 2;
		    tree var = TREE_VEC_ELT (it, 0);
		    tree begin = TREE_VEC_ELT (it, 1);
		    tree end = TREE_VEC_ELT (it, 2);
		    tree step = TREE_VEC_ELT (it, 3);
		    tree orig_step = TREE_VEC_ELT (it, 4);
		    tree type = TREE_TYPE (var);
		    tree stype = TREE_TYPE (step);
		    location_t loc = DECL_SOURCE_LOCATION (var);
		    tree endmbegin;
		    /* Compute count for this iterator as
		       orig_step > 0
		       ? (begin < end ? (end - begin + (step - 1)) / step : 0)
		       : (begin > end ? (end - begin + (step + 1)) / step : 0)
		       and compute product of those for the entire depend
		       clause.  */
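		    /* Worked example of the count formula above
		       (illustrative numbers): for begin = 0, end = 10,
		       step = 3 and orig_step > 0 we get
		       (10 - 0 + (3 - 1)) / 3 = 4, i.e. the iterations
		       i = 0, 3, 6, 9.  */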
		    if (POINTER_TYPE_P (type))
		      endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
						   ssizetype, end, begin);
		    else
		      endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
						   end, begin);
		    tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
						   step,
						   build_int_cst (stype, 1));
		    tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
						   build_int_cst (stype, 1));
		    tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
						unshare_expr (endmbegin),
						stepm1);
		    pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
					   pos, step);
		    tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
						endmbegin, stepp1);
		    if (TYPE_UNSIGNED (stype))
		      {
			neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
			step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
		      }
		    neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
					   neg, step);
		    tree cond = fold_build2_loc (loc, LT_EXPR,
						 boolean_type_node,
						 begin, end);
		    pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
					   build_int_cst (stype, 0));
		    cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
					    end, begin);
		    neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
					   build_int_cst (stype, 0));
		    tree osteptype = TREE_TYPE (orig_step);
		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
					    orig_step,
					    build_int_cst (osteptype, 0));
		    tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
						cond, pos, neg);
		    cnt = fold_convert_loc (loc, sizetype, cnt);
		    if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
				       fb_rvalue) == GS_ERROR)
		      return 2;
		    tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
		  }
		if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
				   fb_rvalue) == GS_ERROR)
		  return 2;
		last_iter = TREE_PURPOSE (t);
		last_count = tcnt;
	      }
	    if (counts[i] == NULL_TREE)
	      counts[i] = last_count;
	    else
	      counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
					  PLUS_EXPR, counts[i], last_count);
	  }
	else
	  n[i]++;
      }
  for (i = 0; i < 5; i++)
    if (counts[i])
      break;
  if (i == 5)
    return 0;

  tree total = size_zero_node;
  for (i = 0; i < 5; i++)
    {
      unused[i] = counts[i] == NULL_TREE && n[i] == 0;
      if (counts[i] == NULL_TREE)
	counts[i] = size_zero_node;
      if (n[i])
	counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
      if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
			 fb_rvalue) == GS_ERROR)
	return 2;
      total = size_binop (PLUS_EXPR, total, counts[i]);
    }

  if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
      == GS_ERROR)
    return 2;
  bool is_old = unused[1] && unused[3] && unused[4];
  tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
			     size_int (is_old ? 1 : 4));
  if (!unused[4])
    totalpx = size_binop (PLUS_EXPR, totalpx,
			  size_binop (MULT_EXPR, counts[4], size_int (2)));
  tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
  tree array = create_tmp_var_raw (type);
  TREE_ADDRESSABLE (array) = 1;
  if (!poly_int_tree_p (totalpx))
    {
      if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
	gimplify_type_sizes (TREE_TYPE (array), pre_p);
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
	}
      gimplify_vla_decl (array, pre_p);
    }
  else
    gimple_add_tmp_var (array);
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  tree tem;
  if (!is_old)
    {
      tem = build2 (MODIFY_EXPR, void_type_node, r,
		    build_int_cst (ptr_type_node, 0));
      gimplify_and_add (tem, pre_p);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  tem = build2 (MODIFY_EXPR, void_type_node, r,
		fold_convert (ptr_type_node, total));
  gimplify_and_add (tem, pre_p);
  for (i = 1; i < (is_old ? 2 : 4); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
		  NULL_TREE, NULL_TREE);
      tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
      gimplify_and_add (tem, pre_p);
    }

  tree cnts[6];
  for (j = 4; j; j--)
    if (!unused[j - 1])
      break;
  for (i = 0; i < 5; i++)
    {
      if (i && (i >= j || unused[i - 1]))
	{
	  cnts[i] = cnts[i - 1];
	  continue;
	}
      cnts[i] = create_tmp_var (sizetype);
      if (i == 0)
	g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
      else
	{
	  tree t;
	  if (is_old)
	    t = size_binop (PLUS_EXPR, counts[0], size_int (2));
	  else
	    t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
	  if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
	      == GS_ERROR)
	    return 2;
	  g = gimple_build_assign (cnts[i], t);
	}
      gimple_seq_add_stmt (pre_p, g);
    }
  if (unused[4])
    cnts[5] = NULL_TREE;
  else
    {
      tree t = size_binop (PLUS_EXPR, total, size_int (5));
      cnts[5] = create_tmp_var (sizetype);
      g = gimple_build_assign (cnts[i], t);
      gimple_seq_add_stmt (pre_p, g);
    }
  last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree *last_body = NULL;
  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
	switch (OMP_CLAUSE_DEPEND_KIND (c))
	  {
	  case OMP_CLAUSE_DEPEND_IN:
	    i = 2;
	    break;
	  case OMP_CLAUSE_DEPEND_OUT:
	  case OMP_CLAUSE_DEPEND_INOUT:
	    i = 0;
	    break;
	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	    i = 1;
	    break;
	  case OMP_CLAUSE_DEPEND_DEPOBJ:
	    i = 3;
	    break;
	  case OMP_CLAUSE_DEPEND_INOUTSET:
	    i = 4;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	tree t = OMP_CLAUSE_DECL (c);
	if (TREE_CODE (t) == TREE_LIST
	    && TREE_PURPOSE (t)
	    && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
	  {
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		if (last_bind)
		  gimplify_and_add (last_bind, pre_p);
		tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
		last_bind = build3 (BIND_EXPR, void_type_node,
				    BLOCK_VARS (block), NULL, block);
		TREE_SIDE_EFFECTS (last_bind) = 1;
		SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
		tree *p = &BIND_EXPR_BODY (last_bind);
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    tree var = TREE_VEC_ELT (it, 0);
		    tree begin = TREE_VEC_ELT (it, 1);
		    tree end = TREE_VEC_ELT (it, 2);
		    tree step = TREE_VEC_ELT (it, 3);
		    tree orig_step = TREE_VEC_ELT (it, 4);
		    tree type = TREE_TYPE (var);
		    location_t loc = DECL_SOURCE_LOCATION (var);
		    /* Emit:
		       var = begin;
		       goto cond_label;
		       beg_label:
		       ...
		       var = var + step;
		       cond_label:
		       if (orig_step > 0) {
			 if (var < end) goto beg_label;
		       } else {
			 if (var > end) goto beg_label;
		       }
		       for each iterator, with inner iterators added to
		       the "..." above.  */
		    tree beg_label = create_artificial_label (loc);
		    tree cond_label = NULL_TREE;
		    tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
				      var, begin);
		    append_to_statement_list_force (tem, p);
		    tem = build_and_jump (&cond_label);
		    append_to_statement_list_force (tem, p);
		    tem = build1 (LABEL_EXPR, void_type_node, beg_label);
		    append_to_statement_list (tem, p);
		    tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
					NULL_TREE, NULL_TREE);
		    TREE_SIDE_EFFECTS (bind) = 1;
		    SET_EXPR_LOCATION (bind, loc);
		    append_to_statement_list_force (bind, p);
		    if (POINTER_TYPE_P (type))
		      tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
					var, fold_convert_loc (loc, sizetype,
							       step));
		    else
		      tem = build2_loc (loc, PLUS_EXPR, type, var, step);
		    tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
				      var, tem);
		    append_to_statement_list_force (tem, p);
		    tem = build1 (LABEL_EXPR, void_type_node, cond_label);
		    append_to_statement_list (tem, p);
		    tree cond = fold_build2_loc (loc, LT_EXPR,
						 boolean_type_node,
						 var, end);
		    tree pos
		      = fold_build3_loc (loc, COND_EXPR, void_type_node,
					 cond, build_and_jump (&beg_label),
					 void_node);
		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
					    var, end);
		    tree neg
		      = fold_build3_loc (loc, COND_EXPR, void_type_node,
					 cond, build_and_jump (&beg_label),
					 void_node);
		    tree osteptype = TREE_TYPE (orig_step);
		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
					    orig_step,
					    build_int_cst (osteptype, 0));
		    tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
					   cond, pos, neg);
		    append_to_statement_list_force (tem, p);
		    p = &BIND_EXPR_BODY (bind);
		  }
		last_body = p;
	      }
	    last_iter = TREE_PURPOSE (t);
	    if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
	      {
		append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
							0), last_body);
		TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
	      }
	    if (error_operand_p (TREE_VALUE (t)))
	      return 2;
	    if (TREE_VALUE (t) != null_pointer_node)
	      TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
	    if (i == 4)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			    NULL_TREE, NULL_TREE);
		tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
				  NULL_TREE, NULL_TREE);
		r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, r, r2);
		append_to_statement_list_force (tem, last_body);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, cnts[i],
				  size_binop (PLUS_EXPR, cnts[i],
					      size_int (1)));
		append_to_statement_list_force (tem, last_body);
		i = 5;
	      }
	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			NULL_TREE, NULL_TREE);
	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			      void_type_node, r, TREE_VALUE (t));
	    append_to_statement_list_force (tem, last_body);
	    if (i == 5)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array,
			    size_binop (PLUS_EXPR, cnts[i], size_int (1)),
			    NULL_TREE, NULL_TREE);
		tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, r, tem);
		append_to_statement_list_force (tem, last_body);
	      }
	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			      void_type_node, cnts[i],
			      size_binop (PLUS_EXPR, cnts[i],
					  size_int (1 + (i == 5))));
	    append_to_statement_list_force (tem, last_body);
	    TREE_VALUE (t) = null_pointer_node;
	  }
	else
	  {
	    if (last_bind)
	      {
		gimplify_and_add (last_bind, pre_p);
		last_bind = NULL_TREE;
	      }
	    if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	      {
		gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			       NULL, is_gimple_val, fb_rvalue);
		OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      }
	    if (error_operand_p (OMP_CLAUSE_DECL (c)))
	      return 2;
	    if (OMP_CLAUSE_DECL (c) != null_pointer_node)
	      OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	    if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			       is_gimple_val, fb_rvalue) == GS_ERROR)
	      return 2;
	    if (i == 4)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			    NULL_TREE, NULL_TREE);
		tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
				  NULL_TREE, NULL_TREE);
		r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
		tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
		gimplify_and_add (tem, pre_p);
		g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
							      cnts[i],
							      size_int (1)));
		gimple_seq_add_stmt (pre_p, g);
		i = 5;
	      }
	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			NULL_TREE, NULL_TREE);
	    tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
	    gimplify_and_add (tem, pre_p);
	    if (i == 5)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array,
			    size_binop (PLUS_EXPR, cnts[i], size_int (1)),
			    NULL_TREE, NULL_TREE);
		tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
		tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
		gimplify_and_add (tem, pre_p);
	      }
	    g = gimple_build_assign (cnts[i],
				     size_binop (PLUS_EXPR, cnts[i],
						 size_int (1 + (i == 5))));
	    gimple_seq_add_stmt (pre_p, g);
	  }
      }
  if (last_bind)
    gimplify_and_add (last_bind, pre_p);
  tree cond = boolean_false_node;
  if (is_old)
    {
      if (!unused[0])
	cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
			   size_binop_loc (first_loc, PLUS_EXPR, counts[0],
					   size_int (2)));
      if (!unused[2])
	cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
			   build2_loc (first_loc, NE_EXPR, boolean_type_node,
				       cnts[2],
				       size_binop_loc (first_loc, PLUS_EXPR,
						       unshare_expr (total),
						       size_int (2))));
    }
  else
    {
      tree prev = size_int (5);
      for (i = 0; i < 5; i++)
	{
	  if (unused[i])
	    continue;
	  prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
	  cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
			     build2_loc (first_loc, NE_EXPR, boolean_type_node,
					 cnts[i], unshare_expr (prev)));
	}
    }
  tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
		    build_call_expr_loc (first_loc,
					 builtin_decl_explicit (BUILT_IN_TRAP),
					 0), void_node);
  gimplify_and_add (tem, pre_p);
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *list_p;
  *list_p = c;
  return 1;
}
/* For a set of mappings describing an array section pointed to by a struct
   (or derived type, etc.) component, create an "alloc" or "release" node to
   insert into a list following a GOMP_MAP_STRUCT node.  For some types of
   mapping (e.g. Fortran arrays with descriptors), an additional mapping may
   be created that is inserted into the list of mapping nodes attached to the
   directive being processed -- not part of the sorted list of nodes after
   GOMP_MAP_STRUCT.

   CODE is the code of the directive being processed.  GRP_START and GRP_END
   are the first and last of two or three nodes representing this array section
   mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
   GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER).  EXTRA_NODE is
   filled with the additional node described above, if needed.

   This function does not add the new nodes to any lists itself.  It is the
   responsibility of the caller to do that.  */

static tree
build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
			     tree *extra_node)
{
  enum gomp_map_kind mkind
    = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;

  gcc_assert (grp_start != grp_end);

  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
  OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
  OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
  tree grp_mid = NULL_TREE;
  if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
    grp_mid = OMP_CLAUSE_CHAIN (grp_start);

  if (grp_mid
      && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
      && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
    OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
  else
    OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);

  if (grp_mid
      && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
      && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
	  || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
    {
      tree c3
	= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
      OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
      OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
      OMP_CLAUSE_CHAIN (c3) = NULL_TREE;

      *extra_node = c3;
    }
  else
    *extra_node = NULL_TREE;

  return c2;
}
/* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
   and set *BITPOSP and *POFFSETP to the bit offset of the access.
   If BASE_REF is non-NULL and the containing object is a reference, set
   *BASE_REF to that reference before dereferencing the object.
   If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
   has array type, else return NULL.  */

static tree
extract_base_bit_offset (tree base, poly_int64 *bitposp,
			 poly_offset_int *poffsetp)
{
  tree offset;
  poly_int64 bitsize, bitpos;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  poly_offset_int poffset;

  STRIP_NOPS (base);

  base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
			      &unsignedp, &reversep, &volatilep);

  STRIP_NOPS (base);

  if (offset && poly_int_tree_p (offset))
    {
      poffset = wi::to_poly_offset (offset);
      offset = NULL_TREE;
    }
  else
    poffset = 0;

  if (maybe_ne (bitpos, 0))
    poffset += bits_to_bytes_round_down (bitpos);

  *bitposp = bitpos;
  *poffsetp = poffset;

  return base;
}
/* Used for topological sorting of mapping groups.  UNVISITED means we haven't
   started processing the group yet.  The TEMPORARY mark is used when we first
   encounter a group on a depth-first traversal, and the PERMANENT mark is used
   when we have processed all the group's children (i.e. all the base pointers
   referred to by the group's mapping nodes, recursively).  */

enum omp_tsort_mark {
  UNVISITED,
  TEMPORARY,
  PERMANENT
};

/* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
   clause.  */

struct omp_mapping_group {
  tree *grp_start;
  tree grp_end;
  omp_tsort_mark mark;
  /* If we've removed the group but need to reindex, mark the group as
     deleted.  */
  bool deleted;
  struct omp_mapping_group *sibling;
  struct omp_mapping_group *next;
};

DEBUG_FUNCTION void
debug_mapping_group (omp_mapping_group *grp)
{
  tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
  OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
  debug_generic_expr (*grp->grp_start);
  OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
}
/* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
   isn't one.  */

static tree
omp_get_base_pointer (tree expr)
{
  while (TREE_CODE (expr) == ARRAY_REF
	 || TREE_CODE (expr) == COMPONENT_REF)
    expr = TREE_OPERAND (expr, 0);

  if (TREE_CODE (expr) == INDIRECT_REF
      || (TREE_CODE (expr) == MEM_REF
	  && integer_zerop (TREE_OPERAND (expr, 1))))
    {
      expr = TREE_OPERAND (expr, 0);
      while (TREE_CODE (expr) == COMPOUND_EXPR)
	expr = TREE_OPERAND (expr, 1);
      if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
	expr = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == SAVE_EXPR)
	expr = TREE_OPERAND (expr, 0);
      STRIP_NOPS (expr);
      return expr;
    }

  return NULL_TREE;
}
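
/* For illustration (hypothetical expression): for an access such as
   p->a[3].b, the loop above strips the COMPONENT_REFs and ARRAY_REF down to
   the INDIRECT_REF *p, and the base pointer returned is p.  */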
/* Remove COMPONENT_REFS and indirections from EXPR.  */

static tree
omp_strip_components_and_deref (tree expr)
{
  while (TREE_CODE (expr) == COMPONENT_REF
	 || TREE_CODE (expr) == INDIRECT_REF
	 || (TREE_CODE (expr) == MEM_REF
	     && integer_zerop (TREE_OPERAND (expr, 1)))
	 || TREE_CODE (expr) == POINTER_PLUS_EXPR
	 || TREE_CODE (expr) == COMPOUND_EXPR)
    if (TREE_CODE (expr) == COMPOUND_EXPR)
      expr = TREE_OPERAND (expr, 1);
    else
      expr = TREE_OPERAND (expr, 0);

  STRIP_NOPS (expr);

  return expr;
}
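
/* For illustration (hypothetical expressions): unlike omp_get_base_pointer
   above, this strips through everything, so (*p).a.b and p->a.b both reduce
   to the declaration p.  */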
static tree
omp_strip_indirections (tree expr)
{
  while (TREE_CODE (expr) == INDIRECT_REF
	 || (TREE_CODE (expr) == MEM_REF
	     && integer_zerop (TREE_OPERAND (expr, 1))))
    expr = TREE_OPERAND (expr, 0);

  return expr;
}
/* An attach or detach operation depends directly on the address being
   attached/detached.  Return that address, or none if there are no
   attachments/detachments.  */

static tree
omp_get_attachment (omp_mapping_group *grp)
{
  tree node = *grp->grp_start;

  switch (OMP_CLAUSE_MAP_KIND (node))
    {
    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
    case GOMP_MAP_TOFROM:
    case GOMP_MAP_ALWAYS_FROM:
    case GOMP_MAP_ALWAYS_TO:
    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_FORCE_FROM:
    case GOMP_MAP_FORCE_TO:
    case GOMP_MAP_FORCE_TOFROM:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_RELEASE:
    case GOMP_MAP_DELETE:
    case GOMP_MAP_FORCE_ALLOC:
      if (node == grp->grp_end)
	return NULL_TREE;

      node = OMP_CLAUSE_CHAIN (node);
      if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
	{
	  gcc_assert (node != grp->grp_end);
	  node = OMP_CLAUSE_CHAIN (node);
	}
      if (node)
	switch (OMP_CLAUSE_MAP_KIND (node))
	  {
	  case GOMP_MAP_POINTER:
	  case GOMP_MAP_ALWAYS_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
	    return NULL_TREE;

	  case GOMP_MAP_ATTACH_DETACH:
	  case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	    return OMP_CLAUSE_DECL (node);

	  default:
	    internal_error ("unexpected mapping node");
	  }
      return error_mark_node;

    case GOMP_MAP_TO_PSET:
      gcc_assert (node != grp->grp_end);
      node = OMP_CLAUSE_CHAIN (node);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
	return OMP_CLAUSE_DECL (node);
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      node = OMP_CLAUSE_CHAIN (node);
      if (!node || *grp->grp_start == grp->grp_end)
	return OMP_CLAUSE_DECL (*grp->grp_start);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	return OMP_CLAUSE_DECL (*grp->grp_start);
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_STRUCT:
    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_IF_PRESENT:
    case GOMP_MAP_FIRSTPRIVATE:
    case GOMP_MAP_FIRSTPRIVATE_INT:
    case GOMP_MAP_USE_DEVICE_PTR:
    case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
      return NULL_TREE;

    default:
      internal_error ("unexpected mapping node");
    }

  return error_mark_node;
}
/* Given a pointer START_P to the start of a group of related (e.g. pointer)
   mappings, return the chain pointer to the end of that group in the list.  */

static tree *
omp_group_last (tree *start_p)
{
  tree c = *start_p, nc, *grp_last_p = start_p;

  gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);

  nc = OMP_CLAUSE_CHAIN (c);

  if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
    return grp_last_p;

  switch (OMP_CLAUSE_MAP_KIND (c))
    {
    default:
      while (nc
	     && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	     && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		 || (OMP_CLAUSE_MAP_KIND (nc)
		     == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
		 || (OMP_CLAUSE_MAP_KIND (nc)
		     == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
	{
	  grp_last_p = &OMP_CLAUSE_CHAIN (c);
	  c = nc;
	  tree nc2 = OMP_CLAUSE_CHAIN (nc);
	  if (nc2
	      && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (nc)
		  == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
	      && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
	    {
	      grp_last_p = &OMP_CLAUSE_CHAIN (nc);
	      c = nc2;
	      nc2 = OMP_CLAUSE_CHAIN (nc2);
	    }
	  nc = nc2;
	}
      break;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      /* This is a weird artifact of how directives are parsed: bare attach or
	 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
	 FIRSTPRIVATE_REFERENCE node.  FIXME.  */
      if (nc
	  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
	      || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
	grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_TO_PSET:
      if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
	      || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
	grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_STRUCT:
      {
	unsigned HOST_WIDE_INT num_mappings
	  = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
	if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
	    || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
	  grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
	for (unsigned i = 0; i < num_mappings; i++)
	  grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
      }
      break;
    }

  return grp_last_p;
}
/* Walk through LIST_P, and return a list of groups of mappings found (e.g.
   OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
   associated GOMP_MAP_POINTER mappings).  Return a vector of omp_mapping_group
   if we have more than one such group, else return NULL.  */

static void
omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
			     tree gather_sentinel)
{
  for (tree *cp = list_p;
       *cp && *cp != gather_sentinel;
       cp = &OMP_CLAUSE_CHAIN (*cp))
    {
      if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
	continue;

      tree *grp_last_p = omp_group_last (cp);
      omp_mapping_group grp;

      grp.grp_start = cp;
      grp.grp_end = *grp_last_p;
      grp.mark = UNVISITED;
      grp.sibling = NULL;
      grp.deleted = false;
      grp.next = NULL;
      groups->safe_push (grp);

      cp = grp_last_p;
    }
}

static vec<omp_mapping_group> *
omp_gather_mapping_groups (tree *list_p)
{
  vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();

  omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);

  if (groups->length () > 0)
    return groups;
  else
    {
      delete groups;
      return NULL;
    }
}
/* A pointer mapping group GRP may define a block of memory starting at some
   base address, and maybe also define a firstprivate pointer or firstprivate
   reference that points to that block.  The return value is a node containing
   the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
   If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
   return the number of consecutive chained nodes in CHAINED.  */

static tree
omp_group_base (omp_mapping_group *grp, unsigned int *chained,
		tree *firstprivate)
{
  tree node = *grp->grp_start;

  *firstprivate = NULL_TREE;
  *chained = 1;

  switch (OMP_CLAUSE_MAP_KIND (node))
    {
    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
    case GOMP_MAP_TOFROM:
    case GOMP_MAP_ALWAYS_FROM:
    case GOMP_MAP_ALWAYS_TO:
    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_FORCE_FROM:
    case GOMP_MAP_FORCE_TO:
    case GOMP_MAP_FORCE_TOFROM:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_RELEASE:
    case GOMP_MAP_DELETE:
    case GOMP_MAP_FORCE_ALLOC:
    case GOMP_MAP_IF_PRESENT:
      if (node == grp->grp_end)
	return node;

      node = OMP_CLAUSE_CHAIN (node);
      if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
	{
	  if (node == grp->grp_end)
	    return *grp->grp_start;
	  node = OMP_CLAUSE_CHAIN (node);
	}
      if (node)
	switch (OMP_CLAUSE_MAP_KIND (node))
	  {
	  case GOMP_MAP_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
	    *firstprivate = OMP_CLAUSE_DECL (node);
	    return *grp->grp_start;

	  case GOMP_MAP_ALWAYS_POINTER:
	  case GOMP_MAP_ATTACH_DETACH:
	  case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	    return *grp->grp_start;

	  default:
	    internal_error ("unexpected mapping node");
	  }
      internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_TO_PSET:
      gcc_assert (node != grp->grp_end);
      node = OMP_CLAUSE_CHAIN (node);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
	return NULL_TREE;
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      node = OMP_CLAUSE_CHAIN (node);
      if (!node || *grp->grp_start == grp->grp_end)
	return NULL_TREE;
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	{
	  /* We're mapping the base pointer itself in a bare attach or detach
	     node.  This is a side effect of how parsing works, and the mapping
	     will be removed anyway (at least for enter/exit data directives).
	     We should ignore the mapping here.  FIXME.  */
	  return NULL_TREE;
	}
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_STRUCT:
      {
	unsigned HOST_WIDE_INT num_mappings
	  = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
	node = OMP_CLAUSE_CHAIN (node);
	if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	  {
	    *firstprivate = OMP_CLAUSE_DECL (node);
	    node = OMP_CLAUSE_CHAIN (node);
	  }
	*chained = num_mappings;
	return node;
      }

    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_FIRSTPRIVATE:
    case GOMP_MAP_FIRSTPRIVATE_INT:
    case GOMP_MAP_USE_DEVICE_PTR:
    case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
      return NULL_TREE;

    case GOMP_MAP_FIRSTPRIVATE_POINTER:
    case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
    case GOMP_MAP_POINTER:
    case GOMP_MAP_ALWAYS_POINTER:
    case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
      /* These shouldn't appear by themselves.  */
      internal_error ("unexpected pointer mapping node");
      return error_mark_node;

    default:
      internal_error ("unexpected mapping node");
    }

  return error_mark_node;
}
/* Given a vector of omp_mapping_groups, build a hash table so we can look up
   nodes by tree_operand_hash.  */

static void
omp_index_mapping_groups_1 (hash_map<tree_operand_hash,
				     omp_mapping_group *> *grpmap,
			    vec<omp_mapping_group> *groups,
			    tree reindex_sentinel)
{
  omp_mapping_group *grp;
  unsigned int i;
  bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (reindexing && *grp->grp_start == reindex_sentinel)
	above_hwm = true;

      if (reindexing && !above_hwm)
	continue;

      tree fpp;
      unsigned int chained;
      tree node = omp_group_base (grp, &chained, &fpp);

      if (node == error_mark_node || (!node && !fpp))
	continue;

      for (unsigned j = 0;
	   node && j < chained;
	   node = OMP_CLAUSE_CHAIN (node), j++)
	{
	  tree decl = OMP_CLAUSE_DECL (node);

	  /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
	     meaning node-hash lookups don't work.  This is a workaround for
	     that, but ideally we should just create the INDIRECT_REF at
	     source instead.  FIXME.  */
	  if (TREE_CODE (decl) == MEM_REF
	      && integer_zerop (TREE_OPERAND (decl, 1)))
	    decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));

	  omp_mapping_group **prev = grpmap->get (decl);

	  if (prev && *prev == grp)
	    ;
	  else if (prev)
	    {
	      /* Mapping the same thing twice is normally diagnosed as an
		 error, but can happen under some circumstances, e.g. in
		 pr99928-16.c, the directive:

		   #pragma omp target simd reduction(+:a[:3]) \
			      map(always, tofrom: a[:6])

		 will result in two "a[0]" mappings (of different sizes).  */

	      grp->sibling = (*prev)->sibling;
	      (*prev)->sibling = grp;
	    }
	  else
	    grpmap->put (decl, grp);
	}

      if (!fpp)
	continue;

      omp_mapping_group **prev = grpmap->get (fpp);
      if (prev && *prev != grp)
	{
	  grp->sibling = (*prev)->sibling;
	  (*prev)->sibling = grp;
	}
      else
	grpmap->put (fpp, grp);
    }
}

static hash_map<tree_operand_hash, omp_mapping_group *> *
omp_index_mapping_groups (vec<omp_mapping_group> *groups)
{
  hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
    = new hash_map<tree_operand_hash, omp_mapping_group *>;

  omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);

  return grpmap;
}
/* Rebuild group map from partially-processed clause list (during
   omp_build_struct_sibling_lists).  We have already processed nodes up until
   a high-water mark (HWM).  This is a bit tricky because the list is being
   reordered as it is scanned, but we know:

   1. The list after HWM has not been touched yet, so we can reindex it safely.

   2. The list before and including HWM has been altered, but remains
      well-formed throughout the sibling-list building operation.

   so, we can do the reindex operation in two parts, on the processed and
   then the unprocessed halves of the list.  */

static hash_map<tree_operand_hash, omp_mapping_group *> *
omp_reindex_mapping_groups (tree *list_p,
			    vec<omp_mapping_group> *groups,
			    vec<omp_mapping_group> *processed_groups,
			    tree sentinel)
{
  hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
    = new hash_map<tree_operand_hash, omp_mapping_group *>;

  processed_groups->truncate (0);

  omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
  omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
  if (sentinel)
    omp_index_mapping_groups_1 (grpmap, groups, sentinel);

  return grpmap;
}
/* Find the immediately-containing struct for a component ref (etc.)
   expression EXPR.  */

static tree
omp_containing_struct (tree expr)
{
  tree expr0 = expr;

  STRIP_NOPS (expr);

  /* Note: don't strip NOPs unless we're also stripping off array refs or a
     component ref.  */
  if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
    return expr0;

  while (TREE_CODE (expr) == ARRAY_REF)
    expr = TREE_OPERAND (expr, 0);

  if (TREE_CODE (expr) == COMPONENT_REF)
    expr = TREE_OPERAND (expr, 0);

  return expr;
}

/* Return TRUE if DECL describes a component that is part of a whole structure
   that is mapped elsewhere in GRPMAP.  *MAPPED_BY_GROUP is set to the group
   that maps that structure, if present.  */

static bool
omp_mapped_by_containing_struct (hash_map<tree_operand_hash,
					  omp_mapping_group *> *grpmap,
				 tree decl,
				 omp_mapping_group **mapped_by_group)
{
  tree wsdecl = NULL_TREE;

  *mapped_by_group = NULL;

  while (decl)
    {
      wsdecl = omp_containing_struct (decl);
      if (wsdecl == decl)
	break;
      omp_mapping_group **wholestruct = grpmap->get (wsdecl);
      if (!wholestruct
	  && TREE_CODE (wsdecl) == MEM_REF
	  && integer_zerop (TREE_OPERAND (wsdecl, 1)))
	{
	  tree deref = TREE_OPERAND (wsdecl, 0);
	  deref = build_fold_indirect_ref (deref);
	  wholestruct = grpmap->get (deref);
	}
      if (wholestruct)
	{
	  *mapped_by_group = *wholestruct;
	  return true;
	}
      decl = wsdecl;
    }

  return false;
}
/* Helper function for omp_tsort_mapping_groups.  Returns TRUE on success, or
   FALSE on error.  */

static bool
omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
			    vec<omp_mapping_group> *groups,
			    hash_map<tree_operand_hash, omp_mapping_group *>
			      *grpmap,
			    omp_mapping_group *grp)
{
  if (grp->mark == PERMANENT)
    return true;
  if (grp->mark == TEMPORARY)
    {
      fprintf (stderr, "when processing group:\n");
      debug_mapping_group (grp);
      internal_error ("base pointer cycle detected");
      return false;
    }
  grp->mark = TEMPORARY;

  tree attaches_to = omp_get_attachment (grp);

  if (attaches_to)
    {
      omp_mapping_group **basep = grpmap->get (attaches_to);

      if (basep && *basep != grp)
	{
	  for (omp_mapping_group *w = *basep; w; w = w->sibling)
	    if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
	      return false;
	}
    }

  tree decl = OMP_CLAUSE_DECL (*grp->grp_start);

  while (decl)
    {
      tree base = omp_get_base_pointer (decl);

      if (!base)
	break;

      omp_mapping_group **innerp = grpmap->get (base);
      omp_mapping_group *wholestruct;

      /* We should treat whole-structure mappings as if all (pointer, in this
	 case) members are mapped as individual list items.  Check if we have
	 such a whole-structure mapping, if we don't have an explicit reference
	 to the pointer member itself.  */
      if (!innerp
	  && TREE_CODE (base) == COMPONENT_REF
	  && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
	innerp = &wholestruct;

      if (innerp && *innerp != grp)
	{
	  for (omp_mapping_group *w = *innerp; w; w = w->sibling)
	    if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
	      return false;
	  break;
	}

      decl = base;
    }

  grp->mark = PERMANENT;

  /* Emit grp to output list.  */

  **outlist = grp;
  *outlist = &grp->next;

  return true;
}
/* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
   before mappings that use those pointers.  This is an implementation of the
   depth-first search algorithm, described e.g. at:

     https://en.wikipedia.org/wiki/Topological_sorting
*/

static omp_mapping_group *
omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
			  hash_map<tree_operand_hash, omp_mapping_group *>
			    *grpmap)
{
  omp_mapping_group *grp, *outlist = NULL, **cursor;
  unsigned int i;

  cursor = &outlist;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (grp->mark != PERMANENT)
	if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
	  return NULL;
    }

  return outlist;
}
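
/* For illustration (hypothetical clauses): given

     map(to: p->arr[0:n]) map(to: p)

   the group for p->arr[0:n] names p as its base pointer, so the DFS above
   emits the group mapping p before the group mapping p->arr[0:n],
   regardless of their order in the original clause list.  */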
/* Split INLIST into two parts, moving groups corresponding to
   ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
   The former list is then appended to the latter.  Each sub-list retains the
   order of the original list.
   Note that ATTACH nodes are later moved to the end of the list in
   gimplify_adjust_omp_clauses, for target regions.  */

static omp_mapping_group *
omp_segregate_mapping_groups (omp_mapping_group *inlist)
{
  omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
  omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;

  for (omp_mapping_group *w = inlist; w;)
    {
      tree c = *w->grp_start;
      omp_mapping_group *next = w->next;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);

      switch (OMP_CLAUSE_MAP_KIND (c))
	{
	case GOMP_MAP_ALLOC:
	case GOMP_MAP_RELEASE:
	case GOMP_MAP_DELETE:
	  w->next = NULL;
	  *ard_tail = w;
	  ard_tail = &w->next;
	  break;

	default:
	  w->next = NULL;
	  *tf_tail = w;
	  tf_tail = &w->next;
	}

      w = next;
    }

  /* Now splice the lists together...  */
  *tf_tail = ard_groups;

  return tf_groups;
}
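
/* For illustration: given groups in the order

     {tofrom: a} {release: b} {to: c} {alloc: d}

   the function above produces {tofrom: a} {to: c} {release: b} {alloc: d};
   both sub-sequences keep their original relative order.  */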
/* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
   those groups based on the output list of omp_tsort_mapping_groups --
   singly-linked, threaded through each element's NEXT pointer starting at
   HEAD.  Each list element appears exactly once in that linked list.

   Each element of GROUPS may correspond to one or several mapping nodes.
   Node groups are kept together, and in the reordered list, the positions of
   the original groups are reused for the positions of the reordered list.
   Hence if we have e.g.

     {to ptr ptr} firstprivate {tofrom ptr} ...
	  ^            ^            ^
     first group   non-"map"   second group

   and say the second group contains a base pointer for the first so must be
   moved before it, the resulting list will contain:

     {tofrom ptr} firstprivate {to ptr ptr} ...
	  ^ prev. second group     ^ prev. first group
*/

static tree *
omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
			    omp_mapping_group *head,
			    tree *list_p)
{
  omp_mapping_group *grp;
  unsigned int i;
  unsigned numgroups = groups->length ();
  auto_vec<tree> old_heads (numgroups);
  auto_vec<tree *> old_headps (numgroups);
  auto_vec<tree> new_heads (numgroups);
  auto_vec<tree> old_succs (numgroups);
  bool map_at_start = (list_p == (*groups)[0].grp_start);

  tree *new_grp_tail = NULL;

  /* Stash the start & end nodes of each mapping group before we start
     modifying the list.  */
  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      old_headps.quick_push (grp->grp_start);
      old_heads.quick_push (*grp->grp_start);
      old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
    }

  /* And similarly, the heads of the groups in the order we want to rearrange
     the list to.  */
  for (omp_mapping_group *w = head; w; w = w->next)
    new_heads.quick_push (*w->grp_start);

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      gcc_assert (head);

      if (new_grp_tail && old_succs[i - 1] == old_heads[i])
	{
	  /* a {b c d} {e f g} h i j   (original)
	     -->
	     a {k l m} {e f g} h i j   (inserted new group on last iter)
	     -->
	     a {k l m} {n o p} h i j   (this time, chain last group to new one)
		      ^new_grp_tail
	  */
	  *new_grp_tail = new_heads[i];
	}
      else if (new_grp_tail)
	{
	  /* a {b c d} e {f g h} i j k   (original)
	     -->
	     a {l m n} e {f g h} i j k   (gap after last iter's group)
	     -->
	     a {l m n} e {o p q} h i j   (chain last group to old successor)
		      ^new_grp_tail
	  */
	  *new_grp_tail = old_succs[i - 1];
	  *old_headps[i] = new_heads[i];
	}
      else
	{
	  /* The first inserted group -- point to new group, and leave end
	     open.  */
	  *grp->grp_start = new_heads[i];
	}

      new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);

      head = head->next;
    }

  if (new_grp_tail)
    *new_grp_tail = old_succs[numgroups - 1];

  gcc_assert (!head);

  return map_at_start ? (*groups)[0].grp_start : list_p;
}
/* DECL is supposed to have lastprivate semantics in the outer contexts
   of combined/composite constructs, starting with OCTX.
   Add needed lastprivate, shared or map clause if no data sharing or
   mapping clause are present.  IMPLICIT_P is true if it is an implicit
   clause (IV on simd), in which case the lastprivate will not be
   copied to some constructs.  */
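
/* For illustration (hypothetical source): for a combined construct such as

     #pragma omp target parallel for lastprivate (x)

   the lastprivate clause belongs to the innermost loop construct; the
   function below then adds the matching shared clause on the parallel
   context and a map clause on the target context, walking outwards
   through OCTX.  */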
static void
omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
					       tree decl, bool implicit_p)
{
  struct gimplify_omp_ctx *orig_octx = octx;
  for (; octx; octx = octx->outer_context)
    {
      if ((octx->region_type == ORT_COMBINED_PARALLEL
	   || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
	  continue;
	}
      if ((octx->region_type & ORT_TASK) != 0
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (implicit_p
	  && octx->region_type == ORT_WORKSHARE
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && octx->outer_context
	  && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
	  && splay_tree_lookup (octx->outer_context->variables,
				(splay_tree_key) decl) == NULL)
	{
	  octx = octx->outer_context;
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && !omp_check_private (octx, decl, false))
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (octx->region_type == ORT_COMBINED_TARGET)
	{
	  splay_tree_node n = splay_tree_lookup (octx->variables,
						 (splay_tree_key) decl);
	  if (n == NULL)
	    {
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	  else if (!implicit_p
		   && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
	    {
	      n->value &= ~(GOVD_FIRSTPRIVATE
			    | GOVD_FIRSTPRIVATE_IMPLICIT
			    | GOVD_EXPLICIT);
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	}
      break;
    }
  if (octx && (implicit_p || octx != orig_octx))
    omp_notice_variable (octx, decl, true);
}
/* If we have mappings INNER and OUTER, where INNER is a component access and
   OUTER is a mapping of the whole containing struct, check that the mappings
   are compatible.  We'll be deleting the inner mapping, so we need to make
   sure the outer mapping does (at least) the same transfers to/from the device
   as the inner mapping.  */

static bool
omp_check_mapping_compatibility (location_t loc,
				 omp_mapping_group *outer,
				 omp_mapping_group *inner)
{
  tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;

  gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
  gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);

  enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
  enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);

  if (outer_kind == inner_kind)
    return true;

  switch (outer_kind)
    {
    case GOMP_MAP_ALWAYS_TO:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC
	  || inner_kind == GOMP_MAP_TO)
	return true;
      break;

    case GOMP_MAP_ALWAYS_FROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC
	  || inner_kind == GOMP_MAP_FROM)
	return true;
      break;

    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC)
	return true;
      break;

    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_TOFROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC
	  || inner_kind == GOMP_MAP_TO
	  || inner_kind == GOMP_MAP_FROM
	  || inner_kind == GOMP_MAP_TOFROM)
	return true;
      break;

    default:
      break;
    }

  error_at (loc, "data movement for component %qE is not compatible with "
	    "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
	    OMP_CLAUSE_DECL (first_outer));

  return false;
}
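
/* For illustration (hypothetical clauses): if the whole struct is mapped
   with map(tofrom: s) and a member with map(to: s.x), the member's transfer
   is covered and the check above succeeds; with map(alloc: s) and
   map(from: s.x) it fails, since deleting the inner mapping would lose the
   copy-out of s.x.  */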
/* Similar to omp_resolve_clause_dependencies, but for OpenACC.  The only
   clause dependencies we handle for now are struct element mappings and
   whole-struct mappings on the same directive, and duplicate clause
   detection.  */

static void
oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
				  hash_map<tree_operand_hash,
					   omp_mapping_group *> *grpmap)
{
  int i;
  omp_mapping_group *grp;
  hash_set<tree_operand_hash> *seen_components = NULL;
  hash_set<tree_operand_hash> *shown_error = NULL;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree grp_end = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (grp_end);

      gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);

      if (DECL_P (grp_end))
	continue;

      tree c = OMP_CLAUSE_DECL (*grp->grp_start);
      while (TREE_CODE (c) == ARRAY_REF)
	c = TREE_OPERAND (c, 0);
      if (TREE_CODE (c) != COMPONENT_REF)
	continue;
      if (!seen_components)
	seen_components = new hash_set<tree_operand_hash> ();
      if (!shown_error)
	shown_error = new hash_set<tree_operand_hash> ();
      if (seen_components->contains (c)
	  && !shown_error->contains (c))
	{
	  error_at (OMP_CLAUSE_LOCATION (grp_end),
		    "%qE appears more than once in map clauses",
		    OMP_CLAUSE_DECL (grp_end));
	  shown_error->add (c);
	}
      else
	seen_components->add (c);

      omp_mapping_group *struct_group;
      if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
	  && *grp->grp_start == grp_end)
	{
	  omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
					   struct_group, grp);
	  /* Remove the whole of this mapping -- redundant.  */
	  grp->deleted = true;
	}
    }

  if (seen_components)
    delete seen_components;
  if (shown_error)
    delete shown_error;
}
/* Link node NEWNODE so it is pointed to by chain INSERT_AT.  NEWNODE's chain
   is linked to the previous node pointed to by INSERT_AT.  */

static tree *
omp_siblist_insert_node_after (tree newnode, tree *insert_at)
{
  OMP_CLAUSE_CHAIN (newnode) = *insert_at;
  *insert_at = newnode;
  return &OMP_CLAUSE_CHAIN (newnode);
}

/* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
   pointed to by chain MOVE_AFTER instead.  */

static void
omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
{
  gcc_assert (node == *old_pos);
  *old_pos = OMP_CLAUSE_CHAIN (node);
  OMP_CLAUSE_CHAIN (node) = *move_after;
  *move_after = node;
}

/* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
   LAST_NODE to after MOVE_AFTER chain.  Similar to below function, but no
   new nodes are prepended to the list before splicing into the new position.
   Return the position we should continue scanning the list at, or NULL to
   stay where we were.  */

static tree *
omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
			      tree *move_after)
{
  if (first_ptr == move_after)
    return NULL;

  tree tmp = *first_ptr;
  *first_ptr = OMP_CLAUSE_CHAIN (last_node);
  OMP_CLAUSE_CHAIN (last_node) = *move_after;
  *move_after = tmp;

  return first_ptr;
}
10080 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10081 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10082 pointer MOVE_AFTER.
10084 The latter list was previously part of the OMP clause list, and the former
10085 (prepended) part is comprised of new nodes.
10087 We start with a list of nodes starting with a struct mapping node. We
10088 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10089 node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
10090 the group of mapping nodes we are currently processing (from the chain
10091 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10092 we should continue processing from, or NULL to stay where we were.
10094 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10095 different) is worked through below. Here we are processing LAST_NODE, and
10096 FIRST_PTR points at the preceding mapping clause:
10098 #. mapping node chain
10099 ---------------------------------------------------
10100 A. struct_node [->B]
10102 C. comp_2 [->D (move_after)]
10104 E. attach_3 [->F (first_ptr)]
10105 F. map_to_4 [->G (continue_at)]
10106 G. attach_4 (last_node) [->H]
10109 *last_new_tail = *first_ptr;
10111 I. new_node (first_new) [->F (last_new_tail)]
10113 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10115 #. mapping node chain
10116 ----------------------------------------------------
10117 A. struct_node [->B]
10119 C. comp_2 [->D (move_after)]
10121 E. attach_3 [->H (first_ptr)]
10122 F. map_to_4 [->G (continue_at)]
10123 G. attach_4 (last_node) [->H]
10126 I. new_node (first_new) [->F (last_new_tail)]
10128 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10130 #. mapping node chain
10131 ---------------------------------------------------
10132 A. struct_node [->B]
10134 C. comp_2 [->D (move_after)]
10136 E. attach_3 [->H (continue_at)]
10138 G. attach_4 (last_node) [->D]
10141 I. new_node (first_new) [->F (last_new_tail)]
10143 *move_after = first_new;
10145 #. mapping node chain
10146 ---------------------------------------------------
10147 A. struct_node [->B]
10149 C. comp_2 [->I (move_after)]
10151 E. attach_3 [->H (continue_at)]
10153 G. attach_4 (last_node) [->D]
10155 I. new_node (first_new) [->F (last_new_tail)]
10159 #. mapping node chain
10160 ---------------------------------------------------
10161 A. struct_node [->B]
10163 C. comp_2 [->I (move_after)]
10164 I. new_node (first_new) [->F (last_new_tail)]
10166 G. attach_4 (last_node) [->D]
10168 E. attach_3 [->H (continue_at)]
10173 omp_siblist_move_concat_nodes_after (tree first_new
, tree
*last_new_tail
,
10174 tree
*first_ptr
, tree last_node
,
10177 tree
*continue_at
= NULL
;
10178 *last_new_tail
= *first_ptr
;
10179 if (first_ptr
== move_after
)
10180 *move_after
= first_new
;
10183 *first_ptr
= OMP_CLAUSE_CHAIN (last_node
);
10184 continue_at
= first_ptr
;
10185 OMP_CLAUSE_CHAIN (last_node
) = *move_after
;
10186 *move_after
= first_new
;
10188 return continue_at
;
10191 /* Mapping struct members causes an additional set of nodes to be created,
10192 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10193 number of members being mapped, in order of ascending position (address or
10196 We scan through the list of mapping clauses, calling this function for each
10197 struct member mapping we find, and build up the list of mappings after the
10198 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10199 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10200 moved into place in the sorted list.
10209 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10212 GOMP_MAP_STRUCT (4)
10213 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10214 GOMP_MAP_ALLOC (struct.a)
10215 GOMP_MAP_ALLOC (struct.b)
10216 GOMP_MAP_TO (struct.c)
10217 GOMP_MAP_ALLOC (struct.d)
10220 In the case where we are mapping references to pointers, or in Fortran if
10221 we are mapping an array with a descriptor, additional nodes may be created
10222 after the struct node list also.
10224 The return code is either a pointer to the next node to process (if the
10225 list has been rearranged), else NULL to continue with the next node in the
10229 omp_accumulate_sibling_list (enum omp_region_type region_type
,
10230 enum tree_code code
,
10231 hash_map
<tree_operand_hash
, tree
>
10232 *&struct_map_to_clause
, tree
*grp_start_p
,
10233 tree grp_end
, tree
*inner
)
10235 poly_offset_int coffset
;
10236 poly_int64 cbitpos
;
10237 tree ocd
= OMP_CLAUSE_DECL (grp_end
);
10238 bool openmp
= !(region_type
& ORT_ACC
);
10239 tree
*continue_at
= NULL
;
10241 while (TREE_CODE (ocd
) == ARRAY_REF
)
10242 ocd
= TREE_OPERAND (ocd
, 0);
10244 if (TREE_CODE (ocd
) == INDIRECT_REF
)
10245 ocd
= TREE_OPERAND (ocd
, 0);
10247 tree base
= extract_base_bit_offset (ocd
, &cbitpos
, &coffset
);
10249 bool ptr
= (OMP_CLAUSE_MAP_KIND (grp_end
) == GOMP_MAP_ALWAYS_POINTER
);
10250 bool attach_detach
= ((OMP_CLAUSE_MAP_KIND (grp_end
)
10251 == GOMP_MAP_ATTACH_DETACH
)
10252 || (OMP_CLAUSE_MAP_KIND (grp_end
)
10253 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
));
10254 bool attach
= (OMP_CLAUSE_MAP_KIND (grp_end
) == GOMP_MAP_ATTACH
10255 || OMP_CLAUSE_MAP_KIND (grp_end
) == GOMP_MAP_DETACH
);
10257 /* FIXME: If we're not mapping the base pointer in some other clause on this
10258 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10260 if (openmp
&& attach_detach
)
10263 if (!struct_map_to_clause
|| struct_map_to_clause
->get (base
) == NULL
)
10265 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
), OMP_CLAUSE_MAP
);
10266 gomp_map_kind k
= attach
? GOMP_MAP_FORCE_PRESENT
: GOMP_MAP_STRUCT
;
10268 OMP_CLAUSE_SET_MAP_KIND (l
, k
);
10270 OMP_CLAUSE_DECL (l
) = unshare_expr (base
);
10272 OMP_CLAUSE_SIZE (l
)
10273 = (!attach
? size_int (1)
10274 : (DECL_P (OMP_CLAUSE_DECL (l
))
10275 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l
))
10276 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l
)))));
10277 if (struct_map_to_clause
== NULL
)
10278 struct_map_to_clause
= new hash_map
<tree_operand_hash
, tree
>;
10279 struct_map_to_clause
->put (base
, l
);
10281 if (ptr
|| attach_detach
)
10285 = build_omp_struct_comp_nodes (code
, *grp_start_p
, grp_end
,
10287 OMP_CLAUSE_CHAIN (l
) = alloc_node
;
10289 tree
*insert_node_pos
= grp_start_p
;
10293 OMP_CLAUSE_CHAIN (extra_node
) = *insert_node_pos
;
10294 OMP_CLAUSE_CHAIN (alloc_node
) = extra_node
;
10297 OMP_CLAUSE_CHAIN (alloc_node
) = *insert_node_pos
;
10299 *insert_node_pos
= l
;
10303 gcc_assert (*grp_start_p
== grp_end
);
10304 grp_start_p
= omp_siblist_insert_node_after (l
, grp_start_p
);
10307 tree noind
= omp_strip_indirections (base
);
10310 && (region_type
& ORT_TARGET
)
10311 && TREE_CODE (noind
) == COMPONENT_REF
)
10313 /* The base for this component access is a struct component access
10314 itself. Insert a node to be processed on the next iteration of
10315 our caller's loop, which will subsequently be turned into a new,
10316 inner GOMP_MAP_STRUCT mapping.
10318 We need to do this else the non-DECL_P base won't be
10319 rewritten correctly in the offloaded region. */
10320 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
),
10322 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_FORCE_PRESENT
);
10323 OMP_CLAUSE_DECL (c2
) = unshare_expr (noind
);
10324 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (TREE_TYPE (noind
));
10329 tree sdecl
= omp_strip_components_and_deref (base
);
10331 if (POINTER_TYPE_P (TREE_TYPE (sdecl
)) && (region_type
& ORT_TARGET
))
10333 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
),
10336 = (TREE_CODE (base
) == INDIRECT_REF
10337 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
10339 || ((TREE_CODE (TREE_OPERAND (base
, 0))
10341 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10342 (TREE_OPERAND (base
, 0), 0)))
10343 == REFERENCE_TYPE
))));
10344 enum gomp_map_kind mkind
= base_ref
? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10345 : GOMP_MAP_FIRSTPRIVATE_POINTER
;
10346 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
10347 OMP_CLAUSE_DECL (c2
) = sdecl
;
10348 tree baddr
= build_fold_addr_expr (base
);
10349 baddr
= fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end
),
10350 ptrdiff_type_node
, baddr
);
10351 /* This isn't going to be good enough when we add support for more
10352 complicated lvalue expressions. FIXME. */
10353 if (TREE_CODE (TREE_TYPE (sdecl
)) == REFERENCE_TYPE
10354 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl
))) == POINTER_TYPE
)
10355 sdecl
= build_simple_mem_ref (sdecl
);
10356 tree decladdr
= fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end
),
10357 ptrdiff_type_node
, sdecl
);
10358 OMP_CLAUSE_SIZE (c2
)
10359 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end
), MINUS_EXPR
,
10360 ptrdiff_type_node
, baddr
, decladdr
);
10361 /* Insert after struct node. */
10362 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
10363 OMP_CLAUSE_CHAIN (l
) = c2
;
10368 else if (struct_map_to_clause
)
10370 tree
*osc
= struct_map_to_clause
->get (base
);
10371 tree
*sc
= NULL
, *scp
= NULL
;
10372 sc
= &OMP_CLAUSE_CHAIN (*osc
);
10373 /* The struct mapping might be immediately followed by a
10374 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10375 indirect access or a reference, or both. (This added node is removed
10376 in omp-low.c after it has been processed there.) */
10378 && (OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
10379 || OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10380 sc
= &OMP_CLAUSE_CHAIN (*sc
);
10381 for (; *sc
!= grp_end
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
10382 if ((ptr
|| attach_detach
) && sc
== grp_start_p
)
10384 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != COMPONENT_REF
10385 && TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != INDIRECT_REF
10386 && TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != ARRAY_REF
)
10390 tree sc_decl
= OMP_CLAUSE_DECL (*sc
);
10391 poly_offset_int offset
;
10394 if (TREE_CODE (sc_decl
) == ARRAY_REF
)
10396 while (TREE_CODE (sc_decl
) == ARRAY_REF
)
10397 sc_decl
= TREE_OPERAND (sc_decl
, 0);
10398 if (TREE_CODE (sc_decl
) != COMPONENT_REF
10399 || TREE_CODE (TREE_TYPE (sc_decl
)) != ARRAY_TYPE
)
10402 else if (TREE_CODE (sc_decl
) == INDIRECT_REF
10403 && TREE_CODE (TREE_OPERAND (sc_decl
, 0)) == COMPONENT_REF
10404 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl
, 0)))
10405 == REFERENCE_TYPE
))
10406 sc_decl
= TREE_OPERAND (sc_decl
, 0);
10408 tree base2
= extract_base_bit_offset (sc_decl
, &bitpos
, &offset
);
10409 if (!base2
|| !operand_equal_p (base2
, base
, 0))
10413 if (maybe_lt (coffset
, offset
)
10414 || (known_eq (coffset
, offset
)
10415 && maybe_lt (cbitpos
, bitpos
)))
10417 if (ptr
|| attach_detach
)
10425 OMP_CLAUSE_SIZE (*osc
)
10426 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
), size_one_node
);
10427 if (ptr
|| attach_detach
)
10429 tree cl
= NULL_TREE
, extra_node
;
10430 tree alloc_node
= build_omp_struct_comp_nodes (code
, *grp_start_p
,
10431 grp_end
, &extra_node
);
10432 tree
*tail_chain
= NULL
;
10436 grp_end : the last (or only) node in this group.
10437 grp_start_p : pointer to the first node in a pointer mapping group
10438 up to and including GRP_END.
10439 sc : pointer to the chain for the end of the struct component
10441 scp : pointer to the chain for the sorted position at which we
10442 should insert in the middle of the struct component list
10443 (else NULL to insert at end).
10444 alloc_node : the "alloc" node for the structure (pointer-type)
10445 component. We insert at SCP (if present), else SC
10446 (the end of the struct component list).
10447 extra_node : a newly-synthesized node for an additional indirect
10448 pointer mapping or a Fortran pointer set, if needed.
10449 cl : first node to prepend before grp_start_p.
10450 tail_chain : pointer to chain of last prepended node.
10452 The general idea is we move the nodes for this struct mapping
10453 together: the alloc node goes into the sorted list directly after
10454 the struct mapping, and any extra nodes (together with the nodes
10455 mapping arrays pointed to by struct components) get moved after
10456 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10457 the end of the struct component mapping list. It's important that
10458 the alloc_node comes first in that case because it's part of the
10459 sorted component mapping list (but subsequent nodes are not!). */
10462 omp_siblist_insert_node_after (alloc_node
, scp
);
10464 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10465 already inserted it) and the extra_node (if it is present). The
10466 list can be empty if we added alloc_node above and there is no
10468 if (scp
&& extra_node
)
10471 tail_chain
= &OMP_CLAUSE_CHAIN (extra_node
);
10473 else if (extra_node
)
10475 OMP_CLAUSE_CHAIN (alloc_node
) = extra_node
;
10477 tail_chain
= &OMP_CLAUSE_CHAIN (extra_node
);
10482 tail_chain
= &OMP_CLAUSE_CHAIN (alloc_node
);
10486 = cl
? omp_siblist_move_concat_nodes_after (cl
, tail_chain
,
10487 grp_start_p
, grp_end
,
10489 : omp_siblist_move_nodes_after (grp_start_p
, grp_end
, sc
);
10491 else if (*sc
!= grp_end
)
10493 gcc_assert (*grp_start_p
== grp_end
);
10495 /* We are moving the current node back to a previous struct node:
10496 the node that used to point to the current node will now point to
10498 continue_at
= grp_start_p
;
10499 /* In the non-pointer case, the mapping clause itself is moved into
10500 the correct position in the struct component list, which in this
10501 case is just SC. */
10502 omp_siblist_move_node_after (*grp_start_p
, grp_start_p
, sc
);
10505 return continue_at
;
10508 /* Scan through GROUPS, and create sorted structure sibling lists without
10512 omp_build_struct_sibling_lists (enum tree_code code
,
10513 enum omp_region_type region_type
,
10514 vec
<omp_mapping_group
> *groups
,
10515 hash_map
<tree_operand_hash
, omp_mapping_group
*>
10520 omp_mapping_group
*grp
;
10521 hash_map
<tree_operand_hash
, tree
> *struct_map_to_clause
= NULL
;
10522 bool success
= true;
10523 tree
*new_next
= NULL
;
10524 tree
*tail
= &OMP_CLAUSE_CHAIN ((*groups
)[groups
->length () - 1].grp_end
);
10525 auto_vec
<omp_mapping_group
> pre_hwm_groups
;
10527 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10529 tree c
= grp
->grp_end
;
10530 tree decl
= OMP_CLAUSE_DECL (c
);
10531 tree grp_end
= grp
->grp_end
;
10532 tree sentinel
= OMP_CLAUSE_CHAIN (grp_end
);
10535 grp
->grp_start
= new_next
;
10539 tree
*grp_start_p
= grp
->grp_start
;
10544 /* Skip groups we marked for deletion in
10545 oacc_resolve_clause_dependencies. */
10549 if (OMP_CLAUSE_CHAIN (*grp_start_p
)
10550 && OMP_CLAUSE_CHAIN (*grp_start_p
) != grp_end
)
10552 /* Don't process an array descriptor that isn't inside a derived type
10553 as a struct (the GOMP_MAP_POINTER following will have the form
10554 "var.data", but such mappings are handled specially). */
10555 tree grpmid
= OMP_CLAUSE_CHAIN (*grp_start_p
);
10556 if (OMP_CLAUSE_CODE (grpmid
) == OMP_CLAUSE_MAP
10557 && OMP_CLAUSE_MAP_KIND (grpmid
) == GOMP_MAP_TO_PSET
10558 && DECL_P (OMP_CLAUSE_DECL (grpmid
)))
10563 if (TREE_CODE (d
) == ARRAY_REF
)
10565 while (TREE_CODE (d
) == ARRAY_REF
)
10566 d
= TREE_OPERAND (d
, 0);
10567 if (TREE_CODE (d
) == COMPONENT_REF
10568 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
10572 && TREE_CODE (decl
) == INDIRECT_REF
10573 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
10574 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
10576 && (OMP_CLAUSE_MAP_KIND (c
)
10577 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
))
10578 decl
= TREE_OPERAND (decl
, 0);
10582 if (TREE_CODE (decl
) != COMPONENT_REF
)
10585 /* If we're mapping the whole struct in another node, skip adding this
10586 node to a sibling list. */
10587 omp_mapping_group
*wholestruct
;
10588 if (omp_mapped_by_containing_struct (*grpmap
, OMP_CLAUSE_DECL (c
),
10591 if (!(region_type
& ORT_ACC
)
10592 && *grp_start_p
== grp_end
)
10593 /* Remove the whole of this mapping -- redundant. */
10594 grp
->deleted
= true;
10599 if (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
10600 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
10601 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
10602 && code
!= OACC_UPDATE
10603 && code
!= OMP_TARGET_UPDATE
)
10605 if (error_operand_p (decl
))
10611 tree stype
= TREE_TYPE (decl
);
10612 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
10613 stype
= TREE_TYPE (stype
);
10614 if (TYPE_SIZE_UNIT (stype
) == NULL
10615 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
10617 error_at (OMP_CLAUSE_LOCATION (c
),
10618 "mapping field %qE of variable length "
10619 "structure", OMP_CLAUSE_DECL (c
));
10624 tree inner
= NULL_TREE
;
10627 = omp_accumulate_sibling_list (region_type
, code
,
10628 struct_map_to_clause
, grp_start_p
,
10633 if (new_next
&& *new_next
== NULL_TREE
)
10638 OMP_CLAUSE_CHAIN (inner
) = NULL_TREE
;
10639 omp_mapping_group newgrp
;
10640 newgrp
.grp_start
= new_next
? new_next
: tail
;
10641 newgrp
.grp_end
= inner
;
10642 newgrp
.mark
= UNVISITED
;
10643 newgrp
.sibling
= NULL
;
10644 newgrp
.deleted
= false;
10645 newgrp
.next
= NULL
;
10646 groups
->safe_push (newgrp
);
10648 /* !!! Growing GROUPS might invalidate the pointers in the group
10649 map. Rebuild it here. This is a bit inefficient, but
10650 shouldn't happen very often. */
10653 = omp_reindex_mapping_groups (list_p
, groups
, &pre_hwm_groups
,
10656 tail
= &OMP_CLAUSE_CHAIN (inner
);
10661 /* Delete groups marked for deletion above. At this point the order of the
10662 groups may no longer correspond to the order of the underlying list,
10663 which complicates this a little. First clear out OMP_CLAUSE_DECL for
10664 deleted nodes... */
10666 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10668 for (tree d
= *grp
->grp_start
;
10669 d
!= OMP_CLAUSE_CHAIN (grp
->grp_end
);
10670 d
= OMP_CLAUSE_CHAIN (d
))
10671 OMP_CLAUSE_DECL (d
) = NULL_TREE
;
10673 /* ...then sweep through the list removing the now-empty nodes. */
10678 if (OMP_CLAUSE_CODE (*tail
) == OMP_CLAUSE_MAP
10679 && OMP_CLAUSE_DECL (*tail
) == NULL_TREE
)
10680 *tail
= OMP_CLAUSE_CHAIN (*tail
);
10682 tail
= &OMP_CLAUSE_CHAIN (*tail
);
10686 if (struct_map_to_clause
)
10687 delete struct_map_to_clause
;
10692 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
10693 and previous omp contexts. */
10696 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
10697 enum omp_region_type region_type
,
10698 enum tree_code code
)
10700 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
10702 tree
*prev_list_p
= NULL
, *orig_list_p
= list_p
;
10703 int handled_depend_iterators
= -1;
10706 ctx
= new_omp_context (region_type
);
10708 outer_ctx
= ctx
->outer_context
;
10709 if (code
== OMP_TARGET
)
10711 if (!lang_GNU_Fortran ())
10712 ctx
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
10713 ctx
->defaultmap
[GDMK_SCALAR
] = GOVD_FIRSTPRIVATE
;
10714 ctx
->defaultmap
[GDMK_SCALAR_TARGET
] = (lang_GNU_Fortran ()
10715 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
10717 if (!lang_GNU_Fortran ())
10721 case OMP_TARGET_DATA
:
10722 case OMP_TARGET_ENTER_DATA
:
10723 case OMP_TARGET_EXIT_DATA
:
10725 case OACC_HOST_DATA
:
10726 case OACC_PARALLEL
:
10728 ctx
->target_firstprivatize_array_bases
= true;
10733 if (code
== OMP_TARGET
10734 || code
== OMP_TARGET_DATA
10735 || code
== OMP_TARGET_ENTER_DATA
10736 || code
== OMP_TARGET_EXIT_DATA
)
10738 vec
<omp_mapping_group
> *groups
;
10739 groups
= omp_gather_mapping_groups (list_p
);
10742 hash_map
<tree_operand_hash
, omp_mapping_group
*> *grpmap
;
10743 grpmap
= omp_index_mapping_groups (groups
);
10745 omp_build_struct_sibling_lists (code
, region_type
, groups
, &grpmap
,
10748 omp_mapping_group
*outlist
= NULL
;
10750 /* Topological sorting may fail if we have duplicate nodes, which
10751 we should have detected and shown an error for already. Skip
10752 sorting in that case. */
10759 /* Rebuild now we have struct sibling lists. */
10760 groups
= omp_gather_mapping_groups (list_p
);
10761 grpmap
= omp_index_mapping_groups (groups
);
10763 outlist
= omp_tsort_mapping_groups (groups
, grpmap
);
10764 outlist
= omp_segregate_mapping_groups (outlist
);
10765 list_p
= omp_reorder_mapping_groups (groups
, outlist
, list_p
);
10772 else if (region_type
& ORT_ACC
)
10774 vec
<omp_mapping_group
> *groups
;
10775 groups
= omp_gather_mapping_groups (list_p
);
10778 hash_map
<tree_operand_hash
, omp_mapping_group
*> *grpmap
;
10779 grpmap
= omp_index_mapping_groups (groups
);
10781 oacc_resolve_clause_dependencies (groups
, grpmap
);
10782 omp_build_struct_sibling_lists (code
, region_type
, groups
, &grpmap
,
10790 while ((c
= *list_p
) != NULL
)
10792 bool remove
= false;
10793 bool notice_outer
= true;
10794 const char *check_non_private
= NULL
;
10795 unsigned int flags
;
10798 switch (OMP_CLAUSE_CODE (c
))
10800 case OMP_CLAUSE_PRIVATE
:
10801 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
10802 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
10804 flags
|= GOVD_PRIVATE_OUTER_REF
;
10805 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
10808 notice_outer
= false;
10810 case OMP_CLAUSE_SHARED
:
10811 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
10813 case OMP_CLAUSE_FIRSTPRIVATE
:
10814 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
10815 check_non_private
= "firstprivate";
10816 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
10818 gcc_assert (code
== OMP_TARGET
);
10819 flags
|= GOVD_FIRSTPRIVATE_IMPLICIT
;
10822 case OMP_CLAUSE_LASTPRIVATE
:
10823 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
10826 case OMP_DISTRIBUTE
:
10827 error_at (OMP_CLAUSE_LOCATION (c
),
10828 "conditional %<lastprivate%> clause on "
10829 "%qs construct", "distribute");
10830 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
10833 error_at (OMP_CLAUSE_LOCATION (c
),
10834 "conditional %<lastprivate%> clause on "
10835 "%qs construct", "taskloop");
10836 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
10841 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
10842 if (code
!= OMP_LOOP
)
10843 check_non_private
= "lastprivate";
10844 decl
= OMP_CLAUSE_DECL (c
);
10845 if (error_operand_p (decl
))
10847 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
10848 && !lang_hooks
.decls
.omp_scalar_p (decl
, true))
10850 error_at (OMP_CLAUSE_LOCATION (c
),
10851 "non-scalar variable %qD in conditional "
10852 "%<lastprivate%> clause", decl
);
10853 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
10855 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
10856 flags
|= GOVD_LASTPRIVATE_CONDITIONAL
;
10857 omp_lastprivate_for_combined_outer_constructs (outer_ctx
, decl
,
10860 case OMP_CLAUSE_REDUCTION
:
10861 if (OMP_CLAUSE_REDUCTION_TASK (c
))
10863 if (region_type
== ORT_WORKSHARE
|| code
== OMP_SCOPE
)
10866 nowait
= omp_find_clause (*list_p
,
10867 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
10869 && (outer_ctx
== NULL
10870 || outer_ctx
->region_type
!= ORT_COMBINED_PARALLEL
))
10872 error_at (OMP_CLAUSE_LOCATION (c
),
10873 "%<task%> reduction modifier on a construct "
10874 "with a %<nowait%> clause");
10875 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
10878 else if ((region_type
& ORT_PARALLEL
) != ORT_PARALLEL
)
10880 error_at (OMP_CLAUSE_LOCATION (c
),
10881 "invalid %<task%> reduction modifier on construct "
10882 "other than %<parallel%>, %qs, %<sections%> or "
10883 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
10884 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
10887 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
10891 error_at (OMP_CLAUSE_LOCATION (c
),
10892 "%<inscan%> %<reduction%> clause on "
10893 "%qs construct", "sections");
10894 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
10897 error_at (OMP_CLAUSE_LOCATION (c
),
10898 "%<inscan%> %<reduction%> clause on "
10899 "%qs construct", "parallel");
10900 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
10903 error_at (OMP_CLAUSE_LOCATION (c
),
10904 "%<inscan%> %<reduction%> clause on "
10905 "%qs construct", "teams");
10906 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
10909 error_at (OMP_CLAUSE_LOCATION (c
),
10910 "%<inscan%> %<reduction%> clause on "
10911 "%qs construct", "taskloop");
10912 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
10915 error_at (OMP_CLAUSE_LOCATION (c
),
10916 "%<inscan%> %<reduction%> clause on "
10917 "%qs construct", "scope");
10918 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
10924 case OMP_CLAUSE_IN_REDUCTION
:
10925 case OMP_CLAUSE_TASK_REDUCTION
:
10926 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
10927 /* OpenACC permits reductions on private variables. */
10928 if (!(region_type
& ORT_ACC
)
10929 /* taskgroup is actually not a worksharing region. */
10930 && code
!= OMP_TASKGROUP
)
10931 check_non_private
= omp_clause_code_name
[OMP_CLAUSE_CODE (c
)];
10932 decl
= OMP_CLAUSE_DECL (c
);
10933 if (TREE_CODE (decl
) == MEM_REF
)
10935 tree type
= TREE_TYPE (decl
);
10936 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
10937 gimplify_ctxp
->into_ssa
= false;
10938 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
10939 NULL
, is_gimple_val
, fb_rvalue
, false)
10942 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
10946 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
10947 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
10950 omp_firstprivatize_variable (ctx
, v
);
10951 omp_notice_variable (ctx
, v
, true);
10953 decl
= TREE_OPERAND (decl
, 0);
10954 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
10956 gimplify_ctxp
->into_ssa
= false;
10957 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
10958 NULL
, is_gimple_val
, fb_rvalue
, false)
10961 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
10965 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
10966 v
= TREE_OPERAND (decl
, 1);
10969 omp_firstprivatize_variable (ctx
, v
);
10970 omp_notice_variable (ctx
, v
, true);
10972 decl
= TREE_OPERAND (decl
, 0);
10974 if (TREE_CODE (decl
) == ADDR_EXPR
10975 || TREE_CODE (decl
) == INDIRECT_REF
)
10976 decl
= TREE_OPERAND (decl
, 0);
10979 case OMP_CLAUSE_LINEAR
:
10980 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
10981 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10988 if (code
== OMP_SIMD
10989 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10991 struct gimplify_omp_ctx
*octx
= outer_ctx
;
10993 && octx
->region_type
== ORT_WORKSHARE
10994 && octx
->combined_loop
10995 && !octx
->distribute
)
10997 if (octx
->outer_context
10998 && (octx
->outer_context
->region_type
10999 == ORT_COMBINED_PARALLEL
))
11000 octx
= octx
->outer_context
->outer_context
;
11002 octx
= octx
->outer_context
;
11005 && octx
->region_type
== ORT_WORKSHARE
11006 && octx
->combined_loop
11007 && octx
->distribute
)
11009 error_at (OMP_CLAUSE_LOCATION (c
),
11010 "%<linear%> clause for variable other than "
11011 "loop iterator specified on construct "
11012 "combined with %<distribute%>");
11017 /* For combined #pragma omp parallel for simd, need to put
11018 lastprivate and perhaps firstprivate too on the
11019 parallel. Similarly for #pragma omp for simd. */
11020 struct gimplify_omp_ctx
*octx
= outer_ctx
;
11021 bool taskloop_seen
= false;
11025 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
11026 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11028 decl
= OMP_CLAUSE_DECL (c
);
11029 if (error_operand_p (decl
))
11035 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11036 flags
|= GOVD_FIRSTPRIVATE
;
11037 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11038 flags
|= GOVD_LASTPRIVATE
;
11040 && octx
->region_type
== ORT_WORKSHARE
11041 && octx
->combined_loop
)
11043 if (octx
->outer_context
11044 && (octx
->outer_context
->region_type
11045 == ORT_COMBINED_PARALLEL
))
11046 octx
= octx
->outer_context
;
11047 else if (omp_check_private (octx
, decl
, false))
11051 && (octx
->region_type
& ORT_TASK
) != 0
11052 && octx
->combined_loop
)
11053 taskloop_seen
= true;
11055 && octx
->region_type
== ORT_COMBINED_PARALLEL
11056 && ((ctx
->region_type
== ORT_WORKSHARE
11057 && octx
== outer_ctx
)
11059 flags
= GOVD_SEEN
| GOVD_SHARED
;
11061 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
11062 == ORT_COMBINED_TEAMS
))
11063 flags
= GOVD_SEEN
| GOVD_SHARED
;
11065 && octx
->region_type
== ORT_COMBINED_TARGET
)
11067 if (flags
& GOVD_LASTPRIVATE
)
11068 flags
= GOVD_SEEN
| GOVD_MAP
;
11073 = splay_tree_lookup (octx
->variables
,
11074 (splay_tree_key
) decl
);
11075 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
11080 omp_add_variable (octx
, decl
, flags
);
11081 if (octx
->outer_context
== NULL
)
11083 octx
= octx
->outer_context
;
11088 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
11089 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
11090 omp_notice_variable (octx
, decl
, true);
11092 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
11093 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
11094 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11096 notice_outer
= false;
11097 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11101 case OMP_CLAUSE_MAP
:
11102 decl
= OMP_CLAUSE_DECL (c
);
11103 if (error_operand_p (decl
))
11110 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
11113 case OMP_TARGET_DATA
:
11114 case OMP_TARGET_ENTER_DATA
:
11115 case OMP_TARGET_EXIT_DATA
:
11116 case OACC_ENTER_DATA
:
11117 case OACC_EXIT_DATA
:
11118 case OACC_HOST_DATA
:
11119 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11120 || (OMP_CLAUSE_MAP_KIND (c
)
11121 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11122 /* For target {,enter ,exit }data only the array slice is
11123 mapped, but not the pointer to it. */
11129 /* For Fortran, not only the pointer to the data is mapped but also
11130 the address of the pointer, the array descriptor etc.; for
11131 'exit data' - and in particular for 'delete:' - having an 'alloc:'
11132 does not make sense. Likewise, for 'update' only transferring the
11133 data itself is needed as the rest has been handled in previous
11134 directives. However, for 'exit data', the array descriptor needs
11135 to be delete; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
11137 NOTE: Generally, it is not safe to perform "enter data" operations
11138 on arrays where the data *or the descriptor* may go out of scope
11139 before a corresponding "exit data" operation -- and such a
11140 descriptor may be synthesized temporarily, e.g. to pass an
11141 explicit-shape array to a function expecting an assumed-shape
11142 argument. Performing "enter data" inside the called function
11143 would thus be problematic. */
11144 if (code
== OMP_TARGET_EXIT_DATA
11145 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
)
11146 OMP_CLAUSE_SET_MAP_KIND (c
, OMP_CLAUSE_MAP_KIND (*prev_list_p
)
11148 ? GOMP_MAP_DELETE
: GOMP_MAP_RELEASE
);
11149 else if ((code
== OMP_TARGET_EXIT_DATA
|| code
== OMP_TARGET_UPDATE
)
11150 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11151 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
))
11156 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
11158 struct gimplify_omp_ctx
*octx
;
11159 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
11161 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
11164 = splay_tree_lookup (octx
->variables
,
11165 (splay_tree_key
) decl
);
11167 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
11168 "declared in enclosing %<host_data%> region",
11172 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
11173 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
11174 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
11175 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
11176 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11181 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11182 || (OMP_CLAUSE_MAP_KIND (c
)
11183 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
11184 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11185 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
11187 OMP_CLAUSE_SIZE (c
)
11188 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
11190 if ((region_type
& ORT_TARGET
) != 0)
11191 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
11192 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
11195 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
11197 tree base
= omp_strip_components_and_deref (decl
);
11202 = splay_tree_lookup (ctx
->variables
,
11203 (splay_tree_key
) decl
);
11206 && (n
->value
& (GOVD_MAP
| GOVD_FIRSTPRIVATE
)) != 0)
11211 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
11217 if (TREE_CODE (decl
) == TARGET_EXPR
)
11219 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
11220 is_gimple_lvalue
, fb_lvalue
)
11224 else if (!DECL_P (decl
))
11226 tree d
= decl
, *pd
;
11227 if (TREE_CODE (d
) == ARRAY_REF
)
11229 while (TREE_CODE (d
) == ARRAY_REF
)
11230 d
= TREE_OPERAND (d
, 0);
11231 if (TREE_CODE (d
) == COMPONENT_REF
11232 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
11235 pd
= &OMP_CLAUSE_DECL (c
);
11237 && TREE_CODE (decl
) == INDIRECT_REF
11238 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
11239 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
11241 && (OMP_CLAUSE_MAP_KIND (c
)
11242 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
))
11244 pd
= &TREE_OPERAND (decl
, 0);
11245 decl
= TREE_OPERAND (decl
, 0);
11247 /* An "attach/detach" operation on an update directive should
11248 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11249 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11250 depends on the previous mapping. */
11251 if (code
== OACC_UPDATE
11252 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11253 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_ALWAYS_POINTER
);
11255 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11257 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
11262 gomp_map_kind k
= ((code
== OACC_EXIT_DATA
11263 || code
== OMP_TARGET_EXIT_DATA
)
11264 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
11265 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
11271 while (TREE_CODE (cref
) == ARRAY_REF
)
11272 cref
= TREE_OPERAND (cref
, 0);
11274 if (TREE_CODE (cref
) == INDIRECT_REF
)
11275 cref
= TREE_OPERAND (cref
, 0);
11277 if (TREE_CODE (cref
) == COMPONENT_REF
)
11280 while (base
&& !DECL_P (base
))
11282 tree innerbase
= omp_get_base_pointer (base
);
11289 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
11290 && POINTER_TYPE_P (TREE_TYPE (base
)))
11293 = splay_tree_lookup (ctx
->variables
,
11294 (splay_tree_key
) base
);
11295 n
->value
|= GOVD_SEEN
;
11299 if (code
== OMP_TARGET
&& OMP_CLAUSE_MAP_IN_REDUCTION (c
))
11301 /* Don't gimplify *pd fully at this point, as the base
11302 will need to be adjusted during omp lowering. */
11303 auto_vec
<tree
, 10> expr_stack
;
11305 while (handled_component_p (*p
)
11306 || TREE_CODE (*p
) == INDIRECT_REF
11307 || TREE_CODE (*p
) == ADDR_EXPR
11308 || TREE_CODE (*p
) == MEM_REF
11309 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
11311 expr_stack
.safe_push (*p
);
11312 p
= &TREE_OPERAND (*p
, 0);
11314 for (int i
= expr_stack
.length () - 1; i
>= 0; i
--)
11316 tree t
= expr_stack
[i
];
11317 if (TREE_CODE (t
) == ARRAY_REF
11318 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
11320 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
11322 tree low
= unshare_expr (array_ref_low_bound (t
));
11323 if (!is_gimple_min_invariant (low
))
11325 TREE_OPERAND (t
, 2) = low
;
11326 if (gimplify_expr (&TREE_OPERAND (t
, 2),
11329 fb_rvalue
) == GS_ERROR
)
11333 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
11334 NULL
, is_gimple_reg
,
11335 fb_rvalue
) == GS_ERROR
)
11337 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
11339 tree elmt_size
= array_ref_element_size (t
);
11340 if (!is_gimple_min_invariant (elmt_size
))
11342 elmt_size
= unshare_expr (elmt_size
);
11344 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
,
11347 = size_int (TYPE_ALIGN_UNIT (elmt_type
));
11349 = size_binop (EXACT_DIV_EXPR
, elmt_size
,
11351 TREE_OPERAND (t
, 3) = elmt_size
;
11352 if (gimplify_expr (&TREE_OPERAND (t
, 3),
11355 fb_rvalue
) == GS_ERROR
)
11359 else if (gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
11360 NULL
, is_gimple_reg
,
11361 fb_rvalue
) == GS_ERROR
)
11364 else if (TREE_CODE (t
) == COMPONENT_REF
)
11366 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
11368 tree offset
= component_ref_field_offset (t
);
11369 if (!is_gimple_min_invariant (offset
))
11371 offset
= unshare_expr (offset
);
11372 tree field
= TREE_OPERAND (t
, 1);
11374 = size_int (DECL_OFFSET_ALIGN (field
)
11376 offset
= size_binop (EXACT_DIV_EXPR
, offset
,
11378 TREE_OPERAND (t
, 2) = offset
;
11379 if (gimplify_expr (&TREE_OPERAND (t
, 2),
11382 fb_rvalue
) == GS_ERROR
)
11386 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
11387 NULL
, is_gimple_reg
,
11388 fb_rvalue
) == GS_ERROR
)
11392 for (; expr_stack
.length () > 0; )
11394 tree t
= expr_stack
.pop ();
11396 if (TREE_CODE (t
) == ARRAY_REF
11397 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
11399 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1))
11400 && gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
,
11401 NULL
, is_gimple_val
,
11402 fb_rvalue
) == GS_ERROR
)
11407 else if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
,
11408 fb_lvalue
) == GS_ERROR
)
11415 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
11416 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH_DETACH
11417 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
11418 && OMP_CLAUSE_CHAIN (c
)
11419 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
11420 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
11421 == GOMP_MAP_ALWAYS_POINTER
)
11422 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
11423 == GOMP_MAP_ATTACH_DETACH
)
11424 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
11425 == GOMP_MAP_TO_PSET
)))
11426 prev_list_p
= list_p
;
11430 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
11431 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
11432 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
11433 flags
|= GOVD_MAP_ALWAYS_TO
;
11435 if ((code
== OMP_TARGET
11436 || code
== OMP_TARGET_DATA
11437 || code
== OMP_TARGET_ENTER_DATA
11438 || code
== OMP_TARGET_EXIT_DATA
)
11439 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11441 for (struct gimplify_omp_ctx
*octx
= outer_ctx
; octx
;
11442 octx
= octx
->outer_context
)
11445 = splay_tree_lookup (octx
->variables
,
11446 (splay_tree_key
) OMP_CLAUSE_DECL (c
));
11447 /* If this is contained in an outer OpenMP region as a
11448 firstprivate value, remove the attach/detach. */
11449 if (n
&& (n
->value
& GOVD_FIRSTPRIVATE
))
11451 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
11456 enum gomp_map_kind map_kind
= (code
== OMP_TARGET_EXIT_DATA
11458 : GOMP_MAP_ATTACH
);
11459 OMP_CLAUSE_SET_MAP_KIND (c
, map_kind
);
11464 case OMP_CLAUSE_AFFINITY
:
11465 gimplify_omp_affinity (list_p
, pre_p
);
11468 case OMP_CLAUSE_DOACROSS
:
11469 if (OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SINK
)
11471 tree deps
= OMP_CLAUSE_DECL (c
);
11472 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
11474 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
11475 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
11476 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
11477 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
11478 deps
= TREE_CHAIN (deps
);
11482 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c
)
11483 == OMP_CLAUSE_DOACROSS_SOURCE
);
11485 case OMP_CLAUSE_DEPEND
:
11486 if (handled_depend_iterators
== -1)
11487 handled_depend_iterators
= gimplify_omp_depend (list_p
, pre_p
);
11488 if (handled_depend_iterators
)
11490 if (handled_depend_iterators
== 2)
11494 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
11496 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
11497 NULL
, is_gimple_val
, fb_rvalue
);
11498 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
11500 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
11505 if (OMP_CLAUSE_DECL (c
) != null_pointer_node
)
11507 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
11508 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
11509 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11515 if (code
== OMP_TASK
)
11516 ctx
->has_depend
= true;
11519 case OMP_CLAUSE_TO
:
11520 case OMP_CLAUSE_FROM
:
11521 case OMP_CLAUSE__CACHE_
:
11522 decl
= OMP_CLAUSE_DECL (c
);
11523 if (error_operand_p (decl
))
11528 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
11529 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
11530 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
11531 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
11532 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11537 if (!DECL_P (decl
))
11539 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
11540 NULL
, is_gimple_lvalue
, fb_lvalue
)
11550 case OMP_CLAUSE_USE_DEVICE_PTR
:
11551 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11552 flags
= GOVD_EXPLICIT
;
11555 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
11556 decl
= OMP_CLAUSE_DECL (c
);
11557 while (TREE_CODE (decl
) == INDIRECT_REF
11558 || TREE_CODE (decl
) == ARRAY_REF
)
11559 decl
= TREE_OPERAND (decl
, 0);
11560 flags
= GOVD_EXPLICIT
;
11563 case OMP_CLAUSE_IS_DEVICE_PTR
:
11564 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
11568 decl
= OMP_CLAUSE_DECL (c
);
11570 if (error_operand_p (decl
))
11575 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
11577 tree t
= omp_member_access_dummy_var (decl
);
11580 tree v
= DECL_VALUE_EXPR (decl
);
11581 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
11583 omp_notice_variable (outer_ctx
, t
, true);
11586 if (code
== OACC_DATA
11587 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11588 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
11589 flags
|= GOVD_MAP_0LEN_ARRAY
;
11590 omp_add_variable (ctx
, decl
, flags
);
11591 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11592 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
11593 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
11594 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11596 struct gimplify_omp_ctx
*pctx
11597 = code
== OMP_TARGET
? outer_ctx
: ctx
;
11599 omp_add_variable (pctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
11600 GOVD_LOCAL
| GOVD_SEEN
);
11602 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
11603 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
11605 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
11606 NULL
) == NULL_TREE
)
11607 omp_add_variable (pctx
,
11608 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
11609 GOVD_LOCAL
| GOVD_SEEN
);
11610 gimplify_omp_ctxp
= pctx
;
11611 push_gimplify_context ();
11613 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
11614 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
11616 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
11617 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
11618 pop_gimplify_context
11619 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
11620 push_gimplify_context ();
11621 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
11622 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
11623 pop_gimplify_context
11624 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
11625 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
11626 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
11628 gimplify_omp_ctxp
= outer_ctx
;
11630 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11631 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
11633 gimplify_omp_ctxp
= ctx
;
11634 push_gimplify_context ();
11635 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
11637 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
11639 TREE_SIDE_EFFECTS (bind
) = 1;
11640 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
11641 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
11643 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
11644 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
11645 pop_gimplify_context
11646 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
11647 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
11649 gimplify_omp_ctxp
= outer_ctx
;
11651 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11652 && OMP_CLAUSE_LINEAR_STMT (c
))
11654 gimplify_omp_ctxp
= ctx
;
11655 push_gimplify_context ();
11656 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
11658 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
11660 TREE_SIDE_EFFECTS (bind
) = 1;
11661 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
11662 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
11664 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
11665 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
11666 pop_gimplify_context
11667 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
11668 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
11670 gimplify_omp_ctxp
= outer_ctx
;
11676 case OMP_CLAUSE_COPYIN
:
11677 case OMP_CLAUSE_COPYPRIVATE
:
11678 decl
= OMP_CLAUSE_DECL (c
);
11679 if (error_operand_p (decl
))
11684 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
11686 && !omp_check_private (ctx
, decl
, true))
11689 if (is_global_var (decl
))
11691 if (DECL_THREAD_LOCAL_P (decl
))
11693 else if (DECL_HAS_VALUE_EXPR_P (decl
))
11695 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
11699 && DECL_THREAD_LOCAL_P (value
))
11704 error_at (OMP_CLAUSE_LOCATION (c
),
11705 "copyprivate variable %qE is not threadprivate"
11706 " or private in outer context", DECL_NAME (decl
));
11709 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11710 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
11711 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
11713 && ((region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
11714 || (region_type
== ORT_WORKSHARE
11715 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11716 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
11717 || code
== OMP_LOOP
)))
11718 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
11719 || (code
== OMP_LOOP
11720 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11721 && ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
11722 == ORT_COMBINED_TEAMS
))))
11725 = splay_tree_lookup (outer_ctx
->variables
,
11726 (splay_tree_key
)decl
);
11727 if (on
== NULL
|| (on
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11729 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11730 && TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
11731 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
11732 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
11733 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
11734 == POINTER_TYPE
))))
11735 omp_firstprivatize_variable (outer_ctx
, decl
);
11738 omp_add_variable (outer_ctx
, decl
,
11739 GOVD_SEEN
| GOVD_SHARED
);
11740 if (outer_ctx
->outer_context
)
11741 omp_notice_variable (outer_ctx
->outer_context
, decl
,
11747 omp_notice_variable (outer_ctx
, decl
, true);
11748 if (check_non_private
11749 && (region_type
== ORT_WORKSHARE
|| code
== OMP_SCOPE
)
11750 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
11751 || decl
== OMP_CLAUSE_DECL (c
)
11752 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
11753 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
11755 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
11756 == POINTER_PLUS_EXPR
11757 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
11758 (OMP_CLAUSE_DECL (c
), 0), 0))
11760 && omp_check_private (ctx
, decl
, false))
11762 error ("%s variable %qE is private in outer context",
11763 check_non_private
, DECL_NAME (decl
));
11768 case OMP_CLAUSE_DETACH
:
11769 flags
= GOVD_FIRSTPRIVATE
| GOVD_SEEN
;
11772 case OMP_CLAUSE_IF
:
11773 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
11774 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
11777 for (int i
= 0; i
< 2; i
++)
11778 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
11780 case VOID_CST
: p
[i
] = "cancel"; break;
11781 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
11782 case OMP_SIMD
: p
[i
] = "simd"; break;
11783 case OMP_TASK
: p
[i
] = "task"; break;
11784 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
11785 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
11786 case OMP_TARGET
: p
[i
] = "target"; break;
11787 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
11788 case OMP_TARGET_ENTER_DATA
:
11789 p
[i
] = "target enter data"; break;
11790 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
11791 default: gcc_unreachable ();
11793 error_at (OMP_CLAUSE_LOCATION (c
),
11794 "expected %qs %<if%> clause modifier rather than %qs",
11798 /* Fall through. */
11800 case OMP_CLAUSE_FINAL
:
11801 OMP_CLAUSE_OPERAND (c
, 0)
11802 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
11803 /* Fall through. */
11805 case OMP_CLAUSE_NUM_TEAMS
:
11806 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
11807 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
11808 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)))
11810 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)))
11815 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
11816 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
),
11817 pre_p
, NULL
, true);
11819 /* Fall through. */
11821 case OMP_CLAUSE_SCHEDULE
:
11822 case OMP_CLAUSE_NUM_THREADS
:
11823 case OMP_CLAUSE_THREAD_LIMIT
:
11824 case OMP_CLAUSE_DIST_SCHEDULE
:
11825 case OMP_CLAUSE_DEVICE
:
11826 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEVICE
11827 && OMP_CLAUSE_DEVICE_ANCESTOR (c
))
11829 if (code
!= OMP_TARGET
)
11831 error_at (OMP_CLAUSE_LOCATION (c
),
11832 "%<device%> clause with %<ancestor%> is only "
11833 "allowed on %<target%> construct");
11838 tree clauses
= *orig_list_p
;
11839 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
11840 if (OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_DEVICE
11841 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_FIRSTPRIVATE
11842 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_PRIVATE
11843 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_DEFAULTMAP
11844 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_MAP
11847 error_at (OMP_CLAUSE_LOCATION (c
),
11848 "with %<ancestor%>, only the %<device%>, "
11849 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
11850 "and %<map%> clauses may appear on the "
11856 /* Fall through. */
11858 case OMP_CLAUSE_PRIORITY
:
11859 case OMP_CLAUSE_GRAINSIZE
:
11860 case OMP_CLAUSE_NUM_TASKS
:
11861 case OMP_CLAUSE_FILTER
:
11862 case OMP_CLAUSE_HINT
:
11863 case OMP_CLAUSE_ASYNC
:
11864 case OMP_CLAUSE_WAIT
:
11865 case OMP_CLAUSE_NUM_GANGS
:
11866 case OMP_CLAUSE_NUM_WORKERS
:
11867 case OMP_CLAUSE_VECTOR_LENGTH
:
11868 case OMP_CLAUSE_WORKER
:
11869 case OMP_CLAUSE_VECTOR
:
11870 if (OMP_CLAUSE_OPERAND (c
, 0)
11871 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c
, 0)))
11873 if (error_operand_p (OMP_CLAUSE_OPERAND (c
, 0)))
11878 /* All these clauses care about value, not a particular decl,
11879 so try to force it into a SSA_NAME or fresh temporary. */
11880 OMP_CLAUSE_OPERAND (c
, 0)
11881 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c
, 0),
11882 pre_p
, NULL
, true);
11886 case OMP_CLAUSE_GANG
:
11887 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
11888 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11890 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
11891 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11895 case OMP_CLAUSE_NOWAIT
:
11899 case OMP_CLAUSE_ORDERED
:
11900 case OMP_CLAUSE_UNTIED
:
11901 case OMP_CLAUSE_COLLAPSE
:
11902 case OMP_CLAUSE_TILE
:
11903 case OMP_CLAUSE_AUTO
:
11904 case OMP_CLAUSE_SEQ
:
11905 case OMP_CLAUSE_INDEPENDENT
:
11906 case OMP_CLAUSE_MERGEABLE
:
11907 case OMP_CLAUSE_PROC_BIND
:
11908 case OMP_CLAUSE_SAFELEN
:
11909 case OMP_CLAUSE_SIMDLEN
:
11910 case OMP_CLAUSE_NOGROUP
:
11911 case OMP_CLAUSE_THREADS
:
11912 case OMP_CLAUSE_SIMD
:
11913 case OMP_CLAUSE_BIND
:
11914 case OMP_CLAUSE_IF_PRESENT
:
11915 case OMP_CLAUSE_FINALIZE
:
11918 case OMP_CLAUSE_ORDER
:
11919 ctx
->order_concurrent
= true;
11922 case OMP_CLAUSE_DEFAULTMAP
:
11923 enum gimplify_defaultmap_kind gdmkmin
, gdmkmax
;
11924 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c
))
11926 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
:
11927 gdmkmin
= GDMK_SCALAR
;
11928 gdmkmax
= GDMK_POINTER
;
11930 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
:
11931 gdmkmin
= GDMK_SCALAR
;
11932 gdmkmax
= GDMK_SCALAR_TARGET
;
11934 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
:
11935 gdmkmin
= gdmkmax
= GDMK_AGGREGATE
;
11937 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
:
11938 gdmkmin
= gdmkmax
= GDMK_ALLOCATABLE
;
11940 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
:
11941 gdmkmin
= gdmkmax
= GDMK_POINTER
;
11944 gcc_unreachable ();
11946 for (int gdmk
= gdmkmin
; gdmk
<= gdmkmax
; gdmk
++)
11947 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c
))
11949 case OMP_CLAUSE_DEFAULTMAP_ALLOC
:
11950 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_ALLOC_ONLY
;
11952 case OMP_CLAUSE_DEFAULTMAP_TO
:
11953 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_TO_ONLY
;
11955 case OMP_CLAUSE_DEFAULTMAP_FROM
:
11956 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FROM_ONLY
;
11958 case OMP_CLAUSE_DEFAULTMAP_TOFROM
:
11959 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
11961 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
:
11962 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
11964 case OMP_CLAUSE_DEFAULTMAP_NONE
:
11965 ctx
->defaultmap
[gdmk
] = 0;
11967 case OMP_CLAUSE_DEFAULTMAP_DEFAULT
:
11971 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
11973 case GDMK_SCALAR_TARGET
:
11974 ctx
->defaultmap
[gdmk
] = (lang_GNU_Fortran ()
11975 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
11977 case GDMK_AGGREGATE
:
11978 case GDMK_ALLOCATABLE
:
11979 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
11982 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
11983 if (!lang_GNU_Fortran ())
11984 ctx
->defaultmap
[gdmk
] |= GOVD_MAP_0LEN_ARRAY
;
11987 gcc_unreachable ();
11991 gcc_unreachable ();
11995 case OMP_CLAUSE_ALIGNED
:
11996 decl
= OMP_CLAUSE_DECL (c
);
11997 if (error_operand_p (decl
))
12002 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
12003 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
12008 if (!is_global_var (decl
)
12009 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
12010 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
12013 case OMP_CLAUSE_NONTEMPORAL
:
12014 decl
= OMP_CLAUSE_DECL (c
);
12015 if (error_operand_p (decl
))
12020 omp_add_variable (ctx
, decl
, GOVD_NONTEMPORAL
);
12023 case OMP_CLAUSE_ALLOCATE
:
12024 decl
= OMP_CLAUSE_DECL (c
);
12025 if (error_operand_p (decl
))
12030 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
), pre_p
, NULL
,
12031 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
12036 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
12037 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
12040 else if (code
== OMP_TASKLOOP
12041 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
12042 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
12043 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
12044 pre_p
, NULL
, false);
12047 case OMP_CLAUSE_DEFAULT
:
12048 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
12051 case OMP_CLAUSE_INCLUSIVE
:
12052 case OMP_CLAUSE_EXCLUSIVE
:
12053 decl
= OMP_CLAUSE_DECL (c
);
12055 splay_tree_node n
= splay_tree_lookup (outer_ctx
->variables
,
12056 (splay_tree_key
) decl
);
12057 if (n
== NULL
|| (n
->value
& GOVD_REDUCTION
) == 0)
12059 error_at (OMP_CLAUSE_LOCATION (c
),
12060 "%qD specified in %qs clause but not in %<inscan%> "
12061 "%<reduction%> clause on the containing construct",
12062 decl
, omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
12067 n
->value
|= GOVD_REDUCTION_INSCAN
;
12068 if (outer_ctx
->region_type
== ORT_SIMD
12069 && outer_ctx
->outer_context
12070 && outer_ctx
->outer_context
->region_type
== ORT_WORKSHARE
)
12072 n
= splay_tree_lookup (outer_ctx
->outer_context
->variables
,
12073 (splay_tree_key
) decl
);
12074 if (n
&& (n
->value
& GOVD_REDUCTION
) != 0)
12075 n
->value
|= GOVD_REDUCTION_INSCAN
;
12081 case OMP_CLAUSE_NOHOST
:
12083 gcc_unreachable ();
12086 if (code
== OACC_DATA
12087 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12088 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12089 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12092 *list_p
= OMP_CLAUSE_CHAIN (c
);
12094 list_p
= &OMP_CLAUSE_CHAIN (c
);
12097 ctx
->clauses
= *orig_list_p
;
12098 gimplify_omp_ctxp
= ctx
;
12101 /* Return true if DECL is a candidate for shared to firstprivate
12102 optimization. We only consider non-addressable scalars, not
12103 too big, and not references. */
12106 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
12108 if (TREE_ADDRESSABLE (decl
))
12110 tree type
= TREE_TYPE (decl
);
12111 if (!is_gimple_reg_type (type
)
12112 || TREE_CODE (type
) == REFERENCE_TYPE
12113 || TREE_ADDRESSABLE (type
))
12115 /* Don't optimize too large decls, as each thread/task will have
12117 HOST_WIDE_INT len
= int_size_in_bytes (type
);
12118 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
12120 if (omp_privatize_by_reference (decl
))
/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
   For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
   GOVD_WRITTEN in outer contexts.  */

static void
omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
{
  for (; ctx; ctx = ctx->outer_context)
    {
      splay_tree_node n = splay_tree_lookup (ctx->variables,
					     (splay_tree_key) decl);
      if (n == NULL)
	continue;
      else if (n->value & GOVD_SHARED)
	{
	  n->value |= GOVD_WRITTEN;
	  return;
	}
      else if (n->value & GOVD_DATA_SHARE_CLASS)
	break;
    }
}
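
/* Added illustration (commentary, not original source): the walk goes
   outward, so in

     #pragma omp parallel shared(x)    // outer: x marked GOVD_WRITTEN
     {
       #pragma omp parallel shared(x)  // inner: x marked GOVD_WRITTEN
       x = 1;                          // the store that triggers marking
     }

   every enclosing context where x is GOVD_SHARED is tainted, and the
   walk stops once a context privatizes x.  */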
/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context.  */

static tree
omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  tree op = *tp;
  *walk_subtrees = 0;
  if (!wi->is_lhs)
    return NULL_TREE;
  while (true)
    {
      if (handled_component_p (op))
	op = TREE_OPERAND (op, 0);
      else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
	op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
      else
	break;
    }
  if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
    return NULL_TREE;

  omp_mark_stores (gimplify_omp_ctxp, op);
  return NULL_TREE;
}
/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context.  */

static tree
omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
		      bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  switch (gimple_code (stmt))
    {
    /* Don't recurse on OpenMP constructs for which
       gimplify_adjust_omp_clauses already handled the bodies,
       except handle gimple_omp_for_pre_body.  */
    case GIMPLE_OMP_FOR:
      *handled_ops_p = true;
      if (gimple_omp_for_pre_body (stmt))
	walk_gimple_seq (gimple_omp_for_pre_body (stmt),
			 omp_find_stores_stmt, omp_find_stores_op, wi);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_CRITICAL:
      *handled_ops_p = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;
  gimple_seq *pre_p;
};

/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
      && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
    flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
      if (VAR_P (decl)
	  && DECL_IN_CONSTANT_POOL (decl)
	  && !lookup_attribute ("omp declare target",
				DECL_ATTRIBUTES (decl)))
	{
	  tree id = get_identifier ("omp declare target");
	  DECL_ATTRIBUTES (decl)
	    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
	  varpool_node *node = varpool_node::get (decl);
	  if (node)
	    {
	      node->offloadable = 1;
	      if (ENABLE_OFFLOADING)
		g->have_offload = true;
	    }
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
      /* Don't optimize shared into firstprivate for read-only vars
	 on tasks with depend clause, we shouldn't try to copy them
	 until the dependencies are satisfied.  */
      if (gimplify_omp_ctxp->has_depend)
	flags |= GOVD_WRITTEN;
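      /* Added illustration (commentary, not original source): for

	   #pragma omp task shared(x) depend(in: x)
	   use (x);

	 x stays OMP_CLAUSE_SHARED even if it is only read, because
	 copying it into a firstprivate at task-creation time could run
	 before the dependence on x is satisfied.  */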
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
    return 0;
  else if (flags & GOVD_CONDTEMP)
    {
      code = OMP_CLAUSE__CONDTEMP_;
      gimple_add_tmp_var (decl);
    }
  else
    gcc_unreachable ();

  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
		       | GOVD_MAP_FORCE
		       | GOVD_MAP_FORCE_PRESENT
		       | GOVD_MAP_ALLOC_ONLY
		       | GOVD_MAP_FROM_ONLY))
	{
	case 0:
	  kind = GOMP_MAP_TOFROM;
	  break;
	case GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_TO_ONLY:
	  kind = GOMP_MAP_TO;
	  break;
	case GOVD_MAP_FROM_ONLY:
	  kind = GOMP_MAP_FROM;
	  break;
	case GOVD_MAP_ALLOC_ONLY:
	  kind = GOMP_MAP_ALLOC;
	  break;
	case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_FORCE_PRESENT:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
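      /* Added illustration (commentary, not original source): a decl
	 whose implicit mapping was restricted to allocation only
	 (GOVD_MAP | GOVD_MAP_ALLOC_ONLY, e.g. via a defaultmap request)
	 is lowered here to GOMP_MAP_ALLOC, while a plain implicit map
	 with none of the TO/FROM/ALLOC restrictions becomes
	 GOMP_MAP_TOFROM.  */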
      /* Setting of the implicit flag for the runtime is currently disabled
	 for OpenACC.  */
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
	OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && omp_privatize_by_reference (decl))
	{
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p,
					  (ctx->region_type & ORT_ACC) != 0);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
     in simd.  Those are only added for the local vars inside of simd body
     and they don't need to be e.g. default constructible.  */
  if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
    lang_hooks.decls.omp_finish_clause (clause, pre_p,
					(ctx->region_type & ORT_ACC) != 0);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
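
/* Added illustration (commentary, not original source): for

     int x = 1;
     #pragma omp parallel   // no explicit clause for x
     use (x);

   the splay-tree entry for x carries GOVD_SHARED | GOVD_SEEN, so the
   callback above materializes an implicit shared(x) clause; if x is
   never stored to in the region, OMP_CLAUSE_SHARED_READONLY is set,
   which later enables the shared -> firstprivate optimization.  */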
static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree *orig_list_p = list_p;
  tree c, decl = NULL_TREE;
  bool has_inscan_reductions = false;

  if (body)
    {
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
	  break;
      if (octx)
	{
	  struct walk_stmt_info wi;
	  memset (&wi, 0, sizeof (wi));
	  walk_gimple_seq (body, omp_find_stores_stmt,
			   omp_find_stores_op, &wi);
	}
    }

  if (ctx->add_safelen1)
    {
      /* If there are VLAs in the body of simd loop, prevent
	 vectorization.  */
      gcc_assert (ctx->region_type == ORT_SIMD);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
      OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
      OMP_CLAUSE_CHAIN (c) = *list_p;
      *list_p = c;
      list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (ctx->region_type == ORT_WORKSHARE
      && ctx->outer_context
      && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
    {
      for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	  {
	    decl = OMP_CLAUSE_DECL (c);
	    splay_tree_node n
	      = splay_tree_lookup (ctx->outer_context->variables,
				   (splay_tree_key) decl);
	    gcc_checking_assert (!splay_tree_lookup (ctx->variables,
						     (splay_tree_key) decl));
	    omp_add_variable (ctx, decl, n->value);
	    tree c2 = copy_node (c);
	    OMP_CLAUSE_CHAIN (c2) = *list_p;
	    *list_p = c2;
	    if ((n->value & GOVD_FIRSTPRIVATE) == 0)
	      {
		c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				       OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (c2) = decl;
		OMP_CLAUSE_CHAIN (c2) = *list_p;
		*list_p = c2;
	      }
	  }
    }

  tree attach_list = NULL_TREE;
  tree *attach_tail = &attach_list;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;
      bool move_attach = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if ((ctx->region_type & ORT_TARGET)
	      && (ctx->region_type & ORT_ACC) == 0
	      && TYPE_ATOMIC (strip_array_types
					(TREE_TYPE (OMP_CLAUSE_DECL (c)))))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<_Atomic%> %qD in %<firstprivate%> clause on "
			"%<target%> construct", OMP_CLAUSE_DECL (c));
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	    {
	      decl = OMP_CLAUSE_DECL (c);
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if ((n->value & GOVD_MAP) != 0)
		{
		  remove = true;
		  break;
		}
	      OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
	      OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = !(n->value & GOVD_SEEN);
	  if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
	      && code == OMP_PARALLEL
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	    remove = true;
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_SHARED));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && DECL_P (decl)
		  && ctx->has_depend)
		n->value |= GOVD_WRITTEN;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && (n->value & GOVD_WRITTEN) == 0
		  && DECL_P (decl)
		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		OMP_CLAUSE_SHARED_READONLY (c) = 1;
	      else if (DECL_P (decl)
		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			    && (n->value & GOVD_WRITTEN) != 0)
			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	    }
	  else
	    n->value &= ~GOVD_EXPLICIT;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (code == OMP_DISTRIBUTE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  if (!remove
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
	    remove = true;
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  while (TREE_CODE (decl) == INDIRECT_REF
		 || TREE_CODE (decl) == ARRAY_REF)
	    decl = TREE_OPERAND (decl, 0);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = n == NULL || !(n->value & GOVD_SEEN);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_NONTEMPORAL:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = n == NULL || !(n->value & GOVD_SEEN);
	  break;

	case OMP_CLAUSE_MAP:
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
	    {
	      remove = true;
	      break;
	    }
	  /* If we have a target region, we can push all the attaches to the
	     end of the list (we may have standalone "attach" operations
	     synthesized for GOMP_MAP_STRUCT nodes that must be processed after
	     the attachment point AND the pointed-to block have been mapped).
	     If we have something else, e.g. "enter data", we need to keep
	     "attach" nodes together with the previous node they attach to so
	     that separate "exit data" operations work properly (see
	     libgomp/target.c).  */
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
	    move_attach = true;
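	  /* Added illustration (commentary, not original source): on a
	     target construct such as

	       #pragma omp target map(s, s.ptr[0:n])

	     a synthesized attach node for s.ptr is deferred to the end
	     of the clause list, so the attachment happens only after
	     both s and the pointed-to block have been mapped; on
	     "enter data" the attach node stays next to its base
	     clause.  */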
	  decl = OMP_CLAUSE_DECL (c);
	  /* Data clauses associated with reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL
	      || ctx->region_type == ORT_ACC_SERIAL)
	    {
	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
	      n = NULL;

	      if (DECL_P (t))
		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);

	      if (n && (n->value & GOVD_REDUCTION))
		{
		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);

		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
		      && kind != GOMP_MAP_FORCE_PRESENT
		      && kind != GOMP_MAP_POINTER)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
				  "incompatible data clause with reduction "
				  "on %qE; promoting to %<present_or_copy%>",
				  DECL_NAME (t));
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
		    }
		}
	    }
	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
	      && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
	    {
	      remove = true;
	      break;
	    }
	  if (!DECL_P (decl))
	    {
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		}
	      break;
	    }
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
	      && (!is_global_var (decl)
		  || !lookup_attribute ("omp declare target link",
					DECL_ATTRIBUTES (decl))))
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_INSCAN (c))
	    {
	      decl = OMP_CLAUSE_DECL (c);
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
		{
		  remove = true;
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%qD specified in %<inscan%> %<reduction%> clause "
			    "but not in %<scan%> directive clause", decl);
		  break;
		}
	      has_inscan_reductions = true;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  /* OpenACC reductions need a present_or_copy data clause.
	     Add one if necessary.  Emit error when the reduction is private.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL
	      || ctx->region_type == ORT_ACC_SERIAL)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
		{
		  remove = true;
		  error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
			    "reduction on %qE", DECL_NAME (decl));
		}
	      else if ((n->value & GOVD_MAP) == 0)
		{
		  tree next = OMP_CLAUSE_CHAIN (c);
		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_CHAIN (c) = nc;
		  lang_hooks.decls.omp_finish_clause (nc, pre_p,
						      (ctx->region_type
						       & ORT_ACC) != 0);
		  while (1)
		    {
		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
			break;
		      nc = OMP_CLAUSE_CHAIN (nc);
		    }
		  OMP_CLAUSE_CHAIN (nc) = next;
		  n->value |= GOVD_MAP;
		}
	    }
	  if (DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
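	  /* Added illustration (commentary, not original source): for

	       #pragma acc parallel reduction(+:sum)

	     with no data clause for sum, an implicit map(tofrom:sum)
	     is created here; an explicit data clause that is not
	     tofrom-compatible is promoted with a warning in the MAP
	     case above.  */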
	  break;

	case OMP_CLAUSE_ALLOCATE:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if (n != NULL && !(n->value & GOVD_SEEN))
	    {
	      if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
		  != 0
		  && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
		remove = true;
	    }
	  if (!remove
	      && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
	      && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
	      && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
		  || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
		  || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
	    {
	      tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
	      if (n == NULL)
		{
		  enum omp_clause_default_kind default_kind
		    = ctx->default_kind;
		  ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
				       true);
		  ctx->default_kind = default_kind;
		}
	      else
		omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
				     true);
	    }
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DOACROSS:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  break;

	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else if (move_attach)
	{
	  /* Remove attach node from here, separate out into its own list.  */
	  *attach_tail = c;
	  *list_p = OMP_CLAUSE_CHAIN (c);
	  OMP_CLAUSE_CHAIN (c) = NULL_TREE;
	  attach_tail = &OMP_CLAUSE_CHAIN (c);
	}
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Splice attach nodes at the end of the list.  */
  if (attach_list)
    {
      *list_p = attach_list;
      list_p = attach_tail;
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
    {
      /* OpenMP.  Implicit clauses are added at the start of the clause list,
	 but after any non-map clauses.  */
      tree *implicit_add_list_p = orig_list_p;
      while (*implicit_add_list_p
	     && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
	implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
      data.list_p = implicit_add_list_p;
    }
  else
    /* OpenACC.  */
    data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  if (has_inscan_reductions)
    for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%<inscan%> %<reduction%> clause used together with "
		    "%<linear%> clause for a variable other than loop "
		    "iterator");
	  break;
	}

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
/* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
   -1 if unknown yet (simd is involved, won't be known until vectorization)
   and 1 if they do.  If SCORES is non-NULL, it should point to an array
   of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
   of the CONSTRUCTS (position -1 if it will never match) followed by
   number of constructs in the OpenMP context construct trait.  If the
   score depends on whether it will be in a declare simd clone or not,
   the function returns 2 and there will be two sets of the scores, the first
   one for the case that it is not in a declare simd clone, the other
   that it is in a declare simd clone.  */

int
omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
				int *scores)
{
  int matched = 0, cnt = 0;
  bool simd_seen = false;
  bool target_seen = false;
  int declare_simd_cnt = -1;
  auto_vec<enum tree_code, 16> codes;
  for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
    {
      if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
	  || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
	      == ORT_TARGET && ctx->code == OMP_TARGET)
	  || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
	  || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
	  || (ctx->region_type == ORT_SIMD
	      && ctx->code == OMP_SIMD
	      && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
	{
	  ++cnt;
	  if (scores)
	    codes.safe_push (ctx->code);
	  else if (matched < nconstructs && ctx->code == constructs[matched])
	    {
	      if (ctx->code == OMP_SIMD)
		{
		  if (matched)
		    return 0;
		  simd_seen = true;
		}
	      ++matched;
	    }
	  if (ctx->code == OMP_TARGET)
	    {
	      if (scores == NULL)
		return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
	      target_seen = true;
	      break;
	    }
	}
      else if (ctx->region_type == ORT_WORKSHARE
	       && ctx->code == OMP_LOOP
	       && ctx->outer_context
	       && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
	       && ctx->outer_context->outer_context
	       && ctx->outer_context->outer_context->code == OMP_LOOP
	       && ctx->outer_context->outer_context->distribute)
	ctx = ctx->outer_context->outer_context;
      ctx = ctx->outer_context;
    }
  if (!target_seen
      && lookup_attribute ("omp declare simd",
			   DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Declare simd is a maybe case, it is supposed to be added only to the
	 omp-simd-clone.cc added clones and not to the base function.  */
      declare_simd_cnt = cnt++;
      if (scores)
	codes.safe_push (OMP_SIMD);
      else if (cnt == 0
	       && constructs[0] == OMP_SIMD)
	{
	  gcc_assert (matched == 0);
	  simd_seen = true;
	  if (++matched == nconstructs)
	    return -1;
	}
    }
  if (tree attr = lookup_attribute ("omp declare variant variant",
				    DECL_ATTRIBUTES (current_function_decl)))
    {
      enum tree_code variant_constructs[5];
      int variant_nconstructs = 0;
      if (!target_seen)
	variant_nconstructs
	  = omp_constructor_traits_to_codes (TREE_VALUE (attr),
					     variant_constructs);
      for (int i = 0; i < variant_nconstructs; i++)
	{
	  ++cnt;
	  if (scores)
	    codes.safe_push (variant_constructs[i]);
	  else if (matched < nconstructs
		   && variant_constructs[i] == constructs[matched])
	    {
	      if (variant_constructs[i] == OMP_SIMD)
		{
		  if (matched)
		    return 0;
		  simd_seen = true;
		}
	      ++matched;
	    }
	}
    }
  if (!target_seen
      && lookup_attribute ("omp declare target block",
			   DECL_ATTRIBUTES (current_function_decl)))
    {
      if (scores)
	codes.safe_push (OMP_TARGET);
      else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
	++matched;
    }
  if (scores)
    {
      for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
	{
	  int j = codes.length () - 1;
	  for (int i = nconstructs - 1; i >= 0; i--)
	    {
	      while (j >= 0
		     && (pass != 0 || declare_simd_cnt != j)
		     && constructs[i] != codes[j])
		--j;
	      if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
		*scores++ = j - 1;
	      else
		*scores++ = j;
	    }
	  *scores++ = ((pass == 0 && declare_simd_cnt != -1)
		       ? codes.length () - 1 : codes.length ());
	}
      return declare_simd_cnt == -1 ? 1 : 2;
    }
  if (matched == nconstructs)
    return simd_seen ? -1 : 1;
  return 0;
}
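
/* Added illustration (commentary, not original source): with a context
   stack built from

     #pragma omp target teams
     #pragma omp parallel
     ...

   a selector like construct={target, parallel} matches in order and
   yields 1, construct={parallel, target} fails (order matters) and
   yields 0, and a matched selector involving simd can only be resolved
   during vectorization, so the result is -1.  */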
/* Gimplify OACC_CACHE.  */

static void
gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;

  gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
			     OACC_CACHE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
			       OACC_CACHE);

  /* TODO: Do something sensible with this information.  */

  *expr_p = NULL_TREE;
}
/* Helper function of gimplify_oacc_declare.  The helper's purpose is to,
   if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
   kind.  The entry kind will replace the one in CLAUSE, while the exit
   kind will be used in a new omp_clause and returned to the caller.  */

static tree
gimplify_oacc_declare_1 (tree clause)
{
  HOST_WIDE_INT kind, new_op;
  bool ret = false;
  tree c = NULL;

  kind = OMP_CLAUSE_MAP_KIND (clause);

  switch (kind)
    {
    case GOMP_MAP_ALLOC:
      new_op = GOMP_MAP_RELEASE;
      ret = true;
      break;

    case GOMP_MAP_FROM:
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
      new_op = GOMP_MAP_FROM;
      ret = true;
      break;

    case GOMP_MAP_TOFROM:
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
      new_op = GOMP_MAP_FROM;
      ret = true;
      break;

    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_POINTER:
    case GOMP_MAP_TO:
      break;

    default:
      gcc_unreachable ();
      break;
    }

  if (ret)
    {
      c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c, new_op);
      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
    }

  return c;
}
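
/* Added illustration (commentary, not original source): for
   "#pragma acc declare copyout(a)" the GOMP_MAP_FROM kind is rewritten
   to GOMP_MAP_FORCE_ALLOC for the entry, and the returned exit clause
   gets GOMP_MAP_FROM, so a is allocated on entry to the scope and
   copied back on exit; create(a) similarly pairs an allocation with a
   GOMP_MAP_RELEASE exit clause, and copy(a) becomes TO on entry plus
   FROM on exit.  */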
/* Gimplify OACC_DECLARE.  */

static void
gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gomp_target *stmt;
  tree clauses, t, decl;

  clauses = OACC_DECLARE_CLAUSES (expr);

  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);

  for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
    {
      decl = OMP_CLAUSE_DECL (t);

      if (TREE_CODE (decl) == MEM_REF)
	decl = TREE_OPERAND (decl, 0);

      if (VAR_P (decl) && !is_oacc_declared (decl))
	{
	  tree attr = get_identifier ("oacc declare target");
	  DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
					      DECL_ATTRIBUTES (decl));
	}

      if (VAR_P (decl)
	  && !is_global_var (decl)
	  && DECL_CONTEXT (decl) == current_function_decl)
	{
	  tree c = gimplify_oacc_declare_1 (t);
	  if (c)
	    {
	      if (oacc_declare_returns == NULL)
		oacc_declare_returns = new hash_map<tree, tree>;

	      oacc_declare_returns->put (decl, c);
	    }
	}

      if (gimplify_omp_ctxp)
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
    }

  stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				  clauses);

  gimplify_seq_add_stmt (pre_p, stmt);

  *expr_p = NULL_TREE;
}
/* Gimplify the contents of an OMP_PARALLEL statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */

static void
gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *g;
  gimple_seq body = NULL;

  gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
			     OMP_PARALLEL_COMBINED (expr)
			     ? ORT_COMBINED_PARALLEL
			     : ORT_PARALLEL, OMP_PARALLEL);

  push_gimplify_context ();

  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
  if (gimple_code (g) == GIMPLE_BIND)
    pop_gimplify_context (g);
  else
    pop_gimplify_context (NULL);

  gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
			       OMP_PARALLEL);

  g = gimple_build_omp_parallel (body,
				 OMP_PARALLEL_CLAUSES (expr),
				 NULL_TREE, NULL_TREE);
  if (OMP_PARALLEL_COMBINED (expr))
    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
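
/* Added illustration (commentary, not original source): a statement
   such as

     #pragma omp parallel num_threads(4)
     body;

   is lowered here to a GIMPLE_OMP_PARALLEL whose body has been
   gimplified and scanned, with the clause list rewritten by
   gimplify_adjust_omp_clauses to include any implicit data sharing.  */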
/* Gimplify the contents of an OMP_TASK statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */

static void
gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *g;
  gimple_seq body = NULL;
  bool nowait = false;
  bool has_depend = false;

  if (OMP_TASK_BODY (expr) == NULL_TREE)
    {
      for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
	  {
	    has_depend = true;
	    if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<mutexinoutset%> kind in %<depend%> clause on a "
			  "%<taskwait%> construct");
		*expr_p = NULL_TREE;
		return;
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
	  nowait = true;
      if (nowait && !has_depend)
	{
	  error_at (EXPR_LOCATION (expr),
		    "%<taskwait%> construct with %<nowait%> clause but no "
		    "%<depend%> clauses");
	  *expr_p = NULL_TREE;
	  return;
	}
    }

  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
			     omp_find_clause (OMP_TASK_CLAUSES (expr),
					      OMP_CLAUSE_UNTIED)
			     ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);

  if (OMP_TASK_BODY (expr))
    {
      push_gimplify_context ();

      g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
    }

  gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
			       OMP_TASK);

  g = gimple_build_omp_task (body,
			     OMP_TASK_CLAUSES (expr),
			     NULL_TREE, NULL_TREE,
			     NULL_TREE, NULL_TREE, NULL_TREE);
  if (OMP_TASK_BODY (expr) == NULL_TREE)
    gimple_omp_task_set_taskwait_p (g, true);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
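
/* Added illustration (commentary, not original source): a body-less
   task such as

     #pragma omp taskwait depend(in: x)

   reaches this function with OMP_TASK_BODY == NULL_TREE and is emitted
   as a GIMPLE_OMP_TASK with taskwait_p set, while a taskwait with
   nowait but no depend clause is rejected above.  */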
/* Helper function for gimplify_omp_for.  If *TP is not a gimple constant,
   force it into a temporary initialized in PRE_P and add firstprivate clause
   to ORIG_FOR_STMT.  */

static void
gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
			    tree orig_for_stmt)
{
  if (*tp == NULL || is_gimple_constant (*tp))
    return;

  *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
  /* Reference to pointer conversion is considered useless,
     but is significant for firstprivate clause.  Force it
     here.  */
  if (type
      && TREE_CODE (type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
    {
      tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
      tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
      gimplify_and_add (m, pre_p);
      *tp = v;
    }

  tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
  OMP_CLAUSE_DECL (c) = *tp;
  OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
  OMP_FOR_CLAUSES (orig_for_stmt) = c;
}
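
/* Added illustration (commentary, not original source): for

     #pragma omp taskloop
     for (i = 0; i < n; i += step)

   the non-constant n and step are evaluated once into temporaries
   before the taskloop, and those temporaries are added as firstprivate
   clauses, so every generated task sees the values from taskloop
   creation time.  */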
/* Helper function of gimplify_omp_for, find OMP_ORDERED with
   null OMP_ORDERED_BODY inside of OMP_FOR's body.  */

static tree
find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
{
  switch (TREE_CODE (*tp))
    {
    case OMP_ORDERED:
      if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
	return *tp;
      break;
    case OMP_SIMD:
    case OMP_PARALLEL:
    case OMP_TARGET:
      *walk_subtrees = 0;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
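
/* Added illustration (commentary, not original source): the walker
   above matches the stand-alone (body-less) ordered form, e.g.

     #pragma omp for ordered(2)
     for (...)
       for (...)
	 {
	   #pragma omp ordered depend(sink: i-1, j)
	   ...
	   #pragma omp ordered depend(source)
	 }

   which makes gimplify_omp_for treat the loop as a doacross loop.  */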
/* Gimplify the gross structure of an OMP_FOR statement.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gomp_for *gfor;
  gimple_seq for_body, for_pre_body;
  int i;
  bitmap has_decl_expr = NULL;
  enum omp_region_type ort = ORT_WORKSHARE;
  bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;

  orig_for_stmt = for_stmt = *expr_p;

  bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
		 != NULL_TREE);
  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    {
      tree *data[4] = { NULL, NULL, NULL, NULL };
      gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
      inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
				  find_combined_omp_for, data, NULL);
      if (inner_for_stmt == NULL_TREE)
	{
	  gcc_assert (seen_error ());
	  *expr_p = NULL_TREE;
	  return GS_ERROR;
	}
      if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
	{
	  append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
					  &OMP_FOR_PRE_BODY (for_stmt));
	  OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
	}
      if (OMP_FOR_PRE_BODY (inner_for_stmt))
	{
	  append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
					  &OMP_FOR_PRE_BODY (for_stmt));
	  OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
	}

      if (data[1])
	{
	  /* We have some statements or variable declarations in between
	     the composite construct directives.  Move them around the
	     inner_for_stmt.  */
	  data[0] = expr_p;
	  for (i = 0; i < 3; i++)
	    if (data[i])
	      {
		tree t = *data[i];
		if (i < 2 && data[i + 1] == &OMP_BODY (t))
		  data[i + 1] = data[i];
		*data[i] = OMP_BODY (t);
		tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
				    NULL_TREE, make_node (BLOCK));
		OMP_BODY (t) = body;
		append_to_statement_list_force (inner_for_stmt,
						&BIND_EXPR_BODY (body));
		*data[3] = t;
		data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
		gcc_assert (*data[3] == inner_for_stmt);
	      }
	  return GS_OK;
	}

      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
	if (!loop_p
	    && OMP_FOR_ORIG_DECLS (inner_for_stmt)
	    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					i)) == TREE_LIST
	    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					   i)))
	  {
	    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
	    /* Class iterators aren't allowed on OMP_SIMD, so the only
	       case we need to solve is distribute parallel for.  They are
	       allowed on the loop construct, but that is already handled
	       in gimplify_omp_loop.  */
	    gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
			&& TREE_CODE (for_stmt) == OMP_DISTRIBUTE
			&& data[1]);
	    tree orig_decl = TREE_PURPOSE (orig);
	    tree last = TREE_VALUE (orig);
	    tree *pc;
	    for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
		 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
	      if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
		   || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
		  && OMP_CLAUSE_DECL (*pc) == orig_decl)
		break;
	    if (*pc == NULL_TREE)
	      {
		tree *spc;
		for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
		     *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
		  if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
		      && OMP_CLAUSE_DECL (*spc) == orig_decl)
		    break;
		if (*spc)
		  {
		    tree c = *spc;
		    *spc = OMP_CLAUSE_CHAIN (c);
		    OMP_CLAUSE_CHAIN (c) = NULL_TREE;
		    *pc = c;
		  }
	      }
	    if (*pc == NULL_TREE)
	      ;
	    else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
	      {
		/* private clause will appear only on inner_for_stmt.
		   Change it into firstprivate, and add private clause
		   on for_stmt.  */
		tree c = copy_node (*pc);
		OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
		OMP_FOR_CLAUSES (for_stmt) = c;
		OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
		lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
	      }
	    else
	      {
		/* lastprivate clause will appear on both inner_for_stmt
		   and for_stmt.  Add firstprivate clause to
		   inner_for_stmt.  */
		tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
					   OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
		OMP_CLAUSE_CHAIN (c) = *pc;
		*pc = c;
		lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
	      }
	    tree c = build_omp_clause (UNKNOWN_LOCATION,
				       OMP_CLAUSE_FIRSTPRIVATE);
	    OMP_CLAUSE_DECL (c) = last;
	    OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
	    OMP_PARALLEL_CLAUSES (*data[1]) = c;
	    c = build_omp_clause (UNKNOWN_LOCATION,
				  *pc ? OMP_CLAUSE_SHARED
				      : OMP_CLAUSE_FIRSTPRIVATE);
	    OMP_CLAUSE_DECL (c) = orig_decl;
	    OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
	    OMP_PARALLEL_CLAUSES (*data[1]) = c;
	  }

      /* Similarly, take care of C++ range for temporaries, those should
	 be firstprivate on OMP_PARALLEL if any.  */
      if (data[1])
	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
	  if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
	      && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					  i)) == TREE_LIST
	      && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
					   i)))
	    {
	      tree orig
		= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
	      tree v = TREE_CHAIN (orig);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE_FIRSTPRIVATE);
	      /* First add firstprivate clause for the __for_end artificial
		 decl.  */
	      OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
	      if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
		  == REFERENCE_TYPE)
		OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
	      OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
	      OMP_PARALLEL_CLAUSES (*data[1]) = c;
	      if (TREE_VEC_ELT (v, 0))
		{
		  /* And now the same for __for_range artificial decl if it
		     exists.  */
		  c = build_omp_clause (UNKNOWN_LOCATION,
					OMP_CLAUSE_FIRSTPRIVATE);
		  OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
		  if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
		      == REFERENCE_TYPE)
		    OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
		  OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
		  OMP_PARALLEL_CLAUSES (*data[1]) = c;
		}
	    }
    }

  switch (TREE_CODE (for_stmt))
    {
    case OMP_FOR:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
	{
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			       OMP_CLAUSE_SCHEDULE))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "schedule", lang_GNU_Fortran () ? "do" : "for");
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "ordered", lang_GNU_Fortran () ? "do" : "for");
	}
      break;
    case OMP_DISTRIBUTE:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
	  && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			      OMP_CLAUSE_DIST_SCHEDULE))
	error_at (EXPR_LOCATION (for_stmt),
		  "%qs clause may not appear on non-rectangular %qs",
		  "dist_schedule", "distribute");
      break;
    case OACC_LOOP:
      ort = ORT_ACC;
      break;
    case OMP_TASKLOOP:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
	{
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			       OMP_CLAUSE_GRAINSIZE))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "grainsize", "taskloop");
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
			       OMP_CLAUSE_NUM_TASKS))
	    error_at (EXPR_LOCATION (for_stmt),
		      "%qs clause may not appear on non-rectangular %qs",
		      "num_tasks", "taskloop");
	}
      if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
	ort = ORT_UNTIED_TASKLOOP;
      else
	ort = ORT_TASKLOOP;
      break;
    case OMP_SIMD:
      ort = ORT_SIMD;
      break;
    default:
      gcc_unreachable ();
    }

  /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
     clause for the IV.  */
  if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	    && OMP_CLAUSE_DECL (c) == decl)
	  {
	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	    break;
	  }
    }
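
  /* Added illustration (commentary, not original source): for

       #pragma omp simd linear(i)
       for (i = 0; i < n; i++)

     the explicit linear clause for the single IV gets
     OMP_CLAUSE_LINEAR_NO_COPYIN, since the initial value comes from the
     loop init expression rather than being copied in.  */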
13797 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
13798 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
13799 loop_p
&& TREE_CODE (for_stmt
) != OMP_SIMD
13800 ? OMP_LOOP
: TREE_CODE (for_stmt
));
13802 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
13803 gimplify_omp_ctxp
->distribute
= true;
13805 /* Handle OMP_FOR_INIT. */
13806 for_pre_body
= NULL
;
13807 if ((ort
== ORT_SIMD
13808 || (inner_for_stmt
&& TREE_CODE (inner_for_stmt
) == OMP_SIMD
))
13809 && OMP_FOR_PRE_BODY (for_stmt
))
13811 has_decl_expr
= BITMAP_ALLOC (NULL
);
13812 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
13813 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
13816 t
= OMP_FOR_PRE_BODY (for_stmt
);
13817 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
13819 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
13821 tree_stmt_iterator si
;
13822 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
13826 if (TREE_CODE (t
) == DECL_EXPR
13827 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
13828 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
13832 if (OMP_FOR_PRE_BODY (for_stmt
))
13834 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
13835 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
13838 struct gimplify_omp_ctx ctx
;
13839 memset (&ctx
, 0, sizeof (ctx
));
13840 ctx
.region_type
= ORT_NONE
;
13841 gimplify_omp_ctxp
= &ctx
;
13842 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
13843 gimplify_omp_ctxp
= NULL
;
13846 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
13848 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
13849 for_stmt
= inner_for_stmt
;
13851 /* For taskloop, need to gimplify the start, end and step before the
13852 taskloop, outside of the taskloop omp context. */
13853 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
13855 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
13857 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
13858 gimple_seq
*for_pre_p
= (gimple_seq_empty_p (for_pre_body
)
13859 ? pre_p
: &for_pre_body
);
13860 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
13861 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
13863 tree v
= TREE_OPERAND (t
, 1);
13864 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
13865 for_pre_p
, orig_for_stmt
);
13866 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
13867 for_pre_p
, orig_for_stmt
);
13870 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
13873 /* Handle OMP_FOR_COND. */
13874 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
13875 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
13877 tree v
= TREE_OPERAND (t
, 1);
13878 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
13879 for_pre_p
, orig_for_stmt
);
13880 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
13881 for_pre_p
, orig_for_stmt
);
13884 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
13887 /* Handle OMP_FOR_INCR. */
13888 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
13889 if (TREE_CODE (t
) == MODIFY_EXPR
)
13891 decl
= TREE_OPERAND (t
, 0);
13892 t
= TREE_OPERAND (t
, 1);
13893 tree
*tp
= &TREE_OPERAND (t
, 1);
13894 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
13895 tp
= &TREE_OPERAND (t
, 0);
13897 gimplify_omp_taskloop_expr (NULL_TREE
, tp
, for_pre_p
,
13902 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
13906 if (orig_for_stmt
!= for_stmt
)
13907 gimplify_omp_ctxp
->combined_loop
= true;
13910 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
13911 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
13912 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
13913 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
13915 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
13916 bool is_doacross
= false;
13917 if (c
&& walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt
),
13918 find_standalone_omp_ordered
, NULL
))
13920 OMP_CLAUSE_ORDERED_DOACROSS (c
) = 1;
13921 is_doacross
= true;
13922 int len
= TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
));
13923 gimplify_omp_ctxp
->loop_iter_var
.create (len
* 2);
13924 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
13925 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LINEAR
)
13927 error_at (OMP_CLAUSE_LOCATION (*pc
),
13928 "%<linear%> clause may not be specified together "
13929 "with %<ordered%> clause if stand-alone %<ordered%> "
13930 "construct is nested in it");
13931 *pc
= OMP_CLAUSE_CHAIN (*pc
);
13934 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13936 int collapse
= 1, tile
= 0;
13937 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
13939 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
13940 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
13942 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
13943 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ALLOCATE
);
13944 hash_set
<tree
> *allocate_uids
= NULL
;
13947 allocate_uids
= new hash_set
<tree
>;
13948 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
13949 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
)
13950 allocate_uids
->add (OMP_CLAUSE_DECL (c
));
13952 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
13954 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
13955 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
13956 decl
= TREE_OPERAND (t
, 0);
13957 gcc_assert (DECL_P (decl
));
13958 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
13959 || POINTER_TYPE_P (TREE_TYPE (decl
)));
13962 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
13964 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
13965 if (TREE_CODE (orig_decl
) == TREE_LIST
)
13967 orig_decl
= TREE_PURPOSE (orig_decl
);
13971 gimplify_omp_ctxp
->loop_iter_var
.quick_push (orig_decl
);
13974 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
13975 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
13978 if (for_stmt
== orig_for_stmt
)
13980 tree orig_decl
= decl
;
13981 if (OMP_FOR_ORIG_DECLS (for_stmt
))
13983 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
13984 if (TREE_CODE (orig_decl
) == TREE_LIST
)
13986 orig_decl
= TREE_PURPOSE (orig_decl
);
13991 if (is_global_var (orig_decl
) && DECL_THREAD_LOCAL_P (orig_decl
))
13992 error_at (EXPR_LOCATION (for_stmt
),
13993 "threadprivate iteration variable %qD", orig_decl
);
      /* Make sure the iteration variable is private.  */
      tree c = NULL_TREE;
      tree c2 = NULL_TREE;
      if (orig_for_stmt != for_stmt)
	{
	  /* Preserve this information until we gimplify the inner simd.  */
	  if (has_decl_expr
	      && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
	    TREE_PRIVATE (t) = 1;
	}
      else if (ort == ORT_SIMD)
	{
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key) decl);
	  omp_is_private (gimplify_omp_ctxp, decl,
			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
			       != 1));
	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
	    {
	      omp_notice_variable (gimplify_omp_ctxp, decl, true);
	      if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
		for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
						OMP_CLAUSE_LASTPRIVATE);
		     c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
					       OMP_CLAUSE_LASTPRIVATE))
		  if (OMP_CLAUSE_DECL (c3) == decl)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c3), 0,
				  "conditional %<lastprivate%> on loop "
				  "iterator %qD ignored", decl);
		      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
		      n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
		    }
	    }
	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
	    {
	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
	      if ((has_decl_expr
		   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
		  || TREE_PRIVATE (t))
		{
		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
		}
	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		{
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
			{
			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
			}
		      else
			{
			  struct gimplify_omp_ctx *octx = outer->outer_context;
			  if (octx
			      && octx->region_type == ORT_COMBINED_PARALLEL
			      && octx->outer_context
			      && (octx->outer_context->region_type
				  == ORT_WORKSHARE)
			      && octx->outer_context->combined_loop)
			    {
			      octx = octx->outer_context;
			      n = splay_tree_lookup (octx->variables,
						     (splay_tree_key)decl);
			      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
				{
				  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
				  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
				}
			    }
			}
		    }
		}

	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		omp_lastprivate_for_combined_outer_constructs (outer, decl,
							       true);
	    }
	  else
	    {
	      bool lastprivate
		= (!has_decl_expr
		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
	      if (TREE_PRIVATE (t))
		lastprivate = false;
	      if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
		{
		  tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		  if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
		    lastprivate = false;
		}

	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && lastprivate)
		omp_lastprivate_for_combined_outer_constructs (outer, decl,
							       true);

	      c = build_omp_clause (input_location,
				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
						: OMP_CLAUSE_PRIVATE);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl,
				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
				| GOVD_EXPLICIT | GOVD_SEEN);
	    }
	}
      else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
	{
	  omp_notice_variable (gimplify_omp_ctxp, decl, true);
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key) decl);
	  if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
	    for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
					    OMP_CLAUSE_LASTPRIVATE);
		 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
					   OMP_CLAUSE_LASTPRIVATE))
	      if (OMP_CLAUSE_DECL (c3) == decl)
		{
		  warning_at (OMP_CLAUSE_LOCATION (c3), 0,
			      "conditional %<lastprivate%> on loop "
			      "iterator %qD ignored", decl);
		  OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
		  n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
		}
	}
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  Similarly for any iteration vars
	 in simd with collapse > 1 where the iterator vars must be
	 lastprivate.  And similarly for vars mentioned in allocate clauses.  */
      if (orig_for_stmt != for_stmt)
	var = decl;
      else if (!is_gimple_reg (decl)
	       || (ort == ORT_SIMD
		   && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
	       || (allocate_uids && allocate_uids->contains (decl)))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Make sure omp_add_variable is not called on it prematurely.
	     We call it ourselves a few lines later.  */
	  gimplify_omp_ctxp = NULL;
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  gimplify_omp_ctxp = ctx;
	  TREE_OPERAND (t, 0) = var;

	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  if (ort == ORT_SIMD
	      && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
	    {
	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
	      OMP_CLAUSE_DECL (c2) = var;
	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c2;
	      omp_add_variable (gimplify_omp_ctxp, var,
				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
	      if (c == NULL_TREE)
		{
		  c = c2;
		  c2 = NULL_TREE;
		}
	    }
	  else
	    omp_add_variable (gimplify_omp_ctxp, var,
			      GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      gimplify_omp_ctxp->in_for_exprs = true;
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  tree lb = TREE_OPERAND (t, 1);
	  tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	}
      else
	tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			      is_gimple_val, fb_rvalue, false);
      gimplify_omp_ctxp->in_for_exprs = false;
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;
      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      gimplify_omp_ctxp->in_for_exprs = true;
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  tree ub = TREE_OPERAND (t, 1);
	  tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	}
      else
	tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			      is_gimple_val, fb_rvalue, false);
      gimplify_omp_ctxp->in_for_exprs = false;
      ret = MIN (ret, tret);
      /* Handle OMP_FOR_INCR.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  {
	    tree decl = TREE_OPERAND (t, 0);
	    /* c_omp_for_incr_canonicalize_ptr() should have been
	       called to massage things appropriately.  */
	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));

	    if (orig_for_stmt != for_stmt)
	      break;
	    t = build_int_cst (TREE_TYPE (decl), 1);
	    if (c)
	      OMP_CLAUSE_LINEAR_STEP (c) = t;
	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	    break;
	  }

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* c_omp_for_incr_canonicalize_ptr() should have been
	     called to massage things appropriately.  */
	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
	  if (orig_for_stmt != for_stmt)
	    break;
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  if (c)
	    OMP_CLAUSE_LINEAR_STEP (c) = t;
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  gimplify_omp_ctxp->in_for_exprs = true;
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  if (c)
	    {
	      tree step = TREE_OPERAND (t, 1);
	      tree stept = TREE_TYPE (decl);
	      if (POINTER_TYPE_P (stept))
		stept = sizetype;
	      step = fold_convert (stept, step);
	      if (TREE_CODE (t) == MINUS_EXPR)
		step = fold_build1 (NEGATE_EXPR, stept, step);
	      OMP_CLAUSE_LINEAR_STEP (c) = step;
	      if (step != TREE_OPERAND (t, 1))
		{
		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
					&for_pre_body, NULL,
					is_gimple_val, fb_rvalue, false);
		  ret = MIN (ret, tret);
		}
	    }
	  gimplify_omp_ctxp->in_for_exprs = false;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (c2)
	{
	  gcc_assert (c);
	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
	}
      if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
	{
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
		&& OMP_CLAUSE_DECL (c) == decl)
	      {
		if (is_doacross && (collapse == 1 || i >= collapse))
		  t = var;
		else
		  {
		    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = TREE_OPERAND (t, 1);
		    gcc_assert (TREE_CODE (t) == PLUS_EXPR
				|| TREE_CODE (t) == MINUS_EXPR
				|| TREE_CODE (t) == POINTER_PLUS_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = build2 (TREE_CODE (t), TREE_TYPE (decl),
				is_doacross ? var : decl,
				TREE_OPERAND (t, 1));
		  }
		gimple_seq *seq;
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
		else
		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
		push_gimplify_context ();
		gimplify_assign (decl, t, seq);
		gimple *bind = NULL;
		if (gimplify_ctxp->temps)
		  {
		    bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
		    *seq = NULL;
		    gimplify_seq_add_stmt (seq, bind);
		  }
		pop_gimplify_context (bind);
	      }
	}
      if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
	for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
	  {
	    t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
	    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	    if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		&& TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
	      TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
	    t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
	    gcc_assert (COMPARISON_CLASS_P (t));
	    if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		&& TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
	      TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
	  }
    }

  BITMAP_FREE (has_decl_expr);
  delete allocate_uids;
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
      || (loop_p && orig_for_stmt == for_stmt))
    {
      push_gimplify_context ();
      if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
	{
	  OMP_FOR_BODY (orig_for_stmt)
	    = build3 (BIND_EXPR, void_type_node, NULL,
		      OMP_FOR_BODY (orig_for_stmt), NULL);
	  TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
	}
    }

  gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
					 &for_body);

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
      || (loop_p && orig_for_stmt == for_stmt))
    {
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
    }

  if (orig_for_stmt != for_stmt)
    for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
      {
	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	decl = TREE_OPERAND (t, 0);
	struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
	  gimplify_omp_ctxp = ctx->outer_context;
	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	gimplify_omp_ctxp = ctx;
	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	TREE_OPERAND (t, 0) = var;
	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
	if (OMP_FOR_NON_RECTANGULAR (for_stmt))
	  for (int j = i + 1;
	       j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
	    {
	      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
	      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		  && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
		{
		  TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
		  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
		}
	      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
	      gcc_assert (COMPARISON_CLASS_P (t));
	      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
		  && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
		{
		  TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
		  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
		}
	    }
      }

  gimplify_adjust_omp_clauses (pre_p, for_body,
			       &OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_CODE (orig_for_stmt));

  int kind;
  switch (TREE_CODE (orig_for_stmt))
    {
    case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
    case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
    case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
    case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
    case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
    default:
      gcc_unreachable ();
    }
  if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
    {
      gimplify_seq_add_seq (pre_p, for_pre_body);
      for_pre_body = NULL;
    }
  gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);
  if (orig_for_stmt != for_stmt)
    gimple_omp_for_set_combined_p (gfor, true);
  if (gimplify_omp_ctxp
      && (gimplify_omp_ctxp->combined_loop
	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
	      && gimplify_omp_ctxp->outer_context
	      && gimplify_omp_ctxp->outer_context->combined_loop)))
    {
      gimple_omp_for_set_combined_into_p (gfor, true);
      if (gimplify_omp_ctxp->combined_loop)
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
      else
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
    }

  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
     constructs with GIMPLE_OMP_TASK sandwiched in between them.
     The outer taskloop stands for computing the number of iterations,
     counts for collapsed loops and holding taskloop specific clauses.
     The task construct stands for the effect of data sharing on the
     explicit task it creates and the inner taskloop stands for expansion
     of the static loop inside of the explicit task construct.  */
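
  /* A simplified sketch of the resulting nesting for
     '#pragma omp taskloop' over a single loop:

       GIMPLE_OMP_FOR (taskloop, outer; grainsize/num_tasks/... clauses)
	 GIMPLE_OMP_TASK (shared/firstprivate/... clauses)
	   GIMPLE_OMP_FOR (taskloop, inner; collapse/lastprivate/... clauses)
	     loop body

     The clause redistribution below decides which of the three constructs
     each original clause ends up on.  */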
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
      tree task_clauses = NULL_TREE;
      tree c = *gfor_clauses_ptr;
      tree *gtask_clauses_ptr = &task_clauses;
      tree outer_for_clauses = NULL_TREE;
      tree *gforo_clauses_ptr = &outer_for_clauses;
      bitmap lastprivate_uids = NULL;
      if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
	{
	  c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
	  if (c)
	    {
	      lastprivate_uids = BITMAP_ALLOC (NULL);
	      for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
					     OMP_CLAUSE_LASTPRIVATE))
		bitmap_set_bit (lastprivate_uids,
				DECL_UID (OMP_CLAUSE_DECL (c)));
	    }
	  c = *gfor_clauses_ptr;
	}
      for (; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  /* These clauses are allowed on task, move them there.  */
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_DEFAULT:
	  case OMP_CLAUSE_IF:
	  case OMP_CLAUSE_UNTIED:
	  case OMP_CLAUSE_FINAL:
	  case OMP_CLAUSE_MERGEABLE:
	  case OMP_CLAUSE_PRIORITY:
	  case OMP_CLAUSE_REDUCTION:
	  case OMP_CLAUSE_IN_REDUCTION:
	    *gtask_clauses_ptr = c;
	    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
	      {
		/* We want private on outer for and firstprivate
		   on task.  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
						    false);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		*gforo_clauses_ptr = c;
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    else
	      {
		*gtask_clauses_ptr = c;
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    break;
	  /* These clauses go into outer taskloop clauses.  */
	  case OMP_CLAUSE_GRAINSIZE:
	  case OMP_CLAUSE_NUM_TASKS:
	  case OMP_CLAUSE_NOGROUP:
	    *gforo_clauses_ptr = c;
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  /* Collapse clause we duplicate on both taskloops.  */
	  case OMP_CLAUSE_COLLAPSE:
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gforo_clauses_ptr = copy_node (c);
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	    break;
	  /* For lastprivate, keep the clause on inner taskloop, and add
	     a shared clause on task.  If the same decl is also firstprivate,
	     add also firstprivate clause on the inner taskloop.  */
	  case OMP_CLAUSE_LASTPRIVATE:
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
	      {
		/* For taskloop C++ lastprivate IVs, we want:
		   1) private on outer taskloop
		   2) firstprivate and shared on task
		   3) lastprivate on inner taskloop  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
						    false);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
		*gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						       OMP_CLAUSE_PRIVATE);
		OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
		OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
		TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	      }
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gtask_clauses_ptr
	      = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
	    OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	      OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
	    gtask_clauses_ptr
	      = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	    break;
	  /* Allocate clause we duplicate on task and inner taskloop
	     if the decl is lastprivate, otherwise just put on task.  */
	  case OMP_CLAUSE_ALLOCATE:
	    if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
		&& DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
	      {
		/* Additionally, put firstprivate clause on task
		   for the allocator if it is not constant.  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr)
		  = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	      }
	    if (lastprivate_uids
		&& bitmap_bit_p (lastprivate_uids,
				 DECL_UID (OMP_CLAUSE_DECL (c))))
	      {
		*gfor_clauses_ptr = c;
		gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
		*gtask_clauses_ptr = copy_node (c);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	      }
	    else
	      {
		*gtask_clauses_ptr = c;
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *gfor_clauses_ptr = NULL_TREE;
      *gtask_clauses_ptr = NULL_TREE;
      *gforo_clauses_ptr = NULL_TREE;
      BITMAP_FREE (lastprivate_uids);
      gimple_set_location (gfor, input_location);
      g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
      g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
				 NULL_TREE, NULL_TREE, NULL_TREE);
      gimple_set_location (g, input_location);
      gimple_omp_task_set_taskloop_p (g, true);
      g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
      gomp_for *gforo
	= gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
				gimple_omp_for_collapse (gfor),
				gimple_omp_for_pre_body (gfor));
      gimple_omp_for_set_pre_body (gfor, NULL);
      gimple_omp_for_set_combined_p (gforo, true);
      gimple_omp_for_set_combined_into_p (gfor, true);
      for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
	{
	  tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
	  tree v = create_tmp_var (type);
	  gimple_omp_for_set_index (gforo, i, v);
	  t = unshare_expr (gimple_omp_for_initial (gfor, i));
	  gimple_omp_for_set_initial (gforo, i, t);
	  gimple_omp_for_set_cond (gforo, i,
				   gimple_omp_for_cond (gfor, i));
	  t = unshare_expr (gimple_omp_for_final (gfor, i));
	  gimple_omp_for_set_final (gforo, i, t);
	  t = unshare_expr (gimple_omp_for_incr (gfor, i));
	  gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
	  TREE_OPERAND (t, 0) = v;
	  gimple_omp_for_set_incr (gforo, i, t);
	  t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (t) = v;
	  OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
	  gimple_omp_for_set_clauses (gforo, t);
	  if (OMP_FOR_NON_RECTANGULAR (for_stmt))
	    {
	      tree *p1 = NULL, *p2 = NULL;
	      t = gimple_omp_for_initial (gforo, i);
	      if (TREE_CODE (t) == TREE_VEC)
		p1 = &TREE_VEC_ELT (t, 0);
	      t = gimple_omp_for_final (gforo, i);
	      if (TREE_CODE (t) == TREE_VEC)
		{
		  if (p1)
		    p2 = &TREE_VEC_ELT (t, 0);
		  else
		    p1 = &TREE_VEC_ELT (t, 0);
		}
	      if (p1)
		{
		  int j;
		  for (j = 0; j < i; j++)
		    if (*p1 == gimple_omp_for_index (gfor, j))
		      {
			*p1 = gimple_omp_for_index (gforo, j);
			if (p2)
			  *p2 = *p1;
			break;
		      }
		  gcc_assert (j < i);
		}
	    }
	}
      gimplify_seq_add_stmt (pre_p, gforo);
    }
  else
    gimplify_seq_add_stmt (pre_p, gfor);
  if (TREE_CODE (orig_for_stmt) == OMP_FOR)
    {
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      unsigned lastprivate_conditional = 0;
      while (ctx
	     && (ctx->region_type == ORT_TARGET_DATA
		 || ctx->region_type == ORT_TASKGROUP))
	ctx = ctx->outer_context;
      if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
	for (tree c = gimple_omp_for_clauses (gfor);
	     c; c = OMP_CLAUSE_CHAIN (c))
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    ++lastprivate_conditional;
      if (lastprivate_conditional)
	{
	  struct omp_for_data fd;
	  omp_extract_for_data (gfor, &fd, NULL);
	  tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
					      lastprivate_conditional);
	  tree var = create_tmp_var_raw (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
	  OMP_CLAUSE_DECL (c) = var;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
	  gimple_omp_for_set_clauses (gfor, c);
	  omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
	}
    }
  else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
    {
      unsigned lastprivate_conditional = 0;
      for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	  ++lastprivate_conditional;
      if (lastprivate_conditional)
	{
	  struct omp_for_data fd;
	  omp_extract_for_data (gfor, &fd, NULL);
	  tree type = unsigned_type_for (fd.iter_type);
	  while (lastprivate_conditional--)
	    {
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__CONDTEMP_);
	      OMP_CLAUSE_DECL (c) = create_tmp_var (type);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
	      gimple_omp_for_set_clauses (gfor, c);
	    }
	}
    }

  if (ret != GS_ALL_DONE)
    return GS_ERROR;
  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Helper for gimplify_omp_loop, called through walk_tree.  */

static tree
note_no_context_vars (tree *tp, int *, void *data)
{
  if (VAR_P (*tp)
      && DECL_CONTEXT (*tp) == NULL_TREE
      && !is_global_var (*tp))
    {
      vec<tree> *d = (vec<tree> *) data;
      d->safe_push (*tp);
      DECL_CONTEXT (*tp) = current_function_decl;
    }
  return NULL_TREE;
}
/* Gimplify the gross structure of an OMP_LOOP statement.  */

static enum gimplify_status
gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree clauses = OMP_FOR_CLAUSES (for_stmt);
  struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
  enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
  int i;

  /* If order is not present, the behavior is as if order(concurrent)
     appeared.  */
  tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
  if (order == NULL_TREE)
    {
      order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
      OMP_CLAUSE_CHAIN (order) = clauses;
      OMP_FOR_CLAUSES (for_stmt) = clauses = order;
    }

  tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
  if (bind == NULL_TREE)
    {
      if (!flag_openmp) /* flag_openmp_simd */
	;
      else if (octx && (octx->region_type & ORT_TEAMS) != 0)
	kind = OMP_CLAUSE_BIND_TEAMS;
      else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
	kind = OMP_CLAUSE_BIND_PARALLEL;
      else
	{
	  for (; octx; octx = octx->outer_context)
	    {
	      if ((octx->region_type & ORT_ACC) != 0
		  || octx->region_type == ORT_NONE
		  || octx->region_type == ORT_IMPLICIT_TARGET)
		continue;
	      break;
	    }
	  if (octx == NULL && !in_omp_construct)
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind%> clause not specified on a %<loop%> "
		      "construct not nested inside another OpenMP construct");
	}
      bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
      OMP_CLAUSE_CHAIN (bind) = clauses;
      OMP_CLAUSE_BIND_KIND (bind) = kind;
      OMP_FOR_CLAUSES (for_stmt) = bind;
    }
  else
    switch (OMP_CLAUSE_BIND_KIND (bind))
      {
      case OMP_CLAUSE_BIND_THREAD:
	break;
      case OMP_CLAUSE_BIND_PARALLEL:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	for (; octx; octx = octx->outer_context)
	  if (octx->region_type == ORT_SIMD
	      && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
	    {
	      error_at (EXPR_LOCATION (for_stmt),
			"%<bind(parallel)%> on a %<loop%> construct nested "
			"inside %<simd%> construct");
	      OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	      break;
	    }
	kind = OMP_CLAUSE_BIND_PARALLEL;
	break;
      case OMP_CLAUSE_BIND_TEAMS:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	if ((octx
	     && octx->region_type != ORT_IMPLICIT_TARGET
	     && octx->region_type != ORT_NONE
	     && (octx->region_type & ORT_TEAMS) == 0)
	    || in_omp_construct)
	  {
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind(teams)%> on a %<loop%> region not strictly "
		      "nested inside of a %<teams%> region");
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	kind = OMP_CLAUSE_BIND_TEAMS;
	break;
      default:
	gcc_unreachable ();
      }

  for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
    switch (OMP_CLAUSE_CODE (*pc))
      {
      case OMP_CLAUSE_REDUCTION:
	if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<inscan%> %<reduction%> clause on "
		      "%qs construct", "loop");
	    OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
	  }
	if (OMP_CLAUSE_REDUCTION_TASK (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "invalid %<task%> reduction modifier on construct "
		      "other than %<parallel%>, %qs or %<sections%>",
		      lang_GNU_Fortran () ? "do" : "for");
	    OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      case OMP_CLAUSE_LASTPRIVATE:
	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	  {
	    tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	    if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
	      break;
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					    i)) == TREE_LIST
		&& TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					       i)))
	      {
		tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
		  break;
	      }
	  }
	if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<lastprivate%> clause on a %<loop%> construct refers "
		      "to a variable %qD which is not the loop iterator",
		      OMP_CLAUSE_DECL (*pc));
	    *pc = OMP_CLAUSE_CHAIN (*pc);
	    break;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      default:
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      }

  TREE_SET_CODE (for_stmt, OMP_SIMD);

  int last;
  switch (kind)
    {
    case OMP_CLAUSE_BIND_THREAD: last = 0; break;
    case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
    case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
    }
  for (int pass = 1; pass <= last; pass++)
    {
      if (pass == 2)
	{
	  tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
			      make_node (BLOCK));
	  append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
	  *expr_p = make_node (OMP_PARALLEL);
	  TREE_TYPE (*expr_p) = void_type_node;
	  OMP_PARALLEL_BODY (*expr_p) = bind;
	  OMP_PARALLEL_COMBINED (*expr_p) = 1;
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
	  tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
	  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
		    == TREE_LIST))
	      {
		tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
		  {
		    *pc = build_omp_clause (UNKNOWN_LOCATION,
					    OMP_CLAUSE_FIRSTPRIVATE);
		    OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
		    pc = &OMP_CLAUSE_CHAIN (*pc);
		  }
	      }
	}
      tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
      tree *pc = &OMP_FOR_CLAUSES (t);
      TREE_TYPE (t) = void_type_node;
      OMP_FOR_BODY (t) = *expr_p;
      SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_BIND:
	  case OMP_CLAUSE_ORDER:
	  case OMP_CLAUSE_COLLAPSE:
	    *pc = copy_node (c);
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    /* Only needed on innermost.  */
	    break;
	  case OMP_CLAUSE_LASTPRIVATE:
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
	      {
		*pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		pc = &OMP_CLAUSE_CHAIN (*pc);
	      }
	    *pc = copy_node (c);
	    OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
	      {
		if (pass != last)
		  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
		else
		  lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_REDUCTION:
	    *pc = copy_node (c);
	    OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
	      {
		auto_vec<tree> no_context_vars;
		int walk_subtrees = 0;
		note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				      &walk_subtrees, &no_context_vars);
		if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
		  note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
					      note_no_context_vars,
					      &no_context_vars);
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
					      note_no_context_vars,
					      &no_context_vars);

		OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
		  = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
		    = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));

		hash_map<tree, tree> decl_map;
		decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
		decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
			      OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
		  decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));

		copy_body_data id;
		memset (&id, 0, sizeof (id));
		id.src_fn = current_function_decl;
		id.dst_fn = current_function_decl;
		id.src_cfun = cfun;
		id.decl_map = &decl_map;
		id.copy_decl = copy_decl_no_change;
		id.transform_call_graph_edges = CB_CGE_DUPLICATE;
		id.transform_new_cfg = true;
		id.transform_return_to_modify = false;
		walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
			   &id, NULL);
		walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
			   &id, NULL);

		for (tree d : no_context_vars)
		  {
		    DECL_CONTEXT (d) = NULL_TREE;
		    DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
		  }
	      }
	    else
	      {
		OMP_CLAUSE_REDUCTION_INIT (*pc)
		  = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
		OMP_CLAUSE_REDUCTION_MERGE (*pc)
		  = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *pc = NULL_TREE;
      *expr_p = t;
    }
  return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
}
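
/* A simplified illustration of what the code above produces: the OMP_LOOP
   itself is retyped to OMP_SIMD, and the pass loop then wraps it, so that
   roughly

     bind(thread):    simd
     bind(parallel):  for + simd
     bind(teams):     distribute + combined parallel + for + simd

   e.g.

     #pragma omp loop bind(parallel)
     for (i = 0; i < n; i++)
       ...

   ends up gimplified like a worksharing 'for' containing a 'simd' over the
   same iteration space, with the clauses duplicated or split as coded
   above.  */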
/* Helper function of optimize_target_teams, find OMP_TEAMS inside
   of OMP_TARGET's body.  */

static tree
find_omp_teams (tree *tp, int *walk_subtrees, void *)
{
  *walk_subtrees = 0;
  switch (TREE_CODE (*tp))
    {
    case OMP_TEAMS:
      return *tp;
    case BIND_EXPR:
    case STATEMENT_LIST:
      *walk_subtrees = 1;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
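
/* A simplified illustration: with

     int n = ...;
     #pragma omp target teams num_teams(n + 4)

   the walk above returns NULL_TREE for 'n + 4' (an integral decl that is
   firstprivate on the target plus simple arithmetic), so the value can be
   computed on the host, whereas operands like a function call foo () or a
   dereference *p make it return the offending node and the caller then
   treats the value as not determinable.  */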
/* Try to determine if the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially are,
   integral decls that are firstprivate (explicitly or implicitly)
   or explicitly map(always, to:) or map(always, tofrom:) on the target
   region too, and expressions involving simple arithmetics on those
   too, function calls are not ok, dereferencing something neither etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default
   -1 stands for value that can't be determined easily before entering
      the target construct.
   If teams construct is not present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined.  */

static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  tree num_teams_lower = NULL_TREE;
  tree num_teams_upper = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    num_teams_upper = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams_upper;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	    if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
	      {
		expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
		if (TREE_CODE (expr) == INTEGER_CST)
		  num_teams_lower = expr;
		else if (walk_tree (&expr, computable_teams_clause,
				    NULL, NULL))
		  num_teams_lower = integer_minus_one_node;
		else
		  {
		    num_teams_lower = expr;
		    gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
		    if (gimplify_expr (&num_teams_lower, pre_p, NULL,
				       is_gimple_val, fb_rvalue, false)
			== GS_ERROR)
		      {
			gimplify_omp_ctxp = target_ctx;
			num_teams_lower = integer_minus_one_node;
		      }
		    else
		      {
			gimplify_omp_ctxp = target_ctx;
			if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
			  OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
			    = num_teams_lower;
		      }
		  }
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
    {
      c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
      OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
      OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
      OMP_TARGET_CLAUSES (target) = c;
    }
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
  OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
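
/* A simplified illustration:

     #pragma omp target
     #pragma omp teams num_teams(4) thread_limit(16)

   gets NUM_TEAMS (upper 4) and THREAD_LIMIT (16) clauses attached to the
   OMP_TARGET; a target body without any teams construct gets num_teams 1
   and thread_limit 0; and a num_teams or thread_limit expression that
   cannot be precomputed on the host is recorded as -1.  */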
/* Gimplify the gross structure of several OMP constructs.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_SCOPE:
      ort = ORT_TASKGROUP;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_SERIAL:
      ort = ORT_ACC_SERIAL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      if (gimplify_omp_ctxp == NULL
	  || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
	ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  bool save_in_omp_construct = in_omp_construct;
  if ((ort & ORT_ACC) == 0)
    in_omp_construct = false;
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
      || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
    {
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));
  in_omp_construct = save_in_omp_construct;

  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
	{
	  for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
	}

      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OACC_SERIAL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_SCOPE:
      stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
	 to be evaluated before the use_device_{ptr,addr} clauses if they
	 refer to the same variables.  */
      {
	tree use_device_clauses;
	tree *pc, *uc = &use_device_clauses;
	for (pc = &OMP_CLAUSES (expr); *pc; )
	  if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
	      || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
	    {
	      *uc = *pc;
	      *pc = OMP_CLAUSE_CHAIN (*pc);
	      uc = &OMP_CLAUSE_CHAIN (*uc);
	    }
	  else
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	*uc = NULL_TREE;
	*pc = use_device_clauses;
	stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
					OMP_CLAUSES (expr));
      }
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
	gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
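
/* A simplified sketch of the cleanup built above for data regions: for
   '#pragma omp target data', the gimplified body is wrapped as

     try
       {
	 body
       }
     finally
       {
	 GOMP_target_end_data ();
       }

   so the unmapping runs however the region is left; the OpenACC data
   constructs use GOACC_data_end the same way.  */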
/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
   target update constructs.  */

static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  int kind;
  gomp_target *stmt;
  enum omp_region_type ort = ORT_WORKSHARE;

  switch (TREE_CODE (expr))
    {
    case OACC_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
      ort = ORT_ACC;
      break;
    case OACC_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
      ort = ORT_ACC;
      break;
    case OACC_UPDATE:
      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
      ort = ORT_ACC;
      break;
    case OMP_TARGET_UPDATE:
      kind = GF_OMP_TARGET_KIND_UPDATE;
      break;
    case OMP_TARGET_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_ENTER_DATA;
      break;
    case OMP_TARGET_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_EXIT_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
			     ort, TREE_CODE (expr));
  gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
			       TREE_CODE (expr));
  if (TREE_CODE (expr) == OACC_UPDATE
      && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			  OMP_CLAUSE_IF_PRESENT))
    {
      /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
	 option.  */
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FORCE_TO:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	      break;
	    case GOMP_MAP_FORCE_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
	      break;
	    default:
	      break;
	    }
    }
  else if (TREE_CODE (expr) == OACC_EXIT_DATA
	   && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			       OMP_CLAUSE_FINALIZE))
    {
      /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
	 semantics.  */
      bool have_clause = false;
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
	      have_clause = true;
	      break;
	    case GOMP_MAP_RELEASE:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
	      have_clause = true;
	      break;
	    case GOMP_MAP_TO_PSET:
	      /* Fortran arrays with descriptors must map that descriptor when
		 doing standalone "attach" operations (in OpenACC).  In that
		 case GOMP_MAP_TO_PSET appears by itself with no preceding
		 clause (see trans-openmp.cc:gfc_trans_omp_clauses).  */
	      break;
	    case GOMP_MAP_POINTER:
	      /* TODO PR92929: we may see these here, but they'll always follow
		 one of the clauses above, and will be handled by libgomp as
		 one group, so no handling required here.  */
	      gcc_assert (have_clause);
	      break;
	    case GOMP_MAP_DETACH:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
	      have_clause = false;
	      break;
	    case GOMP_MAP_STRUCT:
	      have_clause = false;
	      break;
	    default:
	      gcc_unreachable ();
	    }
    }
  stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
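
/* A simplified illustration of the finalize handling above:

     #pragma acc exit data copyout (a) delete (b) finalize

   has its map kinds rewritten from GOMP_MAP_FROM and GOMP_MAP_RELEASE to
   GOMP_MAP_FORCE_FROM and GOMP_MAP_DELETE, telling the runtime to copy
   back and remove the data regardless of the dynamic reference count.  */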
/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */

static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (TREE_CODE (expr) == INDIRECT_REF)
    {
      expr = TREE_OPERAND (expr, 0);
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var, tree &target_expr, bool rhs, int depth)
{
  tree expr = *expr_p;
  int saw_lhs = 0;

  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      if (pre_p)
	*expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  /* Maximum depth of lhs in expression is for the
     __builtin_clear_padding (...), __builtin_clear_padding (...),
     __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs;  */
  if (++depth > 7)
    goto finish;

  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case MODIFY_EXPR:
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;

	case ADDR_EXPR:
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    {
	      /* Special-case __builtin_clear_padding call before
		 __builtin_memcmp.  */
	      if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
		{
		  tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
		  if (fndecl
		      && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
		      && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
		      && (!pre_p
			  || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
						 lhs_addr, lhs_var,
						 target_expr, true, depth)))
		    {
		      if (pre_p)
			*expr_p = expr;
		      saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
						    pre_p, lhs_addr, lhs_var,
						    target_expr, true, depth);
		      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
						     pre_p, lhs_addr, lhs_var,
						     target_expr, rhs, depth);
		      return saw_lhs;
		    }
		}

	      if (pre_p)
		gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	    }
	  if (!pre_p)
	    return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
				       target_expr, rhs, depth);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
				     target_expr, rhs, depth);
	case COND_EXPR:
	  if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
				   lhs_var, target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case TARGET_EXPR:
	  if (TARGET_EXPR_INITIAL (expr))
	    {
	      if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
						lhs_var, target_expr, true,
						depth))
		break;
	      if (expr == target_expr)
		saw_lhs = 1;
	      else
		{
		  saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
						pre_p, lhs_addr, lhs_var,
						target_expr, true, depth);
		  if (saw_lhs && target_expr == NULL_TREE && pre_p)
		    target_expr = expr;
		}
	    }
	  break;
	default:
	  break;
	}
      break;
    case tcc_reference:
      if (TREE_CODE (expr) == BIT_FIELD_REF
	  || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var, target_expr, true,
				       depth);
      break;
    case tcc_vl_exp:
      if (TREE_CODE (expr) == CALL_EXPR)
	{
	  if (tree fndecl = get_callee_fndecl (expr))
	    if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
		|| fndecl_built_in_p (fndecl, BUILT_IN_MEMCMP))
	      {
		int nargs = call_expr_nargs (expr);
		for (int i = 0; i < nargs; i++)
		  saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
						 pre_p, lhs_addr, lhs_var,
						 target_expr, true, depth);
	      }
	}
      break;
    default:
      break;
    }

 finish:
  if (saw_lhs == 0 && pre_p)
    {
      enum gimplify_status gs;
      if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
	{
	  gimplify_stmt (&expr, pre_p);
	  return saw_lhs;
	}
      if (rhs)
	gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      else
	gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
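
/* A simplified illustration of the stabilization done above: for

     #pragma omp atomic
     x = x + foo ();

   the call is evaluated into a temporary in *PRE_P before the atomic
   sequence, and each appearance of the lhs 'x' in the rhs is replaced by
   LHS_VAR (the register that will hold the atomically loaded value), so
   the rhs becomes something like 'lhs_var + D.1234'.  */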
/* Gimplify an OMP_ATOMIC statement.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;
  tree target_expr = NULL_TREE;

  tmp_load = create_tmp_reg (type);
  if (rhs
      && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
			     true, 0) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
					   OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      tree rhsarg = rhs;
      if (TREE_CODE (rhs) == COND_EXPR)
	rhsarg = TREE_OPERAND (rhs, 1);
      if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhsarg, 2);
	  tree op1 = TREE_OPERAND (rhsarg, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
	  tree t = build2_loc (EXPR_LOCATION (rhsarg),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhsarg),
					   BIT_FIELD_REF, TREE_TYPE (op1),
					   tmp_store, bitsize, bitpos), op1);
	  if (TREE_CODE (rhs) == COND_EXPR)
	    t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
			    TREE_OPERAND (rhs, 0), t, void_node);
	  gimplify_and_add (t, pre_p);
	  rhs = tmp_store;
	}
      bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
      if (TREE_CODE (rhs) == COND_EXPR)
	gimplify_ctxp->allow_rhs_cond_expr = true;
      enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
					       is_gimple_val, fb_rvalue);
      gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
      if (gs != GS_ALL_DONE)
	return GS_ERROR;
    }

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt
    = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
    {
      gimple_omp_atomic_set_weak (loadstmt);
      gimple_omp_atomic_set_weak (storestmt);
    }
  gimplify_seq_add_stmt (pre_p, storestmt);
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
   body, and adding some EH bits.  */

static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  gimple *body_stmt;
  gtransaction *trans_stmt;
  gimple_seq body = NULL;
  int subcode = 0;

  /* Wrap the transaction body in a BIND_EXPR so we have a context
     where to put decls for OMP.  */
  if (TREE_CODE (tbody) != BIND_EXPR)
    {
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
      TRANSACTION_EXPR_BODY (expr) = bind;
    }

  push_gimplify_context ();
  temp = voidify_wrapper_expr (*expr_p, NULL);

  body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  pop_gimplify_context (body_stmt);

  trans_stmt = gimple_build_transaction (body);
  if (TRANSACTION_EXPR_OUTER (expr))
    subcode = GTMA_IS_OUTER;
  else if (TRANSACTION_EXPR_RELAXED (expr))
    subcode = GTMA_IS_RELAXED;
  gimple_transaction_set_subcode (trans_stmt, subcode);

  gimplify_seq_add_stmt (pre_p, trans_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Return the gimplified GIMPLE_OMP_ORDERED tuple.  */

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ())
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %qs clause must be "
		      "closely nested inside a loop with %<ordered%> clause",
		      OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
	  {
	    bool fail = false;
	    sink_c = c;
	    if (OMP_CLAUSE_DECL (c) == NULL_TREE)
	      continue;  /* omp_cur_iteration - 1 */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %qs clause with "
			  "%<sink%> modifier does not match number of "
			  "iteration variables",
			  OMP_CLAUSE_DOACROSS_DEPEND (c)
			  ? "depend" : "doacross");
		failures++;
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
	  {
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %qs clause with %<source%> "
			  "modifier on an %<ordered%> construct",
			  OMP_CLAUSE_DOACROSS_DEPEND (source_c)
			  ? "depend" : "doacross");
		failures++;
	      }
	    else
	      source_c = c;
	  }

      if (source_c && sink_c)
	{
	  error_at (OMP_CLAUSE_LOCATION (source_c),
		    "%qs clause with %<source%> modifier specified "
		    "together with %qs clauses with %<sink%> modifier "
		    "on the same construct",
		    OMP_CLAUSE_DOACROSS_DEPEND (source_c)
		    ? "depend" : "doacross",
		    OMP_CLAUSE_DOACROSS_DEPEND (sink_c)
		    ? "depend" : "doacross");
	  failures++;
	}
    }

  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
/* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
   expression produces a value to be used as an operand inside a GIMPLE
   statement, the value will be stored back in *EXPR_P.  This value will
   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
   emitted in PRE_P and POST_P.

   Additionally, this process may overwrite parts of the input
   expression during gimplification.  Ideally, it should be
   possible to do non-destructive gimplification.

   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
      the expression needs to evaluate to a value to be used as
      an operand in a GIMPLE statement, this value will be stored in
      *EXPR_P on exit.  This happens when the caller specifies one
      of fb_lvalue or fb_rvalue fallback flags.

   PRE_P will contain the sequence of GIMPLE statements corresponding
      to the evaluation of EXPR and all the side-effects that must
      be executed before the main expression.  On exit, the last
      statement of PRE_P is the core statement being gimplified.  For
      instance, when gimplifying 'if (++a)' the last statement in
      PRE_P will be 'if (t.1)' where t.1 is the result of
      pre-incrementing 'a'.

   POST_P will contain the sequence of GIMPLE statements corresponding
      to the evaluation of all the side-effects that must be executed
      after the main expression.  If this is NULL, the post
      side-effects are stored at the end of PRE_P.

      The reason why the output is split in two is to handle post
      side-effects explicitly.  In some cases, an expression may have
      inner and outer post side-effects which need to be emitted in
      an order different from the one given by the recursive
      traversal.  For instance, for the expression (*p--)++ the post
      side-effects of '--' must actually occur *after* the post
      side-effects of '++'.  However, gimplification will first visit
      the inner expression, so if a separate POST sequence was not
      used, the resulting sequence would be:

	   1	t.1 = *p
	   2	p = p - 1
	   3	t.2 = t.1 + 1
	   4	*p = t.2

      However, the post-decrement operation in line #2 must not be
      evaluated until after the store to *p at line #4, so the
      correct sequence should be:

	   1	t.1 = *p
	   2	t.2 = t.1 + 1
	   3	*p = t.2
	   4	p = p - 1

      So, by specifying a separate post queue, it is possible
      to emit the post side-effects in the correct order.
      If POST_P is NULL, an internal queue will be used.  Before
      returning to the caller, the sequence POST_P is appended to
      the main output sequence PRE_P.

   GIMPLE_TEST_F points to a function that takes a tree T and
      returns nonzero if T is in the GIMPLE form requested by the
      caller.  The GIMPLE predicates are in gimple.cc.

   FALLBACK tells the function what sort of a temporary we want if
      gimplification cannot produce an expression that complies with
      the requested test.

      fb_none means that no temporary should be generated
      fb_rvalue means that an rvalue is OK to generate
      fb_lvalue means that an lvalue is OK to generate
      fb_either means that either is OK, but an lvalue is preferable.
      fb_mayfail means that gimplification may fail (in which case
      GS_ERROR will be returned)

   The return value is either GS_ERROR or GS_ALL_DONE, since this
   function iterates until EXPR is completely gimplified or an error
   occurs.  */
enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback)
{
  tree tmp;
  gimple_seq internal_pre = NULL;
  gimple_seq internal_post = NULL;
  tree save_expr;
  bool is_statement;
  location_t saved_location;
  enum gimplify_status ret;
  gimple_stmt_iterator pre_last_gsi, post_last_gsi;
  tree label;

  save_expr = *expr_p;
  if (save_expr == NULL_TREE)
    return GS_ALL_DONE;

  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
  is_statement = gimple_test_f == is_gimple_stmt;
  if (is_statement)
    gcc_assert (pre_p);

  /* Consistency checks.  */
  if (gimple_test_f == is_gimple_reg)
    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
  else if (gimple_test_f == is_gimple_val
	   || gimple_test_f == is_gimple_call_addr
	   || gimple_test_f == is_gimple_condexpr_for_cond
	   || gimple_test_f == is_gimple_mem_rhs
	   || gimple_test_f == is_gimple_mem_rhs_or_call
	   || gimple_test_f == is_gimple_reg_rhs
	   || gimple_test_f == is_gimple_reg_rhs_or_call
	   || gimple_test_f == is_gimple_asm_val
	   || gimple_test_f == is_gimple_mem_ref_addr)
    gcc_assert (fallback & fb_rvalue);
  else if (gimple_test_f == is_gimple_min_lval
	   || gimple_test_f == is_gimple_lvalue)
    gcc_assert (fallback & fb_lvalue);
  else if (gimple_test_f == is_gimple_addressable)
    gcc_assert (fallback & fb_either);
  else if (gimple_test_f == is_gimple_stmt)
    gcc_assert (fallback == fb_none);
  else
    {
      /* We should have recognized the GIMPLE_TEST_F predicate to
	 know what kind of fallback to use in case a temporary is
	 needed to hold the value or address of *EXPR_P.  */
      gcc_unreachable ();
    }
  /* We used to check the predicate here and return immediately if it
     succeeds.  This is wrong; the design is for gimplification to be
     idempotent, and for the predicates to only test for valid forms, not
     whether they are fully simplified.  */
  if (pre_p == NULL)
    pre_p = &internal_pre;

  if (post_p == NULL)
    post_p = &internal_post;

  /* Remember the last statements added to PRE_P and POST_P.  Every
     new statement added by the gimplification helpers needs to be
     annotated with location information.  To centralize the
     responsibility, we remember the last statement that had been
     added to both queues before gimplifying *EXPR_P.  If
     gimplification produces new statements in PRE_P and POST_P, those
     statements will be annotated with the same location information
     as *EXPR_P.  */
  pre_last_gsi = gsi_last (*pre_p);
  post_last_gsi = gsi_last (*post_p);

  saved_location = input_location;
  if (save_expr != error_mark_node
      && EXPR_HAS_LOCATION (*expr_p))
    input_location = EXPR_LOCATION (*expr_p);

  /* Loop over the specific gimplifiers until the toplevel node
     remains the same.  */
  do
    {
      /* Strip away as many useless type conversions as possible
	 at the toplevel.  */
      STRIP_USELESS_TYPE_CONVERSION (*expr_p);

      /* Remember the expr.  */
      save_expr = *expr_p;

      /* Die, die, die, my darling.  */
      if (error_operand_p (save_expr))
	{
	  ret = GS_ERROR;
	  break;
	}

      /* Do any language-specific gimplification.  */
      ret = ((enum gimplify_status)
	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
      if (ret == GS_OK)
	{
	  if (*expr_p == NULL_TREE)
	    break;
	  if (*expr_p != save_expr)
	    continue;
	}
      else if (ret != GS_UNHANDLED)
	break;
      /* Make sure that all the cases set 'ret' appropriately.  */
      ret = GS_UNHANDLED;
      switch (TREE_CODE (*expr_p))
	{
	  /* First deal with the special cases.  */

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
					fallback != fb_none,
					TREE_TYPE (*expr_p));
	  break;

	case VIEW_CONVERT_EXPR:
	  if ((fallback & fb_rvalue)
	      && is_gimple_reg_type (TREE_TYPE (*expr_p))
	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      recalculate_side_effects (*expr_p);
	      break;
	    }
	  /* Fallthru.  */

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case COMPONENT_REF:
	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
					fallback ? fallback : fb_rvalue);
	  break;

	case COND_EXPR:
	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);

	  /* C99 code may assign to an array in a structure value of a
	     conditional expression, and this has undefined behavior
	     only on execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case CALL_EXPR:
	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);

	  /* C99 code may assign to an array in a structure returned
	     from a function, and this has undefined behavior only on
	     execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case TREE_LIST:
	  gcc_unreachable ();
	case COMPOUND_EXPR:
	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
	  break;

	case COMPOUND_LITERAL_EXPR:
	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
						gimple_test_f, fallback);
	  break;

	case MODIFY_EXPR:
	case INIT_EXPR:
	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
				      fallback != fb_none);
	  break;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  {
	    /* Preserve the original type of the expression and the
	       source location of the outer expression.  */
	    tree org_type = TREE_TYPE (*expr_p);
	    *expr_p = gimple_boolify (*expr_p);
	    *expr_p = build3_loc (input_location, COND_EXPR,
				  org_type, *expr_p,
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_true_node),
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_false_node));
	    ret = GS_OK;
	    break;
	  }

	case TRUTH_NOT_EXPR:
	  {
	    tree type = TREE_TYPE (*expr_p);
	    /* The parsers are careful to generate TRUTH_NOT_EXPR
	       only with operands that are always zero or one.
	       We do not fold here but handle the only interesting case
	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
	    *expr_p = gimple_boolify (*expr_p);
	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0));
	    else
	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0),
				    build_int_cst (TREE_TYPE (*expr_p), 1));
	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
	    ret = GS_OK;
	    break;
	  }

	case ADDR_EXPR:
	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
	  break;
	case ANNOTATE_EXPR:
	  {
	    tree cond = TREE_OPERAND (*expr_p, 0);
	    tree kind = TREE_OPERAND (*expr_p, 1);
	    tree data = TREE_OPERAND (*expr_p, 2);
	    tree type = TREE_TYPE (cond);
	    if (!INTEGRAL_TYPE_P (type))
	      {
		*expr_p = cond;
		ret = GS_OK;
		break;
	      }
	    tree tmp = create_tmp_var (type);
	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
	    gcall *call
	      = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
	    gimple_call_set_lhs (call, tmp);
	    gimplify_seq_add_stmt (pre_p, call);
	    *expr_p = tmp;
	    ret = GS_ALL_DONE;
	    break;
	  }

	case VA_ARG_EXPR:
	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
	  break;

	CASE_CONVERT:
	  if (IS_EMPTY_STMT (*expr_p))
	    {
	      ret = GS_ALL_DONE;
	      break;
	    }

	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
	      || fallback == fb_none)
	    {
	      /* Just strip a conversion to void (or in void context) and
		 try again.  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	      ret = GS_OK;
	      break;
	    }

	  ret = gimplify_conversion (expr_p);
	  if (ret == GS_ERROR)
	    break;
	  if (*expr_p != save_expr)
	    break;
	  /* FALLTHRU */

	case FIX_TRUNC_EXPR:
	  /* unary_expr: ... | '(' cast ')' val | ...  */
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_val, fb_rvalue);
	  recalculate_side_effects (*expr_p);
	  break;

	case INDIRECT_REF:
	  {
	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));

	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
	    if (*expr_p != save_expr)
	      {
		ret = GS_OK;
		break;
	      }

	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				 is_gimple_reg, fb_rvalue);
	    if (ret == GS_ERROR)
	      break;

	    recalculate_side_effects (*expr_p);
	    *expr_p = fold_build2_loc (input_location, MEM_REF,
				       TREE_TYPE (*expr_p),
				       TREE_OPERAND (*expr_p, 0),
				       build_int_cst (saved_ptr_type, 0));
	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
	    TREE_THIS_NOTRAP (*expr_p) = notrap;
	    ret = GS_OK;
	    break;
	  }
	/* We arrive here through the various re-gimplification paths.  */
	case MEM_REF:
	  /* First try re-folding the whole thing.  */
	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
			     TREE_OPERAND (*expr_p, 0),
			     TREE_OPERAND (*expr_p, 1));
	  if (tmp)
	    {
	      REF_REVERSE_STORAGE_ORDER (tmp)
		= REF_REVERSE_STORAGE_ORDER (*expr_p);
	      *expr_p = tmp;
	      recalculate_side_effects (*expr_p);
	      ret = GS_OK;
	      break;
	    }
	  /* Avoid re-gimplifying the address operand if it is already
	     in suitable form.  Re-gimplifying would mark the address
	     operand addressable.  Always gimplify when not in SSA form
	     as we still may have to gimplify decls with value-exprs.  */
	  if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				   is_gimple_mem_ref_addr, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  recalculate_side_effects (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	  /* Constants need not be gimplified.  */
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case STRING_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	  /* Drop the overflow flag on constants, we do not want
	     that in the GIMPLE IL.  */
	  if (TREE_OVERFLOW_P (*expr_p))
	    *expr_p = drop_tree_overflow (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	case CONST_DECL:
	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
	     CONST_DECL node.  Otherwise the decl is replaceable by its
	     value.  */
	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
	  if (fallback & fb_lvalue)
	    ret = GS_ALL_DONE;
	  else
	    {
	      *expr_p = DECL_INITIAL (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case DECL_EXPR:
	  ret = gimplify_decl_expr (expr_p, pre_p);
	  break;

	case BIND_EXPR:
	  ret = gimplify_bind_expr (expr_p, pre_p);
	  break;

	case LOOP_EXPR:
	  ret = gimplify_loop_expr (expr_p, pre_p);
	  break;

	case SWITCH_EXPR:
	  ret = gimplify_switch_expr (expr_p, pre_p);
	  break;

	case EXIT_EXPR:
	  ret = gimplify_exit_expr (expr_p);
	  break;
	case GOTO_EXPR:
	  /* If the target is not LABEL, then it is a computed jump
	     and the target needs to be gimplified.  */
	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
	    {
	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
				   NULL, is_gimple_val, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case PREDICT_EXPR:
	  gimplify_seq_add_stmt (pre_p,
			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
					      PREDICT_EXPR_OUTCOME (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case LABEL_EXPR:
	  ret = gimplify_label_expr (expr_p, pre_p);
	  label = LABEL_EXPR_LABEL (*expr_p);
	  gcc_assert (decl_function_context (label) == current_function_decl);

	  /* If the label is used in a goto statement, or address of the label
	     is taken, we need to unpoison all variables that were seen so far.
	     Doing so would prevent us from reporting false positives.  */
	  if (asan_poisoned_variables
	      && asan_used_labels != NULL
	      && asan_used_labels->contains (label)
	      && !gimplify_omp_ctxp)
	    asan_poison_variables (asan_poisoned_variables, false, pre_p);
	  break;

	case CASE_LABEL_EXPR:
	  ret = gimplify_case_label_expr (expr_p, pre_p);

	  if (gimplify_ctxp->live_switch_vars)
	    asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
				   pre_p);
	  break;

	case RETURN_EXPR:
	  ret = gimplify_return_expr (*expr_p, pre_p);
	  break;

	case CONSTRUCTOR:
	  /* Don't reduce this in place; let gimplify_init_constructor work its
	     magic.  But if we're just elaborating this for side effects, just
	     gimplify any element that has side-effects.  */
	  if (fallback == fb_none)
	    {
	      unsigned HOST_WIDE_INT ix;
	      tree val;
	      tree temp = NULL_TREE;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
		if (TREE_SIDE_EFFECTS (val))
		  append_to_statement_list (val, &temp);

	      *expr_p = temp;
	      ret = temp ? GS_OK : GS_ALL_DONE;
	    }
	  /* C99 code may assign to an array in a constructed
	     structure or union, and this has undefined behavior only
	     on execution, so create a temporary if an lvalue is
	     required.  */
	  else if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  else
	    ret = GS_ALL_DONE;
	  break;
	  /* The following are special cases that are not handled by the
	     original GIMPLE grammar.  */

	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
	     eliminated.  */
	case SAVE_EXPR:
	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
	  break;

	case BIT_FIELD_REF:
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			       post_p, is_gimple_lvalue, fb_either);
	  recalculate_side_effects (*expr_p);
	  break;

	case TARGET_MEM_REF:
	  {
	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;

	    if (TMR_BASE (*expr_p))
	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
				  post_p, is_gimple_mem_ref_addr, fb_either);
	    if (TMR_INDEX (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    if (TMR_INDEX2 (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
	    ret = MIN (r0, r1);
	    break;
	  }

	case NON_LVALUE_EXPR:
	  /* This should have been stripped above.  */
	  gcc_unreachable ();
	case ASM_EXPR:
	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
	  break;

	case TRY_FINALLY_EXPR:
	case TRY_CATCH_EXPR:
	  {
	    gimple_seq eval, cleanup;
	    gtry *try_;

	    /* Calls to destructors are generated automatically in FINALLY/CATCH
	       block.  They should have location as UNKNOWN_LOCATION.  However,
	       gimplify_call_expr will reset these call stmts to input_location
	       if it finds stmt's location is unknown.  To prevent resetting for
	       destructors, we set the input_location to unknown.
	       Note that this only affects the destructor calls in FINALLY/CATCH
	       block, and will automatically reset to its original value by the
	       end of gimplify_expr.  */
	    input_location = UNKNOWN_LOCATION;
	    eval = cleanup = NULL;
	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
	    if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
		&& TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
	      {
		gimple_seq n = NULL, e = NULL;
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						0), &n);
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						1), &e);
		if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
		  {
		    geh_else *stmt = gimple_build_eh_else (n, e);
		    gimple_seq_add_stmt (&cleanup, stmt);
		  }
	      }
	    else
	      gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
	    if (gimple_seq_empty_p (cleanup))
	      {
		gimple_seq_add_seq (pre_p, eval);
		ret = GS_ALL_DONE;
		break;
	      }
	    try_ = gimple_build_try (eval, cleanup,
				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
				     ? GIMPLE_TRY_FINALLY
				     : GIMPLE_TRY_CATCH);
	    if (EXPR_HAS_LOCATION (save_expr))
	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
	      gimple_set_location (try_, saved_location);
	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
	      gimple_try_set_catch_is_cleanup (try_,
					       TRY_CATCH_IS_CLEANUP (*expr_p));
	    gimplify_seq_add_stmt (pre_p, try_);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case CLEANUP_POINT_EXPR:
	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
	  break;

	case TARGET_EXPR:
	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
	  break;

	case CATCH_EXPR:
	  {
	    gimple *c;
	    gimple_seq handler = NULL;
	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
	    gimplify_seq_add_stmt (pre_p, c);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case EH_FILTER_EXPR:
	  {
	    gimple *ehf;
	    gimple_seq failure = NULL;

	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
	    copy_warning (ehf, *expr_p);
	    gimplify_seq_add_stmt (pre_p, ehf);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OBJ_TYPE_REF:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    TREE_SIDE_EFFECTS (*expr_p) = 0;
	    ret = MIN (r0, r1);
	    break;
	  }

	case LABEL_DECL:
	  /* We get here when taking the address of a label.  We mark
	     the label as "forced"; meaning it can never be removed and
	     it is a potential target for any computed goto.  */
	  FORCED_LABEL (*expr_p) = 1;
	  ret = GS_ALL_DONE;
	  break;
	case STATEMENT_LIST:
	  ret = gimplify_statement_list (expr_p, pre_p);
	  break;

	case WITH_SIZE_EXPR:
	  {
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			   post_p == &internal_post ? NULL : post_p,
			   gimple_test_f, fallback);
	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			   is_gimple_val, fb_rvalue);
	    ret = GS_ALL_DONE;
	  }
	  break;

	case VAR_DECL:
	case PARM_DECL:
	  ret = gimplify_var_or_parm_decl (expr_p);
	  break;

	case RESULT_DECL:
	  /* When within an OMP context, notice uses of variables.  */
	  if (gimplify_omp_ctxp)
	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
	  ret = GS_ALL_DONE;
	  break;

	case DEBUG_EXPR_DECL:
	  gcc_unreachable ();

	case DEBUG_BEGIN_STMT:
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_debug_begin_stmt
				 (TREE_BLOCK (*expr_p),
				  EXPR_LOCATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  *expr_p = NULL;
	  break;
	case SSA_NAME:
	  /* Allow callbacks into the gimplifier during optimization.  */
	  ret = GS_ALL_DONE;
	  break;

	case OMP_PARALLEL:
	  gimplify_omp_parallel (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_TASK:
	  gimplify_omp_task (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SIMD:
	  {
	    /* Temporarily disable into_ssa, as scan_omp_simd
	       which calls copy_gimple_seq_and_replace_locals can't deal
	       with SSA_NAMEs defined outside of the body properly.  */
	    bool saved_into_ssa = gimplify_ctxp->into_ssa;
	    gimplify_ctxp->into_ssa = false;
	    ret = gimplify_omp_for (expr_p, pre_p);
	    gimplify_ctxp->into_ssa = saved_into_ssa;
	    break;
	  }

	case OMP_FOR:
	case OMP_DISTRIBUTE:
	case OMP_TASKLOOP:
	case OACC_LOOP:
	  ret = gimplify_omp_for (expr_p, pre_p);
	  break;

	case OMP_LOOP:
	  ret = gimplify_omp_loop (expr_p, pre_p);
	  break;

	case OACC_CACHE:
	  gimplify_oacc_cache (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_DECLARE:
	  gimplify_oacc_declare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_HOST_DATA:
	case OACC_DATA:
	case OACC_KERNELS:
	case OACC_PARALLEL:
	case OACC_SERIAL:
	case OMP_SECTIONS:
	case OMP_SINGLE:
	case OMP_SCOPE:
	case OMP_TARGET:
	case OMP_TARGET_DATA:
	case OMP_TEAMS:
	  gimplify_omp_workshare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_ENTER_DATA:
	case OACC_EXIT_DATA:
	case OACC_UPDATE:
	case OMP_TARGET_UPDATE:
	case OMP_TARGET_ENTER_DATA:
	case OMP_TARGET_EXIT_DATA:
	  gimplify_omp_target_update (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SECTION:
	case OMP_MASTER:
	case OMP_MASKED:
	case OMP_ORDERED:
	case OMP_CRITICAL:
	case OMP_SCAN:
	  {
	    gimple_seq body = NULL;
	    gimple *g;
	    bool saved_in_omp_construct = in_omp_construct;

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    switch (TREE_CODE (*expr_p))
	      {
	      case OMP_SECTION:
		g = gimple_build_omp_section (body);
		break;
	      case OMP_MASTER:
		g = gimple_build_omp_master (body);
		break;
	      case OMP_ORDERED:
		g = gimplify_omp_ordered (*expr_p, body);
		if (OMP_BODY (*expr_p) == NULL_TREE
		    && gimple_code (g) == GIMPLE_OMP_ORDERED)
		  gimple_omp_ordered_standalone (g);
		break;
	      case OMP_MASKED:
		gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_MASKED);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_MASKED_CLAUSES (*expr_p),
					     OMP_MASKED);
		g = gimple_build_omp_masked (body,
					     OMP_MASKED_CLAUSES (*expr_p));
		break;
	      case OMP_CRITICAL:
		gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_CRITICAL);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_CRITICAL_CLAUSES (*expr_p),
					     OMP_CRITICAL);
		g = gimple_build_omp_critical (body,
					       OMP_CRITICAL_NAME (*expr_p),
					       OMP_CRITICAL_CLAUSES (*expr_p));
		break;
	      case OMP_SCAN:
		gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_SCAN);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_SCAN_CLAUSES (*expr_p),
					     OMP_SCAN);
		g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
		break;
	      default:
		gcc_unreachable ();
	      }
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OMP_TASKGROUP:
	  {
	    gimple_seq body = NULL;

	    tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
	    bool saved_in_omp_construct = in_omp_construct;
	    gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
				       OMP_TASKGROUP);
	    gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    gimple_seq cleanup = NULL;
	    tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
	    gimple *g = gimple_build_call (fn, 0);
	    gimple_seq_add_stmt (&cleanup, g);
	    g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	    body = NULL;
	    gimple_seq_add_stmt (&body, g);
	    g = gimple_build_omp_taskgroup (body, *pclauses);
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }
	case OMP_ATOMIC:
	case OMP_ATOMIC_READ:
	case OMP_ATOMIC_CAPTURE_OLD:
	case OMP_ATOMIC_CAPTURE_NEW:
	  ret = gimplify_omp_atomic (expr_p, pre_p);
	  break;

	case TRANSACTION_EXPR:
	  ret = gimplify_transaction (expr_p, pre_p);
	  break;

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  {
	    tree orig_type = TREE_TYPE (*expr_p);
	    tree new_type, xop0, xop1;
	    *expr_p = gimple_boolify (*expr_p);
	    new_type = TREE_TYPE (*expr_p);
	    if (!useless_type_conversion_p (orig_type, new_type))
	      {
		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
		ret = GS_OK;
		break;
	      }

	    /* Boolified binary truth expressions are semantically equivalent
	       to bitwise binary expressions.  Canonicalize them to the
	       bitwise variant.  */
	    switch (TREE_CODE (*expr_p))
	      {
	      case TRUTH_AND_EXPR:
		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
		break;
	      case TRUTH_OR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
		break;
	      case TRUTH_XOR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
		break;
	      default:
		break;
	      }
	    /* Now make sure that operands have compatible type to
	       expression's new_type.  */
	    xop0 = TREE_OPERAND (*expr_p, 0);
	    xop1 = TREE_OPERAND (*expr_p, 1);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
							    new_type,
							    xop0);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
							    new_type,
							    xop1);
	    /* Continue classified as tcc_binary.  */
	    goto expr_2;
	  }

	case VEC_COND_EXPR:
	  goto expr_3;

	case VEC_PERM_EXPR:
	  /* Classified as tcc_expression.  */
	  goto expr_3;

	case BIT_INSERT_EXPR:
	  /* Argument 3 is a constant.  */
	  goto expr_2;

	case POINTER_PLUS_EXPR:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    recalculate_side_effects (*expr_p);
	    ret = MIN (r0, r1);
	    break;
	  }

	default:
	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
	    {
	    case tcc_comparison:
	      /* Handle comparison of objects of non scalar mode aggregates
		 with a call to memcmp.  It would be nice to only have to do
		 this for variable-sized objects, but then we'd have to allow
		 the same nest of reference nodes we allow for MODIFY_EXPR and
		 that's too complex.

		 Compare scalar mode aggregates as scalar mode values.  Using
		 memcmp for them would be very inefficient at best, and is
		 plain wrong if bitfields are involved.  */
	      {
		tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));

		/* Vector comparisons need no boolification.  */
		if (TREE_CODE (type) == VECTOR_TYPE)
		  goto expr_2;
		else if (!AGGREGATE_TYPE_P (type))
		  {
		    tree org_type = TREE_TYPE (*expr_p);
		    *expr_p = gimple_boolify (*expr_p);
		    if (!useless_type_conversion_p (org_type,
						    TREE_TYPE (*expr_p)))
		      {
			*expr_p = fold_convert_loc (input_location,
						    org_type, *expr_p);
			ret = GS_OK;
		      }
		    else
		      goto expr_2;
		  }
		else if (TYPE_MODE (type) != BLKmode)
		  ret = gimplify_scalar_mode_aggregate_compare (expr_p);
		else
		  ret = gimplify_variable_sized_compare (expr_p);

		break;
	      }

	    /* If *EXPR_P does not need to be special-cased, handle it
	       according to its class.  */
	    case tcc_unary:
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      break;

	    case tcc_binary:
	    expr_2:
	      {
		enum gimplify_status r0, r1;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (r0, r1);
		break;
	      }

	    expr_3:
	      {
		enum gimplify_status r0, r1, r2;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (MIN (r0, r1), r2);
		break;
	      }

	    case tcc_declaration:
	    case tcc_constant:
	      ret = GS_ALL_DONE;
	      goto dont_recalculate;

	    default:
	      gcc_unreachable ();
	    }

	  recalculate_side_effects (*expr_p);

	dont_recalculate:
	  break;
	}

      gcc_assert (*expr_p || ret != GS_OK);
    }
  while (ret == GS_OK);
  /* If we encountered an error_mark somewhere nested inside, either
     stub out the statement or propagate the error back out.  */
  if (ret == GS_ERROR)
    {
      if (is_statement)
	*expr_p = NULL;
      goto out;
    }

  /* This was only valid as a return value from the langhook, which
     we handled.  Make sure it doesn't escape from any other context.  */
  gcc_assert (ret != GS_UNHANDLED);

  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
    {
      /* We aren't looking for a value, and we don't have a valid
	 statement.  If it doesn't have side-effects, throw it away.
	 We can also get here with code such as "*&&L;", where L is
	 a LABEL_DECL that is marked as FORCED_LABEL.  */
      if (TREE_CODE (*expr_p) == LABEL_DECL
	  || !TREE_SIDE_EFFECTS (*expr_p))
	*expr_p = NULL;
      else if (!TREE_THIS_VOLATILE (*expr_p))
	{
	  /* This is probably a _REF that contains something nested that
	     has side effects.  Recurse through the operands to find it.  */
	  enum tree_code code = TREE_CODE (*expr_p);

	  switch (code)
	    {
	    case COMPONENT_REF:
	    case REALPART_EXPR:
	    case IMAGPART_EXPR:
	    case VIEW_CONVERT_EXPR:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    default:
	      /* Anything else with side-effects must be converted to
		 a valid statement before we get here.  */
	      gcc_unreachable ();
	    }

	  *expr_p = NULL;
	}
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
	       && !is_empty_type (TREE_TYPE (*expr_p)))
	{
	  /* Historically, the compiler has treated a bare reference
	     to a non-BLKmode volatile lvalue as forcing a load.  */
	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));

	  /* Normally, we do not want to create a temporary for a
	     TREE_ADDRESSABLE type because such a type should not be
	     copied by bitwise-assignment.  However, we make an
	     exception here, as all we are doing here is ensuring that
	     we read the bytes that make up the type.  We use
	     create_tmp_var_raw because create_tmp_var will abort when
	     given a TREE_ADDRESSABLE type.  */
	  tree tmp = create_tmp_var_raw (type, "vol");
	  gimple_add_tmp_var (tmp);
	  gimplify_assign (tmp, *expr_p, pre_p);
	  *expr_p = NULL;
	}
      else
	/* We can't do anything useful with a volatile reference to
	   an incomplete type, so just throw it away.  Likewise for
	   a BLKmode type, since any implicit inner load should
	   already have been turned into an explicit one by the
	   gimplification process.  */
	*expr_p = NULL;
    }

  /* If we are gimplifying at the statement level, we're done.  Tack
     everything together and return.  */
  if (fallback == fb_none || is_statement)
    {
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
	 it out for GC to reclaim it.  */
      *expr_p = NULL_TREE;

      if (!gimple_seq_empty_p (internal_pre)
	  || !gimple_seq_empty_p (internal_post))
	{
	  gimplify_seq_add_seq (&internal_pre, internal_post);
	  gimplify_seq_add_seq (pre_p, internal_pre);
	}

      /* The result of gimplifying *EXPR_P is going to be the last few
	 statements in *PRE_P and *POST_P.  Add location information
	 to all the statements that were added by the gimplification
	 helpers.  */
      if (!gimple_seq_empty_p (*pre_p))
	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);

      if (!gimple_seq_empty_p (*post_p))
	annotate_all_with_location_after (*post_p, post_last_gsi,
					  input_location);

      goto out;
    }

#ifdef ENABLE_GIMPLE_CHECKING
  if (*expr_p)
    {
      enum tree_code code = TREE_CODE (*expr_p);
      /* These expressions should already be in gimple IR form.  */
      gcc_assert (code != MODIFY_EXPR
		  && code != ASM_EXPR
		  && code != BIND_EXPR
		  && code != CATCH_EXPR
		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
		  && code != EH_FILTER_EXPR
		  && code != GOTO_EXPR
		  && code != LABEL_EXPR
		  && code != LOOP_EXPR
		  && code != SWITCH_EXPR
		  && code != TRY_FINALLY_EXPR
		  && code != EH_ELSE_EXPR
		  && code != OACC_PARALLEL
		  && code != OACC_KERNELS
		  && code != OACC_SERIAL
		  && code != OACC_DATA
		  && code != OACC_HOST_DATA
		  && code != OACC_DECLARE
		  && code != OACC_UPDATE
		  && code != OACC_ENTER_DATA
		  && code != OACC_EXIT_DATA
		  && code != OACC_CACHE
		  && code != OMP_CRITICAL
		  && code != OMP_FOR
		  && code != OACC_LOOP
		  && code != OMP_MASTER
		  && code != OMP_MASKED
		  && code != OMP_TASKGROUP
		  && code != OMP_ORDERED
		  && code != OMP_PARALLEL
		  && code != OMP_SCAN
		  && code != OMP_SECTIONS
		  && code != OMP_SECTION
		  && code != OMP_SINGLE
		  && code != OMP_SCOPE);
    }
#endif

  /* Otherwise we're gimplifying a subexpression, so the resulting
     value is interesting.  If it's a valid operand that matches
     GIMPLE_TEST_F, we're done.  Unless we are handling some
     post-effects internally; if that's the case, we need to copy into
     a temporary before adding the post-effects to POST_P.  */
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
    goto out;

  /* Otherwise, we need to create a new temporary for the gimplified
     expression.  */

  /* We can't return an lvalue if we have an internal postqueue.  The
     object the lvalue refers to would (probably) be modified by the
     postqueue; we need to copy the value out first, which means an
     rvalue.  */
  if ((fallback & fb_lvalue)
      && gimple_seq_empty_p (internal_post)
      && is_gimple_addressable (*expr_p))
    {
      /* An lvalue will do.  Take the address of the expression, store it
	 in a temporary, and replace the expression with an INDIRECT_REF of
	 that temporary.  */
      tree ref_alias_type = reference_alias_ptr_type (*expr_p);
      unsigned int ref_align = get_object_alignment (*expr_p);
      tree ref_type = TREE_TYPE (*expr_p);
      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
      if (TYPE_ALIGN (ref_type) != ref_align)
	ref_type = build_aligned_type (ref_type, ref_align);
      *expr_p = build2 (MEM_REF, ref_type,
			tmp, build_zero_cst (ref_alias_type));
    }
  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
    {
      /* An rvalue will do.  Assign the gimplified expression into a
	 new temporary TMP and replace the original expression with
	 TMP.  First, make sure that the expression has a type so that
	 it can be assigned into a temporary.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
      *expr_p = get_formal_tmp_var (*expr_p, pre_p);
    }
  else
    {
#ifdef ENABLE_GIMPLE_CHECKING
      if (!(fallback & fb_mayfail))
	{
	  fprintf (stderr, "gimplification failed:\n");
	  print_generic_expr (stderr, *expr_p);
	  debug_tree (*expr_p);
	  internal_error ("gimplification failed");
	}
#endif
      gcc_assert (fallback & fb_mayfail);

      /* If this is an asm statement, and the user asked for the
	 impossible, don't die.  Fail and let gimplify_asm_expr
	 issue an error.  */
      ret = GS_ERROR;
      goto out;
    }

  /* Make sure the temporary matches our predicate.  */
  gcc_assert ((*gimple_test_f) (*expr_p));

  if (!gimple_seq_empty_p (internal_post))
    {
      annotate_all_with_location (internal_post, input_location);
      gimplify_seq_add_seq (pre_p, internal_post);
    }

 out:
  input_location = saved_location;
  return ret;
}
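
/* Illustrative sketch, not part of the pass: the typical calling
   convention used throughout this file.  The helper name and its
   parameters are hypothetical.  */
#if 0
static enum gimplify_status
force_gimple_val_example (tree *op, gimple_seq *pre_p)
{
  /* Evaluate *OP into PRE_P and replace it by a GIMPLE value.  */
  enum gimplify_status gs
    = gimplify_expr (op, pre_p, NULL, is_gimple_val, fb_rvalue);
  if (gs == GS_ERROR)
    return GS_ERROR;
  /* *OP is now a constant, a declaration or an SSA_NAME.  */
  return GS_ALL_DONE;
}
#endif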
/* Like gimplify_expr but make sure the gimplified result is not itself
   a SSA name (but a decl if it were).  Temporaries required by
   evaluating *EXPR_P may be still SSA names.  */

static enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback,
	       bool allow_ssa)
{
  enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
					    gimple_test_f, fallback);
  if (! allow_ssa
      && TREE_CODE (*expr_p) == SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);

  return ret;
}
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  if (type == NULL || type == error_mark_node)
    return;

  const bool ignored_p
    = TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_IGNORED_P (TYPE_NAME (type));
  tree t;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!ignored_p
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    /* Likewise, ensure variable offsets aren't removed.  */
	    if (!ignored_p
		&& (t = DECL_FIELD_OFFSET (field))
		&& VAR_P (t)
		&& DECL_ARTIFICIAL (t))
	      DECL_IGNORED_P (t) = 0;
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
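
/* Illustrative sketch, not part of the pass: a C99 VLA whose type
   carries non-constant sizes.  TYPE_SIZE of buf's array type is the
   expression 'n * 2' scaled to bits, and this routine evaluates it
   once into a temporary that every later use of the type shares.  */
#if 0
void
vla_example (int n)
{
  char buf[n * 2];
  __builtin_memset (buf, 0, sizeof buf);  /* sizeof evaluated at run time.  */
}
#endif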
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (expr == NULL_TREE
      || is_gimple_constant (expr)
      || TREE_CODE (expr) == VAR_DECL
      || CONTAINS_PLACEHOLDER_P (expr))
    return;

  *expr_p = unshare_expr (expr);

  /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
     if the def vanishes.  */
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);

  /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
     FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
     as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs.  */
  if (is_gimple_constant (*expr_p))
    *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
}
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_nondebug_stmt (seq);
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && (gimple_seq_first_nondebug_stmt (seq)
	  == gimple_seq_last_nondebug_stmt (seq)))
    {
      outer_bind = as_a <gbind *> (outer_stmt);
      if (gimple_seq_first_stmt (seq) != outer_stmt
	  || gimple_seq_last_stmt (seq) != outer_stmt)
	{
	  /* If there are debug stmts before or after outer_stmt, move them
	     inside of outer_bind body.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
	  gimple_seq second_seq = NULL;
	  if (gimple_seq_first_stmt (seq) != outer_stmt
	      && gimple_seq_last_stmt (seq) != outer_stmt)
	    {
	      second_seq = gsi_split_seq_after (gsi);
	      gsi_remove (&gsi, false);
	    }
	  else if (gimple_seq_first_stmt (seq) != outer_stmt)
	    gsi_remove (&gsi, false);
	  else
	    {
	      gsi_remove (&gsi, false);
	      second_seq = seq;
	      seq = NULL;
	    }
	  gimple_seq_add_seq_without_update (&seq,
					     gimple_bind_body (outer_bind));
	  gimple_seq_add_seq_without_update (&seq, second_seq);
	  gimple_bind_set_body (outer_bind, seq);
	}
    }
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
typedef char *char_p; /* For DEF_VEC_P.  */

/* Return whether we should exclude FNDECL from instrumentation.  */

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  vec<char_p> *v;

  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 1);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  return false;
}
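
/* Illustrative sketch, not part of the pass: the two vectors consulted
   above are filled from the command line, e.g.

     gcc -finstrument-functions \
	 -finstrument-functions-exclude-function-list=foo,bar \
	 -finstrument-functions-exclude-file-list=include/sys

   and, as the strstr calls show, matching is by substring against the
   printable decl name and the source file name respectively.  */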
/* Build a call to the instrumentation function FNCODE and add it to SEQ.
   If COND_VAR is not NULL, it is a boolean variable guarding the call to
   the instrumentation function.  If STMT is not NULL, it is a statement
   to be executed just before the call to the instrumentation function.  */

static void
build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
			    tree cond_var, gimple *stmt)
{
  /* The instrumentation hooks aren't going to call the instrumented
     function and the address they receive is expected to be matchable
     against symbol addresses.  Make sure we don't create a trampoline,
     in case the current function is nested.  */
  tree this_fn_addr = build_fold_addr_expr (current_function_decl);
  TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

  tree label_true, label_false;
  if (cond_var)
    {
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      label_false = create_artificial_label (UNKNOWN_LOCATION);
      gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
				       label_true, label_false);
      gimplify_seq_add_stmt (seq, cond);
      gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
      gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
							NOT_TAKEN));
    }

  if (stmt)
    gimplify_seq_add_stmt (seq, stmt);

  tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  gcall *call = gimple_build_call (x, 1, integer_zero_node);
  tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
  gimple_call_set_lhs (call, tmp_var);
  gimplify_seq_add_stmt (seq, call);
  x = builtin_decl_implicit (fncode);
  call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
  gimplify_seq_add_stmt (seq, call);

  if (cond_var)
    gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
}
17786 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
17787 node for the function we want to gimplify.
17789 Return the sequence of GIMPLE statements corresponding to the body
17793 gimplify_function_tree (tree fndecl
)
17798 gcc_assert (!gimple_body (fndecl
));
17800 if (DECL_STRUCT_FUNCTION (fndecl
))
17801 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
17803 push_struct_function (fndecl
);
17805 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
17807 cfun
->curr_properties
|= PROP_gimple_lva
;
17809 if (asan_sanitize_use_after_scope ())
17810 asan_poisoned_variables
= new hash_set
<tree
> ();
17811 bind
= gimplify_body (fndecl
, true);
17812 if (asan_poisoned_variables
)
17814 delete asan_poisoned_variables
;
17815 asan_poisoned_variables
= NULL
;
17818 /* The tree body of the function is no longer needed, replace it
17819 with the new GIMPLE body. */
17821 gimple_seq_add_stmt (&seq
, bind
);
17822 gimple_set_body (fndecl
, seq
);
17824 /* If we're instrumenting function entry/exit, then prepend the call to
17825 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
17826 catch the exit hook. */
17827 /* ??? Add some way to ignore exceptions for this TFE. */
17828 if (flag_instrument_function_entry_exit
17829 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
17830 /* Do not instrument extern inline functions. */
17831 && !(DECL_DECLARED_INLINE_P (fndecl
)
17832 && DECL_EXTERNAL (fndecl
)
17833 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
17834 && !flag_instrument_functions_exclude_p (fndecl
))
17836 gimple_seq body
= NULL
, cleanup
= NULL
;
17840 /* If -finstrument-functions-once is specified, generate:
17842 static volatile bool C.0 = false;
17849 [call profiling enter function]
17852 without specific protection for data races. */
17853 if (flag_instrument_function_entry_exit
> 1)
17856 = build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
17858 create_tmp_var_name ("C"),
17859 boolean_type_node
);
17860 DECL_ARTIFICIAL (first_var
) = 1;
17861 DECL_IGNORED_P (first_var
) = 1;
17862 TREE_STATIC (first_var
) = 1;
17863 TREE_THIS_VOLATILE (first_var
) = 1;
17864 TREE_USED (first_var
) = 1;
17865 DECL_INITIAL (first_var
) = boolean_false_node
;
17866 varpool_node::add (first_var
);
17868 cond_var
= create_tmp_var (boolean_type_node
, "tmp_called");
17869 assign
= gimple_build_assign (cond_var
, first_var
);
17870 gimplify_seq_add_stmt (&body
, assign
);
17872 assign
= gimple_build_assign (first_var
, boolean_true_node
);
17877 cond_var
= NULL_TREE
;
17881 build_instrumentation_call (&body
, BUILT_IN_PROFILE_FUNC_ENTER
,
17884 /* If -finstrument-functions-once is specified, generate:
17887 [call profiling exit function]
17889 without specific protection for data races. */
17890 build_instrumentation_call (&cleanup
, BUILT_IN_PROFILE_FUNC_EXIT
,
17893 gimple
*tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
17894 gimplify_seq_add_stmt (&body
, tf
);
17895 gbind
*new_bind
= gimple_build_bind (NULL
, body
, NULL
);
17897 /* Replace the current function body with the body
17898 wrapped in the try/finally TF. */
17900 gimple_seq_add_stmt (&seq
, new_bind
);
17901 gimple_set_body (fndecl
, seq
);
17905 if (sanitize_flags_p (SANITIZE_THREAD
)
17906 && param_tsan_instrument_func_entry_exit
)
17908 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
17909 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
17910 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
17911 /* Replace the current function body with the body
17912 wrapped in the try/finally TF. */
17914 gimple_seq_add_stmt (&seq
, new_bind
);
17915 gimple_set_body (fndecl
, seq
);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}

/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}
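
/* For example (illustrative), dummy_object (integer_type_node) builds the
   tree equivalent of the C lvalue

     *(int *) 0

   i.e. a MEM_REF of type 'int' whose base pointer and offset operands are
   both null/zero constants: it has the right type and mode, but is never
   meant to be executed.  */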

/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
                      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);
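
  /* For instance, on targets such as x86-64 where va_list is a one-element
     array type, a va_list argument reaches this point with pointer type
     because of array-to-pointer decay, so the canonical va_list type must
     be recovered from the pointed-to type by the second call above.  */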

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
         in a system header) through `...'.  */
      location_t xloc
        = expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
         violation, so we cannot make this an error.  If this call is never
         executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
                           "%qT is promoted to %qT when passed through %<...%>",
                           type, promoted_type);
      if (!gave_help && warned)
        {
          gave_help = true;
          inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
                  promoted_type, type);
        }

      /* We can, however, treat "undefined" any way we please.
         Call abort to encourage the user to fix the program.  */
      if (warned)
        inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
         expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
                               builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
         mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
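
  /* For example, user code such as

       char c = va_arg (ap, char);

     lands here: 'char' is promoted to 'int' when passed through '...',
     so the value can never be legitimately read back as 'char', and we
     warn and emit a trap in place of the access.  */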

  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
                                          valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
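
/* Note the protocol with gimplify_function_tree above: PROP_gimple_lva is
   set tentatively before the body is gimplified and cleared here whenever
   an IFN_VA_ARG call is emitted, so the later va_arg lowering pass can be
   skipped entirely for functions that contain no va_arg.  */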

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
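
/* A minimal usage sketch (with hypothetical trees lhs, a, b):

     gimple_seq seq = NULL;
     gimple *stmt
       = gimplify_assign (lhs, build2 (PLUS_EXPR, TREE_TYPE (lhs), a, b),
                          &seq);

   appends the gimplified form of 'lhs = a + b' to SEQ and returns the
   final assignment statement.  */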

/* Hashing and equality callbacks for the gimplification temporary table,
   which reuses one temporary for structurally identical expressions.  */

inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}
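
/* The assertion encodes the usual hash-table contract: elements that
   compare equal must also hash equal.  Since the temporary table is keyed
   on expression structure, a mismatch would make temporary reuse (and thus
   the generated code) depend on hash ordering, which is exactly the kind
   of host-dependent divergence a bootstrap comparison catches.  */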