1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2016 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
38 #include "fold-const.h"
43 #include "gimple-fold.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
55 #include "gimple-low.h"
57 #include "gomp-constants.h"
58 #include "tree-dump.h"
59 #include "gimple-walk.h"
60 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
65 /* Hash set of poisoned variables in a bind expr. */
66 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
68 enum gimplify_omp_var_data
74 GOVD_FIRSTPRIVATE
= 16,
75 GOVD_LASTPRIVATE
= 32,
79 GOVD_DEBUG_PRIVATE
= 512,
80 GOVD_PRIVATE_OUTER_REF
= 1024,
84 /* Flag for GOVD_MAP: don't copy back. */
85 GOVD_MAP_TO_ONLY
= 8192,
87 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
88 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 16384,
90 GOVD_MAP_0LEN_ARRAY
= 32768,
92 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
93 GOVD_MAP_ALWAYS_TO
= 65536,
95 /* Flag for shared vars that are or might be stored to in the region. */
96 GOVD_WRITTEN
= 131072,
98 /* Flag for GOVD_MAP, if it is a forced mapping. */
99 GOVD_MAP_FORCE
= 262144,
101 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
102 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
109 ORT_WORKSHARE
= 0x00,
113 ORT_COMBINED_PARALLEL
= 0x03,
116 ORT_UNTIED_TASK
= 0x05,
119 ORT_COMBINED_TEAMS
= 0x09,
122 ORT_TARGET_DATA
= 0x10,
124 /* Data region with offloading. */
126 ORT_COMBINED_TARGET
= 0x21,
128 /* OpenACC variants. */
129 ORT_ACC
= 0x40, /* A generic OpenACC region. */
130 ORT_ACC_DATA
= ORT_ACC
| ORT_TARGET_DATA
, /* Data construct. */
131 ORT_ACC_PARALLEL
= ORT_ACC
| ORT_TARGET
, /* Parallel construct */
132 ORT_ACC_KERNELS
= ORT_ACC
| ORT_TARGET
| 0x80, /* Kernels construct. */
133 ORT_ACC_HOST_DATA
= ORT_ACC
| ORT_TARGET_DATA
| 0x80, /* Host data. */
135 /* Dummy OpenMP region, used to disable expansion of
136 DECL_VALUE_EXPRs in taskloop pre body. */
140 /* Gimplify hashtable helper. */
142 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
144 static inline hashval_t
hash (const elt_t
*);
145 static inline bool equal (const elt_t
*, const elt_t
*);
150 struct gimplify_ctx
*prev_context
;
152 vec
<gbind
*> bind_expr_stack
;
154 gimple_seq conditional_cleanups
;
158 vec
<tree
> case_labels
;
159 hash_set
<tree
> *live_switch_vars
;
160 /* The formal temporary table. Should this be persistent? */
161 hash_table
<gimplify_hasher
> *temp_htab
;
164 unsigned into_ssa
: 1;
165 unsigned allow_rhs_cond_expr
: 1;
166 unsigned in_cleanup_point_expr
: 1;
167 unsigned keep_stack
: 1;
168 unsigned save_stack
: 1;
169 unsigned in_switch_expr
: 1;
172 struct gimplify_omp_ctx
174 struct gimplify_omp_ctx
*outer_context
;
175 splay_tree variables
;
176 hash_set
<tree
> *privatized_types
;
177 /* Iteration variables in an OMP_FOR. */
178 vec
<tree
> loop_iter_var
;
180 enum omp_clause_default_kind default_kind
;
181 enum omp_region_type region_type
;
184 bool target_map_scalars_firstprivate
;
185 bool target_map_pointers_as_0len_arrays
;
186 bool target_firstprivatize_array_bases
;
189 static struct gimplify_ctx
*gimplify_ctxp
;
190 static struct gimplify_omp_ctx
*gimplify_omp_ctxp
;
192 /* Forward declaration. */
193 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
194 static hash_map
<tree
, tree
> *oacc_declare_returns
;
195 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
196 bool (*) (tree
), fallback_t
, bool);
198 /* Shorter alias name for the above function for use in gimplify.c
202 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
204 gimple_seq_add_stmt_without_update (seq_p
, gs
);
207 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
208 NULL, a new sequence is allocated. This function is
209 similar to gimple_seq_add_seq, but does not scan the operands.
210 During gimplification, we need to manipulate statement sequences
211 before the def/use vectors have been constructed. */
214 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
216 gimple_stmt_iterator si
;
221 si
= gsi_last (*dst_p
);
222 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
226 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
227 and popping gimplify contexts. */
229 static struct gimplify_ctx
*ctx_pool
= NULL
;
231 /* Return a gimplify context struct from the pool. */
233 static inline struct gimplify_ctx
*
236 struct gimplify_ctx
* c
= ctx_pool
;
239 ctx_pool
= c
->prev_context
;
241 c
= XNEW (struct gimplify_ctx
);
243 memset (c
, '\0', sizeof (*c
));
247 /* Put gimplify context C back into the pool. */
250 ctx_free (struct gimplify_ctx
*c
)
252 c
->prev_context
= ctx_pool
;
256 /* Free allocated ctx stack memory. */
259 free_gimplify_stack (void)
261 struct gimplify_ctx
*c
;
263 while ((c
= ctx_pool
))
265 ctx_pool
= c
->prev_context
;
271 /* Set up a context for the gimplifier. */
274 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
276 struct gimplify_ctx
*c
= ctx_alloc ();
278 c
->prev_context
= gimplify_ctxp
;
280 gimplify_ctxp
->into_ssa
= in_ssa
;
281 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
284 /* Tear down a context for the gimplifier. If BODY is non-null, then
285 put the temporaries into the outer BIND_EXPR. Otherwise, put them
288 BODY is not a sequence, but the first tuple in a sequence. */
291 pop_gimplify_context (gimple
*body
)
293 struct gimplify_ctx
*c
= gimplify_ctxp
;
296 && (!c
->bind_expr_stack
.exists ()
297 || c
->bind_expr_stack
.is_empty ()));
298 c
->bind_expr_stack
.release ();
299 gimplify_ctxp
= c
->prev_context
;
302 declare_vars (c
->temps
, body
, false);
304 record_vars (c
->temps
);
311 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
314 gimple_push_bind_expr (gbind
*bind_stmt
)
316 gimplify_ctxp
->bind_expr_stack
.reserve (8);
317 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
320 /* Pop the first element off the stack of bindings. */
323 gimple_pop_bind_expr (void)
325 gimplify_ctxp
->bind_expr_stack
.pop ();
328 /* Return the first element of the stack of bindings. */
331 gimple_current_bind_expr (void)
333 return gimplify_ctxp
->bind_expr_stack
.last ();
336 /* Return the stack of bindings created during gimplification. */
339 gimple_bind_expr_stack (void)
341 return gimplify_ctxp
->bind_expr_stack
;
344 /* Return true iff there is a COND_EXPR between us and the innermost
345 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
348 gimple_conditional_context (void)
350 return gimplify_ctxp
->conditions
> 0;
353 /* Note that we've entered a COND_EXPR. */
356 gimple_push_condition (void)
358 #ifdef ENABLE_GIMPLE_CHECKING
359 if (gimplify_ctxp
->conditions
== 0)
360 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
362 ++(gimplify_ctxp
->conditions
);
365 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
366 now, add any conditional cleanups we've seen to the prequeue. */
369 gimple_pop_condition (gimple_seq
*pre_p
)
371 int conds
= --(gimplify_ctxp
->conditions
);
373 gcc_assert (conds
>= 0);
376 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
377 gimplify_ctxp
->conditional_cleanups
= NULL
;
381 /* A stable comparison routine for use with splay trees and DECLs. */
384 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
389 return DECL_UID (a
) - DECL_UID (b
);
392 /* Create a new omp construct that deals with variable remapping. */
394 static struct gimplify_omp_ctx
*
395 new_omp_context (enum omp_region_type region_type
)
397 struct gimplify_omp_ctx
*c
;
399 c
= XCNEW (struct gimplify_omp_ctx
);
400 c
->outer_context
= gimplify_omp_ctxp
;
401 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
402 c
->privatized_types
= new hash_set
<tree
>;
403 c
->location
= input_location
;
404 c
->region_type
= region_type
;
405 if ((region_type
& ORT_TASK
) == 0)
406 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
408 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
413 /* Destroy an omp construct that deals with variable remapping. */
416 delete_omp_context (struct gimplify_omp_ctx
*c
)
418 splay_tree_delete (c
->variables
);
419 delete c
->privatized_types
;
420 c
->loop_iter_var
.release ();
424 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
425 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
427 /* Both gimplify the statement T and append it to *SEQ_P. This function
428 behaves exactly as gimplify_stmt, but you don't have to pass T as a
432 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
434 gimplify_stmt (&t
, seq_p
);
437 /* Gimplify statement T into sequence *SEQ_P, and return the first
438 tuple in the sequence of generated tuples for this statement.
439 Return NULL if gimplifying T produced no tuples. */
442 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
444 gimple_stmt_iterator last
= gsi_last (*seq_p
);
446 gimplify_and_add (t
, seq_p
);
448 if (!gsi_end_p (last
))
451 return gsi_stmt (last
);
454 return gimple_seq_first_stmt (*seq_p
);
457 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
458 LHS, or for a call argument. */
461 is_gimple_mem_rhs (tree t
)
463 /* If we're dealing with a renamable type, either source or dest must be
464 a renamed variable. */
465 if (is_gimple_reg_type (TREE_TYPE (t
)))
466 return is_gimple_val (t
);
468 return is_gimple_val (t
) || is_gimple_lvalue (t
);
471 /* Return true if T is a CALL_EXPR or an expression that can be
472 assigned to a temporary. Note that this predicate should only be
473 used during gimplification. See the rationale for this in
474 gimplify_modify_expr. */
477 is_gimple_reg_rhs_or_call (tree t
)
479 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
480 || TREE_CODE (t
) == CALL_EXPR
);
483 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
484 this predicate should only be used during gimplification. See the
485 rationale for this in gimplify_modify_expr. */
488 is_gimple_mem_rhs_or_call (tree t
)
490 /* If we're dealing with a renamable type, either source or dest must be
491 a renamed variable. */
492 if (is_gimple_reg_type (TREE_TYPE (t
)))
493 return is_gimple_val (t
);
495 return (is_gimple_val (t
) || is_gimple_lvalue (t
)
496 || TREE_CODE (t
) == CALL_EXPR
);
499 /* Create a temporary with a name derived from VAL. Subroutine of
500 lookup_tmp_var; nobody else should call this function. */
503 create_tmp_from_val (tree val
)
505 /* Drop all qualifiers and address-space information from the value type. */
506 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
507 tree var
= create_tmp_var (type
, get_name (val
));
508 if (TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
509 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
)
510 DECL_GIMPLE_REG_P (var
) = 1;
514 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
515 an existing expression temporary. */
518 lookup_tmp_var (tree val
, bool is_formal
)
522 /* If not optimizing, never really reuse a temporary. local-alloc
523 won't allocate any variable that is used in more than one basic
524 block, which means it will go into memory, causing much extra
525 work in reload and final and poorer code generation, outweighing
526 the extra memory allocation here. */
527 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
528 ret
= create_tmp_from_val (val
);
535 if (!gimplify_ctxp
->temp_htab
)
536 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
537 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
540 elt_p
= XNEW (elt_t
);
542 elt_p
->temp
= ret
= create_tmp_from_val (val
);
555 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
558 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
559 bool is_formal
, bool allow_ssa
)
563 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
564 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
565 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
569 && gimplify_ctxp
->into_ssa
570 && is_gimple_reg_type (TREE_TYPE (val
)))
572 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
573 if (! gimple_in_ssa_p (cfun
))
575 const char *name
= get_name (val
);
577 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
581 t
= lookup_tmp_var (val
, is_formal
);
583 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
585 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
587 /* gimplify_modify_expr might want to reduce this further. */
588 gimplify_and_add (mod
, pre_p
);
594 /* Return a formal temporary variable initialized with VAL. PRE_P is as
595 in gimplify_expr. Only use this function if:
597 1) The value of the unfactored expression represented by VAL will not
598 change between the initialization and use of the temporary, and
599 2) The temporary will not be otherwise modified.
601 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
602 and #2 means it is inappropriate for && temps.
604 For other cases, use get_initialized_tmp_var instead. */
607 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
609 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true);
612 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
613 are as in gimplify_expr. */
616 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
619 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
);
622 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
623 generate debug info for them; otherwise don't. */
626 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
633 gbind
*scope
= as_a
<gbind
*> (gs
);
635 temps
= nreverse (last
);
637 block
= gimple_bind_block (scope
);
638 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
639 if (!block
|| !debug_info
)
641 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
642 gimple_bind_set_vars (scope
, temps
);
646 /* We need to attach the nodes both to the BIND_EXPR and to its
647 associated BLOCK for debugging purposes. The key point here
648 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
649 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
650 if (BLOCK_VARS (block
))
651 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
654 gimple_bind_set_vars (scope
,
655 chainon (gimple_bind_vars (scope
), temps
));
656 BLOCK_VARS (block
) = temps
;
662 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
663 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
664 no such upper bound can be obtained. */
667 force_constant_size (tree var
)
669 /* The only attempt we make is by querying the maximum size of objects
670 of the variable's type. */
672 HOST_WIDE_INT max_size
;
674 gcc_assert (VAR_P (var
));
676 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
678 gcc_assert (max_size
>= 0);
681 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
683 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
686 /* Push the temporary variable TMP into the current binding. */
689 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
691 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
693 /* Later processing assumes that the object size is constant, which might
694 not be true at this point. Force the use of a constant upper bound in
696 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp
)))
697 force_constant_size (tmp
);
699 DECL_CONTEXT (tmp
) = fn
->decl
;
700 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
702 record_vars_into (tmp
, fn
->decl
);
705 /* Push the temporary variable TMP into the current binding. */
708 gimple_add_tmp_var (tree tmp
)
710 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
712 /* Later processing assumes that the object size is constant, which might
713 not be true at this point. Force the use of a constant upper bound in
715 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp
)))
716 force_constant_size (tmp
);
718 DECL_CONTEXT (tmp
) = current_function_decl
;
719 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
723 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
724 gimplify_ctxp
->temps
= tmp
;
726 /* Mark temporaries local within the nearest enclosing parallel. */
727 if (gimplify_omp_ctxp
)
729 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
731 && (ctx
->region_type
== ORT_WORKSHARE
732 || ctx
->region_type
== ORT_SIMD
733 || ctx
->region_type
== ORT_ACC
))
734 ctx
= ctx
->outer_context
;
736 omp_add_variable (ctx
, tmp
, GOVD_LOCAL
| GOVD_SEEN
);
745 /* This case is for nested functions. We need to expose the locals
747 body_seq
= gimple_body (current_function_decl
);
748 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
754 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
755 nodes that are referenced more than once in GENERIC functions. This is
756 necessary because gimplification (translation into GIMPLE) is performed
757 by modifying tree nodes in-place, so gimplication of a shared node in a
758 first context could generate an invalid GIMPLE form in a second context.
760 This is achieved with a simple mark/copy/unmark algorithm that walks the
761 GENERIC representation top-down, marks nodes with TREE_VISITED the first
762 time it encounters them, duplicates them if they already have TREE_VISITED
763 set, and finally removes the TREE_VISITED marks it has set.
765 The algorithm works only at the function level, i.e. it generates a GENERIC
766 representation of a function with no nodes shared within the function when
767 passed a GENERIC function (except for nodes that are allowed to be shared).
769 At the global level, it is also necessary to unshare tree nodes that are
770 referenced in more than one function, for the same aforementioned reason.
771 This requires some cooperation from the front-end. There are 2 strategies:
773 1. Manual unsharing. The front-end needs to call unshare_expr on every
774 expression that might end up being shared across functions.
776 2. Deep unsharing. This is an extension of regular unsharing. Instead
777 of calling unshare_expr on expressions that might be shared across
778 functions, the front-end pre-marks them with TREE_VISITED. This will
779 ensure that they are unshared on the first reference within functions
780 when the regular unsharing algorithm runs. The counterpart is that
781 this algorithm must look deeper than for manual unsharing, which is
782 specified by LANG_HOOKS_DEEP_UNSHARING.
784 If there are only few specific cases of node sharing across functions, it is
785 probably easier for a front-end to unshare the expressions manually. On the
786 contrary, if the expressions generated at the global level are as widespread
787 as expressions generated within functions, deep unsharing is very likely the
790 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
791 These nodes model computations that must be done once. If we were to
792 unshare something like SAVE_EXPR(i++), the gimplification process would
793 create wrong code. However, if DATA is non-null, it must hold a pointer
794 set that is used to unshare the subtrees of these nodes. */
797 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
800 enum tree_code code
= TREE_CODE (t
);
802 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
803 copy their subtrees if we can make sure to do it only once. */
804 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
806 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
812 /* Stop at types, decls, constants like copy_tree_r. */
813 else if (TREE_CODE_CLASS (code
) == tcc_type
814 || TREE_CODE_CLASS (code
) == tcc_declaration
815 || TREE_CODE_CLASS (code
) == tcc_constant
816 /* We can't do anything sensible with a BLOCK used as an
817 expression, but we also can't just die when we see it
818 because of non-expression uses. So we avert our eyes
819 and cross our fingers. Silly Java. */
823 /* Cope with the statement expression extension. */
824 else if (code
== STATEMENT_LIST
)
827 /* Leave the bulk of the work to copy_tree_r itself. */
829 copy_tree_r (tp
, walk_subtrees
, NULL
);
834 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
835 If *TP has been visited already, then *TP is deeply copied by calling
836 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
839 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
842 enum tree_code code
= TREE_CODE (t
);
844 /* Skip types, decls, and constants. But we do want to look at their
845 types and the bounds of types. Mark them as visited so we properly
846 unmark their subtrees on the unmark pass. If we've already seen them,
847 don't look down further. */
848 if (TREE_CODE_CLASS (code
) == tcc_type
849 || TREE_CODE_CLASS (code
) == tcc_declaration
850 || TREE_CODE_CLASS (code
) == tcc_constant
)
852 if (TREE_VISITED (t
))
855 TREE_VISITED (t
) = 1;
858 /* If this node has been visited already, unshare it and don't look
860 else if (TREE_VISITED (t
))
862 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
866 /* Otherwise, mark the node as visited and keep looking. */
868 TREE_VISITED (t
) = 1;
873 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
874 copy_if_shared_r callback unmodified. */
877 copy_if_shared (tree
*tp
, void *data
)
879 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
882 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
883 any nested functions. */
886 unshare_body (tree fndecl
)
888 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
889 /* If the language requires deep unsharing, we need a pointer set to make
890 sure we don't repeatedly unshare subtrees of unshareable nodes. */
891 hash_set
<tree
> *visited
892 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
894 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
895 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
896 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
901 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
902 unshare_body (cgn
->decl
);
905 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
906 Subtrees are walked until the first unvisited node is encountered. */
909 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
913 /* If this node has been visited, unmark it and keep looking. */
914 if (TREE_VISITED (t
))
915 TREE_VISITED (t
) = 0;
917 /* Otherwise, don't look any deeper. */
924 /* Unmark the visited trees rooted at *TP. */
927 unmark_visited (tree
*tp
)
929 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
932 /* Likewise, but mark all trees as not visited. */
935 unvisit_body (tree fndecl
)
937 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
939 unmark_visited (&DECL_SAVED_TREE (fndecl
));
940 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
941 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
944 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
945 unvisit_body (cgn
->decl
);
948 /* Unconditionally make an unshared copy of EXPR. This is used when using
949 stored expressions which span multiple functions, such as BINFO_VTABLE,
950 as the normal unsharing process can't tell that they're shared. */
953 unshare_expr (tree expr
)
955 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
959 /* Worker for unshare_expr_without_location. */
962 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
965 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
971 /* Similar to unshare_expr but also prune all expression locations
975 unshare_expr_without_location (tree expr
)
977 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
979 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
983 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
984 contain statements and have a value. Assign its value to a temporary
985 and give it void_type_node. Return the temporary, or NULL_TREE if
986 WRAPPER was already void. */
989 voidify_wrapper_expr (tree wrapper
, tree temp
)
991 tree type
= TREE_TYPE (wrapper
);
992 if (type
&& !VOID_TYPE_P (type
))
996 /* Set p to point to the body of the wrapper. Loop until we find
997 something that isn't a wrapper. */
998 for (p
= &wrapper
; p
&& *p
; )
1000 switch (TREE_CODE (*p
))
1003 TREE_SIDE_EFFECTS (*p
) = 1;
1004 TREE_TYPE (*p
) = void_type_node
;
1005 /* For a BIND_EXPR, the body is operand 1. */
1006 p
= &BIND_EXPR_BODY (*p
);
1009 case CLEANUP_POINT_EXPR
:
1010 case TRY_FINALLY_EXPR
:
1011 case TRY_CATCH_EXPR
:
1012 TREE_SIDE_EFFECTS (*p
) = 1;
1013 TREE_TYPE (*p
) = void_type_node
;
1014 p
= &TREE_OPERAND (*p
, 0);
1017 case STATEMENT_LIST
:
1019 tree_stmt_iterator i
= tsi_last (*p
);
1020 TREE_SIDE_EFFECTS (*p
) = 1;
1021 TREE_TYPE (*p
) = void_type_node
;
1022 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1027 /* Advance to the last statement. Set all container types to
1029 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1031 TREE_SIDE_EFFECTS (*p
) = 1;
1032 TREE_TYPE (*p
) = void_type_node
;
1036 case TRANSACTION_EXPR
:
1037 TREE_SIDE_EFFECTS (*p
) = 1;
1038 TREE_TYPE (*p
) = void_type_node
;
1039 p
= &TRANSACTION_EXPR_BODY (*p
);
1043 /* Assume that any tree upon which voidify_wrapper_expr is
1044 directly called is a wrapper, and that its body is op0. */
1047 TREE_SIDE_EFFECTS (*p
) = 1;
1048 TREE_TYPE (*p
) = void_type_node
;
1049 p
= &TREE_OPERAND (*p
, 0);
1057 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1061 /* The wrapper is on the RHS of an assignment that we're pushing
1063 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1064 || TREE_CODE (temp
) == MODIFY_EXPR
);
1065 TREE_OPERAND (temp
, 1) = *p
;
1070 temp
= create_tmp_var (type
, "retval");
1071 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1080 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1081 a temporary through which they communicate. */
1084 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1088 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1089 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1090 gimple_call_set_lhs (*save
, tmp_var
);
1093 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1097 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1100 build_asan_poison_call_expr (tree decl
)
1102 /* Do not poison variables that have size equal to zero. */
1103 tree unit_size
= DECL_SIZE_UNIT (decl
);
1104 if (zerop (unit_size
))
1107 tree base
= build_fold_addr_expr (decl
);
1109 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1111 build_int_cst (integer_type_node
,
1116 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1117 on POISON flag, shadow memory of a DECL variable. The call will be
1118 put on location identified by IT iterator, where BEFORE flag drives
1119 position where the stmt will be put. */
1122 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1125 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1126 if (gimplify_omp_ctxp
)
1129 tree unit_size
= DECL_SIZE_UNIT (decl
);
1130 tree base
= build_fold_addr_expr (decl
);
1132 /* Do not poison variables that have size equal to zero. */
1133 if (zerop (unit_size
))
1136 /* It's necessary to have all stack variables aligned to ASAN granularity
1138 if (DECL_ALIGN_UNIT (decl
) <= ASAN_SHADOW_GRANULARITY
)
1139 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* ASAN_SHADOW_GRANULARITY
);
1141 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1144 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1145 build_int_cst (integer_type_node
, flags
),
1149 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1151 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1154 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1155 either poisons or unpoisons a DECL. Created statement is appended
1156 to SEQ_P gimple sequence. */
1159 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1161 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1162 bool before
= false;
1167 asan_poison_variable (decl
, poison
, &it
, before
);
1170 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1173 sort_by_decl_uid (const void *a
, const void *b
)
1175 const tree
*t1
= (const tree
*)a
;
1176 const tree
*t2
= (const tree
*)b
;
1178 int uid1
= DECL_UID (*t1
);
1179 int uid2
= DECL_UID (*t2
);
1183 else if (uid1
> uid2
)
1189 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1190 depending on POISON flag. Created statement is appended
1191 to SEQ_P gimple sequence. */
1194 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1196 unsigned c
= variables
->elements ();
1200 auto_vec
<tree
> sorted_variables (c
);
1202 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1203 it
!= variables
->end (); ++it
)
1204 sorted_variables
.safe_push (*it
);
1206 sorted_variables
.qsort (sort_by_decl_uid
);
1208 for (unsigned i
= 0; i
< sorted_variables
.length (); i
++)
1209 asan_poison_variable (sorted_variables
[i
], poison
, seq_p
);
1212 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1214 static enum gimplify_status
1215 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1217 tree bind_expr
= *expr_p
;
1218 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1219 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1222 gimple_seq body
, cleanup
;
1224 location_t start_locus
= 0, end_locus
= 0;
1225 tree ret_clauses
= NULL
;
1227 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1229 /* Mark variables seen in this bind expr. */
1230 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1234 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1236 /* Mark variable as local. */
1237 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
)
1238 && (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1239 || splay_tree_lookup (ctx
->variables
,
1240 (splay_tree_key
) t
) == NULL
))
1242 if (ctx
->region_type
== ORT_SIMD
1243 && TREE_ADDRESSABLE (t
)
1244 && !TREE_STATIC (t
))
1245 omp_add_variable (ctx
, t
, GOVD_PRIVATE
| GOVD_SEEN
);
1247 omp_add_variable (ctx
, t
, GOVD_LOCAL
| GOVD_SEEN
);
1250 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1252 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1253 cfun
->has_local_explicit_reg_vars
= true;
1256 /* Preliminarily mark non-addressed complex variables as eligible
1257 for promotion to gimple registers. We'll transform their uses
1259 if ((TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
1260 || TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
1261 && !TREE_THIS_VOLATILE (t
)
1262 && (VAR_P (t
) && !DECL_HARD_REGISTER (t
))
1263 && !needs_to_live_in_memory (t
))
1264 DECL_GIMPLE_REG_P (t
) = 1;
1267 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1268 BIND_EXPR_BLOCK (bind_expr
));
1269 gimple_push_bind_expr (bind_stmt
);
1271 gimplify_ctxp
->keep_stack
= false;
1272 gimplify_ctxp
->save_stack
= false;
1274 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1276 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1277 gimple_bind_set_body (bind_stmt
, body
);
1279 /* Source location wise, the cleanup code (stack_restore and clobbers)
1280 belongs to the end of the block, so propagate what we have. The
1281 stack_save operation belongs to the beginning of block, which we can
1282 infer from the bind_expr directly if the block has no explicit
1284 if (BIND_EXPR_BLOCK (bind_expr
))
1286 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1287 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1289 if (start_locus
== 0)
1290 start_locus
= EXPR_LOCATION (bind_expr
);
1295 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1296 the stack space allocated to the VLAs. */
1297 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1299 gcall
*stack_restore
;
1301 /* Save stack on entry and restore it on exit. Add a try_finally
1302 block to achieve this. */
1303 build_stack_save_restore (&stack_save
, &stack_restore
);
1305 gimple_set_location (stack_save
, start_locus
);
1306 gimple_set_location (stack_restore
, end_locus
);
1308 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1311 /* Add clobbers for all variables that go out of scope. */
1312 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1315 && !is_global_var (t
)
1316 && DECL_CONTEXT (t
) == current_function_decl
)
1318 if (!DECL_HARD_REGISTER (t
)
1319 && !TREE_THIS_VOLATILE (t
)
1320 && !DECL_HAS_VALUE_EXPR_P (t
)
1321 /* Only care for variables that have to be in memory. Others
1322 will be rewritten into SSA names, hence moved to the
1324 && !is_gimple_reg (t
)
1325 && flag_stack_reuse
!= SR_NONE
)
1327 tree clobber
= build_constructor (TREE_TYPE (t
), NULL
);
1328 gimple
*clobber_stmt
;
1329 TREE_THIS_VOLATILE (clobber
) = 1;
1330 clobber_stmt
= gimple_build_assign (t
, clobber
);
1331 gimple_set_location (clobber_stmt
, end_locus
);
1332 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1335 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1337 tree
*c
= oacc_declare_returns
->get (t
);
1341 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1345 oacc_declare_returns
->remove (t
);
1347 if (oacc_declare_returns
->elements () == 0)
1349 delete oacc_declare_returns
;
1350 oacc_declare_returns
= NULL
;
1356 if (asan_poisoned_variables
!= NULL
1357 && asan_poisoned_variables
->contains (t
))
1359 asan_poisoned_variables
->remove (t
);
1360 asan_poison_variable (t
, true, &cleanup
);
1363 if (gimplify_ctxp
->live_switch_vars
!= NULL
1364 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1365 gimplify_ctxp
->live_switch_vars
->remove (t
);
1371 gimple_stmt_iterator si
= gsi_start (cleanup
);
1373 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1375 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1381 gimple_seq new_body
;
1384 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1385 GIMPLE_TRY_FINALLY
);
1388 gimplify_seq_add_stmt (&new_body
, stack_save
);
1389 gimplify_seq_add_stmt (&new_body
, gs
);
1390 gimple_bind_set_body (bind_stmt
, new_body
);
1393 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1394 if (!gimplify_ctxp
->keep_stack
)
1395 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1396 gimplify_ctxp
->save_stack
= old_save_stack
;
1398 gimple_pop_bind_expr ();
1400 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1408 *expr_p
= NULL_TREE
;
1412 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1413 GIMPLE value, it is assigned to a new temporary and the statement is
1414 re-written to return the temporary.
1416 PRE_P points to the sequence where side effects that must happen before
1417 STMT should be stored. */
1419 static enum gimplify_status
1420 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1423 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1424 tree result_decl
, result
;
1426 if (ret_expr
== error_mark_node
)
1429 /* Implicit _Cilk_sync must be inserted right before any return statement
1430 if there is a _Cilk_spawn in the function. If the user has provided a
1431 _Cilk_sync, the optimizer should remove this duplicate one. */
1432 if (fn_contains_cilk_spawn_p (cfun
))
1434 tree impl_sync
= build0 (CILK_SYNC_STMT
, void_type_node
);
1435 gimplify_and_add (impl_sync
, pre_p
);
1439 || TREE_CODE (ret_expr
) == RESULT_DECL
1440 || ret_expr
== error_mark_node
)
1442 greturn
*ret
= gimple_build_return (ret_expr
);
1443 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1444 gimplify_seq_add_stmt (pre_p
, ret
);
1448 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1449 result_decl
= NULL_TREE
;
1452 result_decl
= TREE_OPERAND (ret_expr
, 0);
1454 /* See through a return by reference. */
1455 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1456 result_decl
= TREE_OPERAND (result_decl
, 0);
1458 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1459 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1460 && TREE_CODE (result_decl
) == RESULT_DECL
);
1463 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1464 Recall that aggregate_value_p is FALSE for any aggregate type that is
1465 returned in registers. If we're returning values in registers, then
1466 we don't want to extend the lifetime of the RESULT_DECL, particularly
1467 across another call. In addition, for those aggregates for which
1468 hard_function_value generates a PARALLEL, we'll die during normal
1469 expansion of structure assignments; there's special code in expand_return
1470 to handle this case that does not exist in expand_expr. */
1473 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1475 if (TREE_CODE (DECL_SIZE (result_decl
)) != INTEGER_CST
)
1477 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1478 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1479 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1480 should be effectively allocated by the caller, i.e. all calls to
1481 this function must be subject to the Return Slot Optimization. */
1482 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1483 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1485 result
= result_decl
;
1487 else if (gimplify_ctxp
->return_temp
)
1488 result
= gimplify_ctxp
->return_temp
;
1491 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1493 /* ??? With complex control flow (usually involving abnormal edges),
1494 we can wind up warning about an uninitialized value for this. Due
1495 to how this variable is constructed and initialized, this is never
1496 true. Give up and never warn. */
1497 TREE_NO_WARNING (result
) = 1;
1499 gimplify_ctxp
->return_temp
= result
;
1502 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1503 Then gimplify the whole thing. */
1504 if (result
!= result_decl
)
1505 TREE_OPERAND (ret_expr
, 0) = result
;
1507 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1509 ret
= gimple_build_return (result
);
1510 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1511 gimplify_seq_add_stmt (pre_p
, ret
);
1516 /* Gimplify a variable-length array DECL. */
/* Emits into *SEQ_P: gimplified size expressions for DECL plus an
   __builtin_alloca_with_align call whose result is stored through a new
   pointer temporary; DECL itself is rewritten via DECL_VALUE_EXPR to an
   indirection through that temporary.  */
1519 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1521 /* This is a variable-sized decl. Simplify its size and mark it
1522 for deferred expansion. */
1523 tree t
, addr
, ptr_type
;
1525 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1526 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1528 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1529 if (DECL_HAS_VALUE_EXPR_P (decl
))
1532 /* All occurrences of this decl in final gimplified code will be
1533 replaced by indirection. Setting DECL_VALUE_EXPR does two
1534 things: First, it lets the rest of the gimplifier know what
1535 replacement to use. Second, it lets the debug info know
1536 where to find the value. */
1537 ptr_type
= build_pointer_type (TREE_TYPE (decl
))
;
1538 addr
= create_tmp_var (ptr_type
, get_name (decl
));
/* Keep ADDR visible to debug info, since it stands in for DECL.  */
1539 DECL_IGNORED_P (addr
) = 0;
1540 t
= build_fold_indirect_ref (addr
);
/* The dereference cannot trap: ADDR always points at the alloca result.  */
1541 TREE_THIS_NOTRAP (t
) = 1;
1542 SET_DECL_VALUE_EXPR (decl
, t
);
1543 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
/* Allocate the storage: ADDR = __builtin_alloca_with_align (size, align).  */
1545 t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
1546 t
= build_call_expr (t
, 2, DECL_SIZE_UNIT (decl
),
1547 size_int (DECL_ALIGN (decl
)));
1548 /* The call has been built for a variable-sized object. */
1549 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1550 t
= fold_convert (ptr_type
, t
);
1551 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1553 gimplify_and_add (t
, seq_p
);
1556 /* A helper function to be called via walk_tree. Mark all labels under *TP
1557 as being forced. To be called for DECL_INITIAL of static variables. */
1560 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1564 if (TREE_CODE (*tp
) == LABEL_DECL
)
/* Force the label to be emitted, and record that this function's static
   initializers take a label's address.  */
1566 FORCED_LABEL (*tp
) = 1;
1567 cfun
->has_forced_label_in_static
= 1;
1573 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1574 and initialization explicit. */
1576 static enum gimplify_status
1577 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1579 tree stmt
= *stmt_p
;
1580 tree decl
= DECL_EXPR_DECL (stmt
);
1582 *stmt_p
= NULL_TREE
;
1584 if (TREE_TYPE (decl
) == error_mark_node
)
1587 if ((TREE_CODE (decl
) == TYPE_DECL
1589 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1591 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1592 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1593 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
1596 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1597 in case its size expressions contain problematic nodes like CALL_EXPR. */
1598 if (TREE_CODE (decl
) == TYPE_DECL
1599 && DECL_ORIGINAL_TYPE (decl
)
1600 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1602 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1603 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
1604 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
1607 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
1609 tree init
= DECL_INITIAL (decl
);
1610 bool is_vla
= false;
1612 if (TREE_CODE (DECL_SIZE_UNIT (decl
)) != INTEGER_CST
1613 || (!TREE_STATIC (decl
)
1614 && flag_stack_check
== GENERIC_STACK_CHECK
1615 && compare_tree_int (DECL_SIZE_UNIT (decl
),
1616 STACK_CHECK_MAX_VAR_SIZE
) > 0))
1618 gimplify_vla_decl (decl
, seq_p
);
1622 if (asan_sanitize_use_after_scope ()
1623 && !asan_no_sanitize_address_p ()
1625 && TREE_ADDRESSABLE (decl
)
1626 && !TREE_STATIC (decl
)
1627 && !DECL_HAS_VALUE_EXPR_P (decl
)
1628 && dbg_cnt (asan_use_after_scope
))
1630 asan_poisoned_variables
->add (decl
);
1631 asan_poison_variable (decl
, false, seq_p
);
1632 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
1633 gimplify_ctxp
->live_switch_vars
->add (decl
);
1636 /* Some front ends do not explicitly declare all anonymous
1637 artificial variables. We compensate here by declaring the
1638 variables, though it would be better if the front ends would
1639 explicitly declare them. */
1640 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1641 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1642 gimple_add_tmp_var (decl
);
1644 if (init
&& init
!= error_mark_node
)
1646 if (!TREE_STATIC (decl
))
1648 DECL_INITIAL (decl
) = NULL_TREE
;
1649 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1650 gimplify_and_add (init
, seq_p
);
1654 /* We must still examine initializers for static variables
1655 as they may contain a label address. */
1656 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1663 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1664 and replacing the LOOP_EXPR with goto, but if the loop contains an
1665 EXIT_EXPR, we need to append a label for it to jump to. */
1667 static enum gimplify_status
1668 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
/* Save the enclosing loop's exit label so nested loops do not clobber it.  */
1670 tree saved_label
= gimplify_ctxp
->exit_label
;
1671 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
/* Loop head: the back-edge goto below targets this label.  */
1673 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
/* Cleared so that an EXIT_EXPR in the body creates a fresh exit label for
   this loop (see gimplify_exit_expr / build_and_jump).  */
1675 gimplify_ctxp
->exit_label
= NULL_TREE
;
1677 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
/* Unconditional back edge to the loop head.  */
1679 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
/* If the body contained an EXIT_EXPR, emit the label it jumps to.  */
1681 if (gimplify_ctxp
->exit_label
)
1682 gimplify_seq_add_stmt (pre_p
,
1683 gimple_build_label (gimplify_ctxp
->exit_label
));
/* Restore the outer loop's exit label.  */
1685 gimplify_ctxp
->exit_label
= saved_label
;
1691 /* Gimplify a statement list onto a sequence. These may be created either
1692 by an enlightened front-end, or by shortcut_cond_expr. */
1694 static enum gimplify_status
1695 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
/* Voidify first: a value-yielding statement list gets a temporary (TEMP)
   to carry its value.  */
1697 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
1699 tree_stmt_iterator i
= tsi_start (*expr_p
);
/* Gimplify each statement of the list onto PRE_P.
   NOTE(review): the loop's iterator advance and the TEMP handling after
   the loop were dropped by this extraction.  */
1701 while (!tsi_end_p (i
))
1703 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
1716 /* Callback for walk_gimple_seq. */
/* Returns integer_zero_node to stop the walk once a statement of interest
   is reached; *HANDLED_OPS_P tells the walker whether sub-statements may
   be skipped.  */
1719 warn_switch_unreachable_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
1720 struct walk_stmt_info
*wi
)
1722 gimple
*stmt
= gsi_stmt (*gsi_p
);
1724 *handled_ops_p
= true;
1725 switch (gimple_code (stmt
))
1728 /* A compiler-generated cleanup or a user-written try block.
1729 If it's empty, don't dive into it--that would result in
1730 worse location info. */
1731 if (gimple_try_eval (stmt
) == NULL
)
1734 return integer_zero_node
;
1739 case GIMPLE_EH_FILTER
:
1740 case GIMPLE_TRANSACTION
:
1741 /* Walk the sub-statements. */
1742 *handled_ops_p
= false;
/* Internal IFN_ASAN_MARK calls are compiler-generated; presumably not
   treated as the first "real" statement -- surrounding lines are missing
   from this extraction.  */
1745 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
1747 *handled_ops_p
= false;
1752 /* Save the first "real" statement (not a decl/lexical scope/...). */
1754 return integer_zero_node
;
1759 /* Possibly warn about unreachable statements between switch's controlling
1760 expression and the first case. SEQ is the body of a switch expression. */
1763 maybe_warn_switch_unreachable (gimple_seq seq
)
/* Bail out when -Wswitch-unreachable is off or for Fortran.  */
1765 if (!warn_switch_unreachable
1766 /* This warning doesn't play well with Fortran when optimizations
1768 || lang_GNU_Fortran ()
/* Walk SEQ; warn_switch_unreachable_r records the first "real" statement
   (read back out of wi.info below).  */
1772 struct walk_stmt_info wi
;
1773 memset (&wi
, 0, sizeof (wi
));
1774 walk_gimple_seq (seq
, warn_switch_unreachable_r
, NULL
, &wi
);
1775 gimple
*stmt
= (gimple
*) wi
.info
;
/* A leading label means control can reach the statement; anything else
   found before the first case label is unreachable.  */
1777 if (stmt
&& gimple_code (stmt
) != GIMPLE_LABEL
)
1779 if (gimple_code (stmt
) == GIMPLE_GOTO
1780 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
1781 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
1782 /* Don't warn for compiler-generated gotos. These occur
1783 in Duff's devices, for example. */;
1785 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
1786 "statement will never be executed");
1791 /* A label entry that pairs label and a location. */
1798 /* Find LABEL in vector of label entries VEC. */
/* Linear scan; returns the matching entry (the dropped tail presumably
   returns NULL when LABEL is absent -- verify against upstream).  */
1800 static struct label_entry
*
1801 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
1804 struct label_entry
*l
;
1806 FOR_EACH_VEC_ELT (*vec
, i
, l
)
1807 if (l
->label
== label
)
1812 /* Return true if LABEL, a LABEL_DECL, represents a case label
1813 in a vector of labels CASES. */
1816 case_label_p (const vec
<tree
> *cases
, tree label
)
/* Linear scan comparing each entry's CASE_LABEL against LABEL.  */
1821 FOR_EACH_VEC_ELT (*cases
, i
, l
)
1822 if (CASE_LABEL (l
) == label
)
1827 /* Find the last statement in a scope STMT. */
/* Recurses through GIMPLE_BIND bodies and GIMPLE_TRY eval/cleanup
   sequences to locate the scope's final statement.  */
1830 last_stmt_in_scope (gimple
*stmt
)
1835 switch (gimple_code (stmt
))
/* For a bind, recurse into the last statement of its body.  */
1839 gbind
*bind
= as_a
<gbind
*> (stmt
);
1840 stmt
= gimple_seq_last_stmt (gimple_bind_body (bind
));
1841 return last_stmt_in_scope (stmt
);
/* For a try, the last statement of the protected sequence -- unless a
   falling-through try/finally, in which case use the cleanup's last.  */
1846 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
1847 stmt
= gimple_seq_last_stmt (gimple_try_eval (try_stmt
));
1848 gimple
*last_eval
= last_stmt_in_scope (stmt
);
/* NOTE(review): last_eval is passed to gimple_stmt_may_fallthru before
   the last_eval == NULL test below -- the NULL check appears to come too
   late; verify against upstream gimplify.c.  */
1849 if (gimple_stmt_may_fallthru (last_eval
)
1850 && (last_eval
== NULL
1851 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
1852 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
1854 stmt
= gimple_seq_last_stmt (gimple_try_cleanup (try_stmt
));
1855 return last_stmt_in_scope (stmt
);
1866 /* Collect interesting labels in LABELS and return the statement preceding
1867 another case label, or a user-defined label. */
1870 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
1871 auto_vec
<struct label_entry
> *labels
)
1873 gimple
*prev
= NULL
;
1877 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
1878 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
1880 /* Nested scope. Only look at the last statement of
1881 the innermost scope. */
1882 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
1883 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
1887 /* It might be a label without a location. Use the
1888 location of the scope then. */
1889 if (!gimple_has_location (prev
))
1890 gimple_set_location (prev
, bind_loc
);
1896 /* Ifs are tricky. */
1897 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
1899 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
1900 tree false_lab
= gimple_cond_false_label (cond_stmt
);
1901 location_t if_loc
= gimple_location (cond_stmt
);
1904 if (i > 1) goto <D.2259>; else goto D;
1905 we can't do much with the else-branch. */
1906 if (!DECL_ARTIFICIAL (false_lab
))
1909 /* Go on until the false label, then one step back. */
1910 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
1912 gimple
*stmt
= gsi_stmt (*gsi_p
);
1913 if (gimple_code (stmt
) == GIMPLE_LABEL
1914 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
1918 /* Not found? Oops. */
1919 if (gsi_end_p (*gsi_p
))
1922 struct label_entry l
= { false_lab
, if_loc
};
1923 labels
->safe_push (l
);
1925 /* Go to the last statement of the then branch. */
1928 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1934 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
1935 && !gimple_has_location (gsi_stmt (*gsi_p
)))
1937 /* Look at the statement before, it might be
1938 attribute fallthrough, in which case don't warn. */
1940 bool fallthru_before_dest
1941 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
1943 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
1944 if (!fallthru_before_dest
)
1946 struct label_entry l
= { goto_dest
, if_loc
};
1947 labels
->safe_push (l
);
1950 /* And move back. */
1954 /* Remember the last statement. Skip labels that are of no interest
1956 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
1958 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
1959 if (find_label_entry (labels
, label
))
1960 prev
= gsi_stmt (*gsi_p
);
1962 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
1965 prev
= gsi_stmt (*gsi_p
);
1968 while (!gsi_end_p (*gsi_p
)
1969 /* Stop if we find a case or a user-defined label. */
1970 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
1971 || !gimple_has_location (gsi_stmt (*gsi_p
))));
1976 /* Return true if the switch fallthrough warning should occur. LABEL is
1977 the label statement that we're falling through to. */
1980 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
1982 gimple_stmt_iterator gsi
= *gsi_p
;
1984 /* Don't warn if the label is marked with a "falls through" comment. */
1985 if (FALLTHROUGH_LABEL_P (label
))
1988 /* Don't warn for a non-case label followed by a statement:
1993 as these are likely intentional. */
1994 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
1997 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2001 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2002 immediately breaks. */
2005 /* Skip all immediately following labels. */
2006 while (!gsi_end_p (gsi
) && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
)
2009 /* { ... something; default:; } */
2011 /* { ... something; default: break; } or
2012 { ... something; default: goto L; } */
2013 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2014 /* { ... something; default: return; } */
2015 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2021 /* Callback for walk_gimple_seq. */
2024 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2025 struct walk_stmt_info
*)
2027 gimple
*stmt
= gsi_stmt (*gsi_p
);
2029 *handled_ops_p
= true;
2030 switch (gimple_code (stmt
))
2035 case GIMPLE_EH_FILTER
:
2036 case GIMPLE_TRANSACTION
:
2037 /* Walk the sub-statements. */
2038 *handled_ops_p
= false;
2041 /* Find a sequence of form:
2048 and possibly warn. */
2051 /* Found a label. Skip all immediately following labels. */
2052 while (!gsi_end_p (*gsi_p
)
2053 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2056 /* There might be no more statements. */
2057 if (gsi_end_p (*gsi_p
))
2058 return integer_zero_node
;
2060 /* Vector of labels that fall through. */
2061 auto_vec
<struct label_entry
> labels
;
2062 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
);
2064 /* There might be no more statements. */
2065 if (gsi_end_p (*gsi_p
))
2066 return integer_zero_node
;
2068 gimple
*next
= gsi_stmt (*gsi_p
);
2070 /* If what follows is a label, then we may have a fallthrough. */
2071 if (gimple_code (next
) == GIMPLE_LABEL
2072 && gimple_has_location (next
)
2073 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2076 struct label_entry
*l
;
2077 bool warned_p
= false;
2078 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2080 else if (gimple_code (prev
) == GIMPLE_LABEL
2081 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2082 && (l
= find_label_entry (&labels
, label
)))
2083 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2084 "this statement may fall through");
2085 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2086 /* Try to be clever and don't warn when the statement
2087 can't actually fall through. */
2088 && gimple_stmt_may_fallthru (prev
)
2089 && gimple_has_location (prev
))
2090 warned_p
= warning_at (gimple_location (prev
),
2091 OPT_Wimplicit_fallthrough_
,
2092 "this statement may fall through");
2094 inform (gimple_location (next
), "here");
2096 /* Mark this label as processed so as to prevent multiple
2097 warnings in nested switches. */
2098 FALLTHROUGH_LABEL_P (label
) = true;
2100 /* So that next warn_implicit_fallthrough_r will start looking for
2101 a new sequence starting with this label. */
2112 /* Warn when a switch case falls through. */
2115 maybe_warn_implicit_fallthrough (gimple_seq seq
)
/* Nothing to do when -Wimplicit-fallthrough is off.  */
2117 if (!warn_implicit_fallthrough
)
2120 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2123 || lang_GNU_OBJC ()))
/* Walk SEQ; warn_implicit_fallthrough_r emits the actual diagnostics.  */
2126 struct walk_stmt_info wi
;
2127 memset (&wi
, 0, sizeof (wi
));
2128 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2131 /* Callback for walk_gimple_seq. */
2134 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2135 struct walk_stmt_info
*)
2137 gimple
*stmt
= gsi_stmt (*gsi_p
);
2139 *handled_ops_p
= true;
2140 switch (gimple_code (stmt
))
2145 case GIMPLE_EH_FILTER
:
2146 case GIMPLE_TRANSACTION
:
2147 /* Walk the sub-statements. */
2148 *handled_ops_p
= false;
2151 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2153 gsi_remove (gsi_p
, true);
2154 if (gsi_end_p (*gsi_p
))
2155 return integer_zero_node
;
2158 location_t loc
= gimple_location (stmt
);
2160 gimple_stmt_iterator gsi2
= *gsi_p
;
2161 stmt
= gsi_stmt (gsi2
);
2162 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2164 /* Go on until the artificial label. */
2165 tree goto_dest
= gimple_goto_dest (stmt
);
2166 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2168 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2169 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2174 /* Not found? Stop. */
2175 if (gsi_end_p (gsi2
))
2178 /* Look one past it. */
2182 /* We're looking for a case label or default label here. */
2183 while (!gsi_end_p (gsi2
))
2185 stmt
= gsi_stmt (gsi2
);
2186 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2188 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2189 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2196 /* Something other than a label. That's not expected. */
2201 warning_at (loc
, 0, "attribute %<fallthrough%> not preceding "
2202 "a case label or default label");
2211 /* Expand all FALLTHROUGH () calls in SEQ. */
/* Walks *SEQ_P with expand_FALLTHROUGH_r, which removes the internal
   IFN_FALLTHROUGH calls and diagnoses misplaced ones.  */
2214 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2216 struct walk_stmt_info wi
;
2217 memset (&wi
, 0, sizeof (wi
));
/* The _mod walker variant -- presumably because the callback deletes
   statements (gsi_remove in expand_FALLTHROUGH_r).  */
2218 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2222 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2225 static enum gimplify_status
2226 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2228 tree switch_expr
= *expr_p
;
2229 gimple_seq switch_body_seq
= NULL
;
2230 enum gimplify_status ret
;
2231 tree index_type
= TREE_TYPE (switch_expr
);
2232 if (index_type
== NULL_TREE
)
2233 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2235 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2237 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2240 if (SWITCH_BODY (switch_expr
))
2243 vec
<tree
> saved_labels
;
2244 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2245 tree default_case
= NULL_TREE
;
2246 gswitch
*switch_stmt
;
2248 /* If someone can be bothered to fill in the labels, they can
2249 be bothered to null out the body too. */
2250 gcc_assert (!SWITCH_LABELS (switch_expr
));
2252 /* Save old labels, get new ones from body, then restore the old
2253 labels. Save all the things from the switch body to append after. */
2254 saved_labels
= gimplify_ctxp
->case_labels
;
2255 gimplify_ctxp
->case_labels
.create (8);
2257 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2258 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2259 if (TREE_CODE (SWITCH_BODY (switch_expr
)) == BIND_EXPR
)
2260 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2262 gimplify_ctxp
->live_switch_vars
= NULL
;
2264 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2265 gimplify_ctxp
->in_switch_expr
= true;
2267 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2269 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2270 maybe_warn_switch_unreachable (switch_body_seq
);
2271 maybe_warn_implicit_fallthrough (switch_body_seq
);
2272 /* Only do this for the outermost GIMPLE_SWITCH. */
2273 if (!gimplify_ctxp
->in_switch_expr
)
2274 expand_FALLTHROUGH (&switch_body_seq
);
2276 labels
= gimplify_ctxp
->case_labels
;
2277 gimplify_ctxp
->case_labels
= saved_labels
;
2279 if (gimplify_ctxp
->live_switch_vars
)
2281 gcc_assert (gimplify_ctxp
->live_switch_vars
->elements () == 0);
2282 delete gimplify_ctxp
->live_switch_vars
;
2284 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2286 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2291 glabel
*new_default
;
2294 = build_case_label (NULL_TREE
, NULL_TREE
,
2295 create_artificial_label (UNKNOWN_LOCATION
));
2296 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2297 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2300 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2301 default_case
, labels
);
2302 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
2303 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
2307 gcc_assert (SWITCH_LABELS (switch_expr
));
2312 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2314 static enum gimplify_status
2315 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
/* A label must belong to the function currently being gimplified.  */
2317 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
2318 == current_function_decl
);
/* Build the GIMPLE_LABEL, carry over the source location, and append it.  */
2320 glabel
*label_stmt
= gimple_build_label (LABEL_EXPR_LABEL (*expr_p
));
2321 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2322 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2327 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2329 static enum gimplify_status
2330 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2332 struct gimplify_ctx
*ctxp
;
2335 /* Invalid programs can play Duff's Device type games with, for example,
2336 #pragma omp parallel. At least in the C front end, we don't
2337 detect such invalid branches until after gimplification, in the
2338 diagnose_omp_blocks pass. */
/* Walk out to the innermost gimplify context that is collecting case
   labels, i.e. the enclosing switch.  */
2339 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
2340 if (ctxp
->case_labels
.exists ())
/* Emit the label and record the case in that context's vector.  */
2343 label_stmt
= gimple_build_label (CASE_LABEL (*expr_p
));
2344 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2345 ctxp
->case_labels
.safe_push (*expr_p
);
2346 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2351 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2355 build_and_jump (tree
*label_p
)
/* No destination pointer at all: nothing to jump to.  */
2357 if (label_p
== NULL
)
2358 /* If there's nowhere to jump, just fall through. */
/* Lazily create an artificial label on first use.  */
2361 if (*label_p
== NULL_TREE
)
2363 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
2367 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
2370 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2371 This also involves building a label to jump to and communicating it to
2372 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2374 static enum gimplify_status
2375 gimplify_exit_expr (tree
*expr_p
)
2377 tree cond
= TREE_OPERAND (*expr_p
, 0);
/* Build "if (COND) goto exit_label;" -- the label is created on demand by
   build_and_jump and stored in gimplify_ctxp->exit_label for
   gimplify_loop_expr to emit.  */
2380 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
2381 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
2387 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2388 different from its canonical type, wrap the whole thing inside a
2389 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2392 The canonical type of a COMPONENT_REF is the type of the field being
2393 referenced--unless the field is a bit-field which can be read directly
2394 in a smaller mode, in which case the canonical type is the
2395 sign-appropriate type corresponding to that mode. */
2398 canonicalize_component_ref (tree
*expr_p
)
2400 tree expr
= *expr_p
;
2403 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
/* Determine the canonical type: for integral results, whatever
   get_unwidened says can be read directly; otherwise the field's type.  */
2405 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
2406 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
2408 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
2410 /* One could argue that all the stuff below is not necessary for
2411 the non-bitfield case and declare it a FE error if type
2412 adjustment would be needed. */
2413 if (TREE_TYPE (expr
) != type
)
2415 #ifdef ENABLE_TYPES_CHECKING
2416 tree old_type
= TREE_TYPE (expr
);
/* Merge the qualifiers of the canonical type with those of the object
   being referenced.  */
2420 /* We need to preserve qualifiers and propagate them from
2422 type_quals
= TYPE_QUALS (type
)
2423 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2424 if (TYPE_QUALS (type
) != type_quals
)
2425 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
2427 /* Set the type of the COMPONENT_REF to the underlying type. */
2428 TREE_TYPE (expr
) = type
;
2430 #ifdef ENABLE_TYPES_CHECKING
2431 /* It is now a FE error, if the conversion from the canonical
2432 type to the original expression type is not useless. */
2433 gcc_assert (useless_type_conversion_p (old_type
, type
));
2438 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2439 to foo, embed that change in the ADDR_EXPR by converting
2444 where L is the lower bound. For simplicity, only do this for constant
2446 The constraint is that the type of &array[L] is trivially convertible
2450 canonicalize_addr_expr (tree
*expr_p
)
2452 tree expr
= *expr_p
;
2453 tree addr_expr
= TREE_OPERAND (expr
, 0);
2454 tree datype
, ddatype
, pddatype
;
2456 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2457 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
2458 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
2461 /* The addr_expr type should be a pointer to an array. */
2462 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
2463 if (TREE_CODE (datype
) != ARRAY_TYPE
)
2466 /* The pointer to element type shall be trivially convertible to
2467 the expression pointer type. */
2468 ddatype
= TREE_TYPE (datype
);
2469 pddatype
= build_pointer_type (ddatype
);
2470 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
2474 /* The lower bound and element sizes must be constant. */
2475 if (!TYPE_SIZE_UNIT (ddatype
)
2476 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
2477 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
2478 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
2481 /* All checks succeeded. Build a new node to merge the cast. */
/* Rewrite (T *)&array as &array[L], L being the array's lower bound.  */
2482 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
2483 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
2484 NULL_TREE
, NULL_TREE
);
2485 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
2487 /* We can have stripped a required restrict qualifier above. */
2488 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
2489 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
/* Gimplify a NOP_EXPR/CONVERT_EXPR at *EXPR_P: strip redundant inner
   conversions, canonicalize COMPONENT_REF and ADDR_EXPR operands via the
   helpers above, force VIEW_CONVERT_EXPR for non-register result types,
   and normalize CONVERT_EXPR to NOP_EXPR.  */
2492 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2493 underneath as appropriate. */
2495 static enum gimplify_status
2496 gimplify_conversion (tree
*expr_p
)
2498 location_t loc
= EXPR_LOCATION (*expr_p
);
2499 gcc_assert (CONVERT_EXPR_P (*expr_p
));
2501 /* Then strip away all but the outermost conversion. */
2502 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
2504 /* And remove the outermost conversion if it's useless. */
2505 if (tree_ssa_useless_type_conversion (*expr_p
))
2506 *expr_p
= TREE_OPERAND (*expr_p
, 0);
2508 /* If we still have a conversion at the toplevel,
2509 then canonicalize some constructs. */
2510 if (CONVERT_EXPR_P (*expr_p
))
2512 tree sub
= TREE_OPERAND (*expr_p
, 0);
2514 /* If a NOP conversion is changing the type of a COMPONENT_REF
2515 expression, then canonicalize its type now in order to expose more
2516 redundant conversions. */
2517 if (TREE_CODE (sub
) == COMPONENT_REF
)
2518 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
2520 /* If a NOP conversion is changing a pointer to array of foo
2521 to a pointer to foo, embed that change in the ADDR_EXPR. */
2522 else if (TREE_CODE (sub
) == ADDR_EXPR
)
2523 canonicalize_addr_expr (expr_p
);
2526 /* If we have a conversion to a non-register type force the
2527 use of a VIEW_CONVERT_EXPR instead. */
2528 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
2529 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
2530 TREE_OPERAND (*expr_p
, 0));
2532 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2533 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
2534 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
/* File-scope bookkeeping consumed by gimplify_var_or_parm_decl below when a
   VLA declared in an outer function is referenced from the current one.  */
2539 /* Nonlocal VLAs seen in the current function. */
2540 static hash_set
<tree
> *nonlocal_vlas
;
2542 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2543 static tree nonlocal_vla_vars
;
/* Gimplify a VAR_DECL or PARM_DECL at *EXPR_P.  Notifies the OMP context of
   the use, and substitutes DECL_VALUE_EXPR aliases in place; for a nonlocal
   VLA it also records a debug-only copy of the decl on nonlocal_vla_vars.  */
2545 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2546 DECL_VALUE_EXPR, and it's worth re-examining things. */
2548 static enum gimplify_status
2549 gimplify_var_or_parm_decl (tree
*expr_p
)
2551 tree decl
= *expr_p
;
2553 /* ??? If this is a local variable, and it has not been seen in any
2554 outer BIND_EXPR, then it's probably the result of a duplicate
2555 declaration, for which we've already issued an error. It would
2556 be really nice if the front end wouldn't leak these at all.
2557 Currently the only known culprit is C++ destructors, as seen
2558 in g++.old-deja/g++.jason/binding.C. */
2560 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
2561 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
2562 && decl_function_context (decl
) == current_function_decl
)
2564 gcc_assert (seen_error ());
2568 /* When within an OMP context, notice uses of variables. */
2569 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
2572 /* If the decl is an alias for another expression, substitute it now. */
2573 if (DECL_HAS_VALUE_EXPR_P (decl
))
2575 tree value_expr
= DECL_VALUE_EXPR (decl
);
2577 /* For referenced nonlocal VLAs add a decl for debugging purposes
2578 to the current function. */
2580 && TREE_CODE (DECL_SIZE_UNIT (decl
)) != INTEGER_CST
2581 && nonlocal_vlas
!= NULL
2582 && TREE_CODE (value_expr
) == INDIRECT_REF
2583 && TREE_CODE (TREE_OPERAND (value_expr
, 0)) == VAR_DECL
2584 && decl_function_context (decl
) != current_function_decl
)
/* Walk out of OpenMP workshare/simd/acc regions to find the real context.  */
2586 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
2588 && (ctx
->region_type
== ORT_WORKSHARE
2589 || ctx
->region_type
== ORT_SIMD
2590 || ctx
->region_type
== ORT_ACC
))
2591 ctx
= ctx
->outer_context
;
/* add() returns whether DECL was already present; only copy it once.  */
2592 if (!ctx
&& !nonlocal_vlas
->add (decl
))
2594 tree copy
= copy_node (decl
);
2596 lang_hooks
.dup_lang_specific_decl (copy
);
2597 SET_DECL_RTL (copy
, 0);
2598 TREE_USED (copy
) = 1;
2599 DECL_CHAIN (copy
) = nonlocal_vla_vars
;
2600 nonlocal_vla_vars
= copy
;
2601 SET_DECL_VALUE_EXPR (copy
, unshare_expr (value_expr
));
2602 DECL_HAS_VALUE_EXPR_P (copy
) = 1;
/* Replace the decl with an unshared copy of its value expression.  */
2606 *expr_p
= unshare_expr (value_expr
);
/* Recompute TREE_SIDE_EFFECTS for T from its volatility and the
   side-effects flags of its operands; pre/post inc/dec always keep it.  */
2613 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2616 recalculate_side_effects (tree t
)
2618 enum tree_code code
= TREE_CODE (t
);
2619 int len
= TREE_OPERAND_LENGTH (t
);
2622 switch (TREE_CODE_CLASS (code
))
2624 case tcc_expression
:
2630 case PREDECREMENT_EXPR
:
2631 case PREINCREMENT_EXPR
:
2632 case POSTDECREMENT_EXPR
:
2633 case POSTINCREMENT_EXPR
:
2634 /* All of these have side-effects, no matter what their
2643 case tcc_comparison
: /* a comparison expression */
2644 case tcc_unary
: /* a unary arithmetic expression */
2645 case tcc_binary
: /* a binary arithmetic expression */
2646 case tcc_reference
: /* a reference */
2647 case tcc_vl_exp
: /* a function call */
/* Start from volatility, then OR in each operand's flag.  */
2648 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
2649 for (i
= 0; i
< len
; ++i
)
2651 tree op
= TREE_OPERAND (t
, i
);
2652 if (op
&& TREE_SIDE_EFFECTS (op
))
2653 TREE_SIDE_EFFECTS (t
) = 1;
2658 /* No side-effects. */
/* Gimplify a chain of COMPONENT_REF/ARRAY_REF/REALPART_EXPR/IMAGPART_EXPR
   nodes at *EXPR_P in three passes: (1) gimplify variable bounds, sizes and
   offsets stored in operands 2/3, (2) gimplify the innermost base as an
   lvalue, (3) gimplify the indices left-to-right.  Returns the combined
   (minimum) gimplify_status of all sub-gimplifications.  */
2666 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2670 : min_lval '[' val ']'
2672 | compound_lval '[' val ']'
2673 | compound_lval '.' ID
2675 This is not part of the original SIMPLE definition, which separates
2676 array and member references, but it seems reasonable to handle them
2677 together. Also, this way we don't run into problems with union
2678 aliasing; gcc requires that for accesses through a union to alias, the
2679 union reference must be explicit, which was not always the case when we
2680 were splitting up array and member refs.
2682 PRE_P points to the sequence where side effects that must happen before
2683 *EXPR_P should be stored.
2685 POST_P points to the sequence where side effects that must happen after
2686 *EXPR_P should be stored. */
2688 static enum gimplify_status
2689 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2690 fallback_t fallback
)
2693 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
2695 location_t loc
= EXPR_LOCATION (*expr_p
);
2696 tree expr
= *expr_p
;
2698 /* Create a stack of the subexpressions so later we can walk them in
2699 order from inner to outer. */
2700 auto_vec
<tree
, 10> expr_stack
;
2702 /* We can handle anything that get_inner_reference can deal with. */
2703 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
2706 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2707 if (TREE_CODE (*p
) == INDIRECT_REF
)
2708 *p
= fold_indirect_ref_loc (loc
, *p
);
2710 if (handled_component_p (*p
))
2712 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2713 additional COMPONENT_REFs. */
2714 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
2715 && gimplify_var_or_parm_decl (p
) == GS_OK
)
2720 expr_stack
.safe_push (*p
);
2723 gcc_assert (expr_stack
.length ());
2725 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2726 walked through and P points to the innermost expression.
2728 Java requires that we elaborated nodes in source order. That
2729 means we must gimplify the inner expression followed by each of
2730 the indices, in order. But we can't gimplify the inner
2731 expression until we deal with any variable bounds, sizes, or
2732 positions in order to deal with PLACEHOLDER_EXPRs.
2734 So we do this in three steps. First we deal with the annotations
2735 for any variables in the components, then we gimplify the base,
2736 then we gimplify any indices, from left to right. */
2737 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
2739 tree t
= expr_stack
[i
];
2741 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2743 /* Gimplify the low bound and element type size and put them into
2744 the ARRAY_REF. If these values are set, they have already been
2746 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2748 tree low
= unshare_expr (array_ref_low_bound (t
));
2749 if (!is_gimple_min_invariant (low
))
2751 TREE_OPERAND (t
, 2) = low
;
2752 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2753 post_p
, is_gimple_reg
,
/* ret tracks the weakest status seen across sub-gimplifications.  */
2755 ret
= MIN (ret
, tret
);
2760 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
2761 is_gimple_reg
, fb_rvalue
);
2762 ret
= MIN (ret
, tret
);
2765 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
2767 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
2768 tree elmt_size
= unshare_expr (array_ref_element_size (t
));
2769 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
2771 /* Divide the element size by the alignment of the element
2774 = size_binop_loc (loc
, EXACT_DIV_EXPR
, elmt_size
, factor
);
2776 if (!is_gimple_min_invariant (elmt_size
))
2778 TREE_OPERAND (t
, 3) = elmt_size
;
2779 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
2780 post_p
, is_gimple_reg
,
2782 ret
= MIN (ret
, tret
);
2787 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
2788 is_gimple_reg
, fb_rvalue
);
2789 ret
= MIN (ret
, tret
);
2792 else if (TREE_CODE (t
) == COMPONENT_REF
)
2794 /* Set the field offset into T and gimplify it. */
2795 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2797 tree offset
= unshare_expr (component_ref_field_offset (t
));
2798 tree field
= TREE_OPERAND (t
, 1);
2800 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
2802 /* Divide the offset by its alignment. */
2803 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
, offset
, factor
);
2805 if (!is_gimple_min_invariant (offset
))
2807 TREE_OPERAND (t
, 2) = offset
;
2808 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2809 post_p
, is_gimple_reg
,
2811 ret
= MIN (ret
, tret
);
2816 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
2817 is_gimple_reg
, fb_rvalue
);
2818 ret
= MIN (ret
, tret
);
2823 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2824 so as to match the min_lval predicate. Failure to do so may result
2825 in the creation of large aggregate temporaries. */
2826 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
2827 fallback
| fb_lvalue
);
2828 ret
= MIN (ret
, tret
);
2830 /* And finally, the indices and operands of ARRAY_REF. During this
2831 loop we also remove any useless conversions. */
2832 for (; expr_stack
.length () > 0; )
2834 tree t
= expr_stack
.pop ();
2836 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2838 /* Gimplify the dimension. */
2839 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
2841 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
2842 is_gimple_val
, fb_rvalue
);
2843 ret
= MIN (ret
, tret
);
2847 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
2849 /* The innermost expression P may have originally had
2850 TREE_SIDE_EFFECTS set which would have caused all the outer
2851 expressions in *EXPR_P leading to P to also have had
2852 TREE_SIDE_EFFECTS set. */
2853 recalculate_side_effects (t
);
2856 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2857 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
2859 canonicalize_component_ref (expr_p
);
2862 expr_stack
.release ();
2864 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
/* Lower a pre/post increment/decrement at *EXPR_P into an explicit
   MODIFY_EXPR (lvalue = lvalue +/- rhs).  Postfix forms whose value is used
   first capture the old value in a temporary; pointer arithmetic is lowered
   to POINTER_PLUS_EXPR, all other arithmetic is done in ARITH_TYPE.  */
2869 /* Gimplify the self modifying expression pointed to by EXPR_P
2872 PRE_P points to the list where side effects that must happen before
2873 *EXPR_P should be stored.
2875 POST_P points to the list where side effects that must happen after
2876 *EXPR_P should be stored.
2878 WANT_VALUE is nonzero iff we want to use the value of this expression
2879 in another expression.
2881 ARITH_TYPE is the type the computation should be performed in. */
2883 enum gimplify_status
2884 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2885 bool want_value
, tree arith_type
)
2887 enum tree_code code
;
2888 tree lhs
, lvalue
, rhs
, t1
;
2889 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
2891 enum tree_code arith_code
;
2892 enum gimplify_status ret
;
2893 location_t loc
= EXPR_LOCATION (*expr_p
);
2895 code
= TREE_CODE (*expr_p
);
2897 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
2898 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
2900 /* Prefix or postfix? */
2901 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
2902 /* Faster to treat as prefix if result is not used. */
2903 postfix
= want_value
;
2907 /* For postfix, make sure the inner expression's post side effects
2908 are executed after side effects from this expression. */
2912 /* Add or subtract? */
2913 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
2914 arith_code
= PLUS_EXPR
;
2916 arith_code
= MINUS_EXPR
;
2918 /* Gimplify the LHS into a GIMPLE lvalue. */
2919 lvalue
= TREE_OPERAND (*expr_p
, 0);
2920 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
2921 if (ret
== GS_ERROR
)
2924 /* Extract the operands to the arithmetic operation. */
2926 rhs
= TREE_OPERAND (*expr_p
, 1);
2928 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2929 that as the result value and in the postqueue operation. */
2932 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
2933 if (ret
== GS_ERROR
)
/* Snapshot the pre-modification value for the postfix result.  */
2936 lhs
= get_initialized_tmp_var (lhs
, pre_p
, NULL
);
2939 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2940 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
2942 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
/* POINTER_PLUS only adds, so a decrement negates the offset.  */
2943 if (arith_code
== MINUS_EXPR
)
2944 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
2945 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
2948 t1
= fold_convert (TREE_TYPE (*expr_p
),
2949 fold_build2 (arith_code
, arith_type
,
2950 fold_convert (arith_type
, lhs
),
2951 fold_convert (arith_type
, rhs
)));
2955 gimplify_assign (lvalue
, t1
, pre_p
);
2956 gimplify_seq_add_seq (orig_post_p
, post
);
2962 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
/* Wrap *EXPR_P in a WITH_SIZE_EXPR carrying its (variable) size in bytes,
   unless it is already wrapped, the type is erroneous, or the size is
   unknown/constant.  PLACEHOLDER_EXPRs in the size are resolved against
   EXPR itself.  */
2967 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2970 maybe_with_size_expr (tree
*expr_p
)
2972 tree expr
= *expr_p
;
2973 tree type
= TREE_TYPE (expr
);
2976 /* If we've already wrapped this or the type is error_mark_node, we can't do
2978 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
2979 || type
== error_mark_node
)
2982 /* If the size isn't known or is a constant, we have nothing to do. */
2983 size
= TYPE_SIZE_UNIT (type
)
;
2984 if (!size
|| TREE_CODE (size
) == INTEGER_CST
)
2987 /* Otherwise, make a WITH_SIZE_EXPR. */
2988 size
= unshare_expr (size
);
2989 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
2990 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
/* Gimplify one call argument *ARG_P: register-type values become rvalues,
   aggregates stay lvalues (avoiding copies), forced-copy TARGET_EXPRs are
   stripped, variable-sized arguments get a WITH_SIZE_EXPR, and the
   argument inherits the call's location.  */
2993 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2994 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2995 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
2996 gimplified to an SSA name. */
2998 enum gimplify_status
2999 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
3002 bool (*test
) (tree
);
3005 /* In general, we allow lvalues for function arguments to avoid
3006 extra overhead of copying large aggregates out of even larger
3007 aggregates into temporaries only to copy the temporaries to
3008 the argument list. Make optimizers happy by pulling out to
3009 temporaries those types that fit in registers. */
3010 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3011 test
= is_gimple_val
, fb
= fb_rvalue
;
3014 test
= is_gimple_lvalue
, fb
= fb_either
;
3015 /* Also strip a TARGET_EXPR that would force an extra copy. */
3016 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3018 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3020 && !VOID_TYPE_P (TREE_TYPE (init
)))
3025 /* If this is a variable sized type, we must remember the size. */
3026 maybe_with_size_expr (arg_p
);
3028 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3029 /* Make sure arguments have the same location as the function call
3031 protected_set_expr_location (*arg_p
, call_location
);
3033 /* There is a sequence point before a function call. Side effects in
3034 the argument list must occur before the actual call. So, when
3035 gimplifying arguments, force gimplify_expr to use an internal
3036 post queue which is then appended to the end of PRE_P. */
3037 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
/* Fold the statement at GSI, except inside offloading/taskreg OMP regions
   (any enclosing ORT_TARGET/ORT_PARALLEL/ORT_TASK context suppresses it).  */
3040 /* Don't fold inside offloading or taskreg regions: it can break code by
3041 adding decl references that weren't in the source. We'll do it during
3042 omplower pass instead. */
3045 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3047 struct gimplify_omp_ctx
*ctx
;
3048 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3049 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3051 return fold_stmt (gsi
);
/* Gimplify CALL_EXPR *EXPR_P into PRE_P: handles internal-function calls,
   alloca stack-save/keep bookkeeping, va_start checks, builtin folding
   before and after argument gimplification, __builtin_va_arg_pack
   flattening, static-chain gimplification, and finally either emits a
   GIMPLE_CALL (value unused) or leaves a gimplified CALL_EXPR for
   gimplify_modify_expr.  */
3054 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3055 WANT_VALUE is true if the result of the call is desired. */
3057 static enum gimplify_status
3058 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3060 tree fndecl
, parms
, p
, fnptrtype
;
3061 enum gimplify_status ret
;
3064 bool builtin_va_start_p
= false;
3065 location_t loc
= EXPR_LOCATION (*expr_p
);
3067 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3069 /* For reliable diagnostics during inlining, it is necessary that
3070 every call_expr be annotated with file and line. */
3071 if (! EXPR_HAS_LOCATION (*expr_p
))
3072 SET_EXPR_LOCATION (*expr_p
, input_location
);
3074 /* Gimplify internal functions created in the FEs. */
3075 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3080 nargs
= call_expr_nargs (*expr_p
);
3081 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3082 auto_vec
<tree
> vargs (nargs
);
3084 for (i
= 0; i
< nargs
; i
++)
3086 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3087 EXPR_LOCATION (*expr_p
));
3088 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3090 gimple
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3091 gimplify_seq_add_stmt (pre_p
, call
);
3095 /* This may be a call to a builtin function.
3097 Builtin function calls may be transformed into different
3098 (and more efficient) builtin function calls under certain
3099 circumstances. Unfortunately, gimplification can muck things
3100 up enough that the builtin expanders are not aware that certain
3101 transformations are still valid.
3103 So we attempt transformation/gimplification of the call before
3104 we gimplify the CALL_EXPR. At this time we do not manage to
3105 transform all calls in the same manner as the expanders do, but
3106 we do transform most of them. */
3107 fndecl
= get_callee_fndecl (*expr_p
);
3109 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3110 switch (DECL_FUNCTION_CODE (fndecl
))
3112 case BUILT_IN_ALLOCA
:
3113 case BUILT_IN_ALLOCA_WITH_ALIGN
:
3114 /* If the call has been built for a variable-sized object, then we
3115 want to restore the stack level when the enclosing BIND_EXPR is
3116 exited to reclaim the allocated space; otherwise, we precisely
3117 need to do the opposite and preserve the latest stack level. */
3118 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3119 gimplify_ctxp
->save_stack
= true;
3121 gimplify_ctxp
->keep_stack
= true;
3124 case BUILT_IN_VA_START
:
3126 builtin_va_start_p
= TRUE
;
3127 if (call_expr_nargs (*expr_p
) < 2)
3129 error ("too few arguments to function %<va_start%>");
3130 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3134 if (fold_builtin_next_arg (*expr_p
, true))
3136 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
/* First folding attempt, before the arguments are gimplified.  */
3145 if (fndecl
&& DECL_BUILT_IN (fndecl
))
3147 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3148 if (new_tree
&& new_tree
!= *expr_p
)
3150 /* There was a transformation of this call which computes the
3151 same value, but in a more efficient way. Return and try
3158 /* Remember the original function pointer type. */
3159 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3161 /* There is a sequence point before the call, so any side effects in
3162 the calling expression must occur before the actual call. Force
3163 gimplify_expr to use an internal post queue. */
3164 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3165 is_gimple_call_addr
, fb_rvalue
);
3167 nargs
= call_expr_nargs (*expr_p
);
3169 /* Get argument types for verification. */
3170 fndecl
= get_callee_fndecl (*expr_p
);
3173 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3175 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3177 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3178 p
= DECL_ARGUMENTS (fndecl
);
3183 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3186 /* If the last argument is __builtin_va_arg_pack () and it is not
3187 passed as a named argument, decrease the number of CALL_EXPR
3188 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3191 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3193 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3194 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3197 && TREE_CODE (last_arg_fndecl
) == FUNCTION_DECL
3198 && DECL_BUILT_IN_CLASS (last_arg_fndecl
) == BUILT_IN_NORMAL
3199 && DECL_FUNCTION_CODE (last_arg_fndecl
) == BUILT_IN_VA_ARG_PACK
)
3201 tree call
= *expr_p
;
/* Rebuild the call without the trailing va_arg_pack argument.  */
3204 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3205 CALL_EXPR_FN (call
),
3206 nargs
, CALL_EXPR_ARGP (call
));
3208 /* Copy all CALL_EXPR flags, location and block, except
3209 CALL_EXPR_VA_ARG_PACK flag. */
3210 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
3211 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
3212 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
3213 = CALL_EXPR_RETURN_SLOT_OPT (call
);
3214 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
3215 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
3217 /* Set CALL_EXPR_VA_ARG_PACK. */
3218 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
3222 /* If the call returns twice then after building the CFG the call
3223 argument computations will no longer dominate the call because
3224 we add an abnormal incoming edge to the call. So do not use SSA
3226 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
3228 /* Gimplify the function arguments. */
3231 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
3232 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
3233 PUSH_ARGS_REVERSED
? i
-- : i
++)
3235 enum gimplify_status t
;
3237 /* Avoid gimplifying the second argument to va_start, which needs to
3238 be the plain PARM_DECL. */
3239 if ((i
!= 1) || !builtin_va_start_p
)
3241 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3242 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3250 /* Gimplify the static chain. */
3251 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
3253 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
3254 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
3257 enum gimplify_status t
;
3258 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
3259 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3265 /* Verify the function result. */
3266 if (want_value
&& fndecl
3267 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
3269 error_at (loc
, "using result of function returning %<void%>");
3273 /* Try this again in case gimplification exposed something. */
3274 if (ret
!= GS_ERROR
)
3276 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3278 if (new_tree
&& new_tree
!= *expr_p
)
3280 /* There was a transformation of this call which computes the
3281 same value, but in a more efficient way. Return and try
3289 *expr_p
= error_mark_node
;
3293 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3294 decl. This allows us to eliminate redundant or useless
3295 calls to "const" functions. */
3296 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
3298 int flags
= call_expr_flags (*expr_p
);
3299 if (flags
& (ECF_CONST
| ECF_PURE
)
3300 /* An infinite loop is considered a side effect. */
3301 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
3302 TREE_SIDE_EFFECTS (*expr_p
) = 0;
3305 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3306 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3307 form and delegate the creation of a GIMPLE_CALL to
3308 gimplify_modify_expr. This is always possible because when
3309 WANT_VALUE is true, the caller wants the result of this call into
3310 a temporary, which means that we will emit an INIT_EXPR in
3311 internal_get_tmp_var which will then be handled by
3312 gimplify_modify_expr. */
3315 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3316 have to do is replicate it as a GIMPLE_CALL tuple. */
3317 gimple_stmt_iterator gsi
;
3318 call
= gimple_build_call_from_tree (*expr_p
);
3319 gimple_call_set_fntype (call
, TREE_TYPE (fnptrtype
));
3320 notice_special_calls (call
);
3321 gimplify_seq_add_stmt (pre_p
, call
);
3322 gsi
= gsi_last (*pre_p
);
3323 maybe_fold_stmt (&gsi
);
3324 *expr_p
= NULL_TREE
;
3327 /* Remember the original function type. */
3328 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
3329 CALL_EXPR_FN (*expr_p
));
/* Recursively expand the short-circuit predicate PRED of a COND_EXPR into
   a statement list of simple COND_EXPRs and GOTO_EXPRs targeting
   *TRUE_LABEL_P / *FALSE_LABEL_P, creating a local fall-through label when
   a side is NULL.  Returns the built statement list.  */
3334 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3335 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3337 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3338 condition is true or false, respectively. If null, we should generate
3339 our own to skip over the evaluation of this specific expression.
3341 LOCUS is the source location of the COND_EXPR.
3343 This function is the tree equivalent of do_jump.
3345 shortcut_cond_r should only be called by shortcut_cond_expr. */
3348 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
3351 tree local_label
= NULL_TREE
;
3352 tree t
, expr
= NULL
;
3354 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3355 retain the shortcut semantics. Just insert the gotos here;
3356 shortcut_cond_expr will append the real blocks later. */
3357 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3359 location_t new_locus
;
3361 /* Turn if (a && b) into
3363 if (a); else goto no;
3364 if (b) goto yes; else goto no;
3367 if (false_label_p
== NULL
)
3368 false_label_p
= &local_label
;
3370 /* Keep the original source location on the first 'if'. */
3371 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
3372 append_to_statement_list (t
, &expr
);
3374 /* Set the source location of the && on the second 'if'. */
3375 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
3376 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3378 append_to_statement_list (t
, &expr
);
3380 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3382 location_t new_locus
;
3384 /* Turn if (a || b) into
3387 if (b) goto yes; else goto no;
3390 if (true_label_p
== NULL
)
3391 true_label_p
= &local_label
;
3393 /* Keep the original source location on the first 'if'. */
3394 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
3395 append_to_statement_list (t
, &expr
);
3397 /* Set the source location of the || on the second 'if'. */
3398 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
3399 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3401 append_to_statement_list (t
, &expr
);
3403 else if (TREE_CODE (pred
) == COND_EXPR
3404 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
3405 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
3407 location_t new_locus
;
3409 /* As long as we're messing with gotos, turn if (a ? b : c) into
3411 if (b) goto yes; else goto no;
3413 if (c) goto yes; else goto no;
3415 Don't do this if one of the arms has void type, which can happen
3416 in C++ when the arm is throw. */
3418 /* Keep the original source location on the first 'if'. Set the source
3419 location of the ? on the second 'if'. */
3420 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
3421 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
3422 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
3423 false_label_p
, locus
),
3424 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
3425 false_label_p
, new_locus
));
/* Base case: a simple predicate becomes one COND_EXPR with two jumps.  */
3429 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
3430 build_and_jump (true_label_p
),
3431 build_and_jump (false_label_p
));
3432 SET_EXPR_LOCATION (expr
, locus
);
/* Emit the locally created fall-through label, if one was needed.  */
3437 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
3438 append_to_statement_list (t
, &expr
);
3444 /* Given a conditional expression EXPR with short-circuit boolean
3445 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3446 predicate apart into the equivalent sequence of conditionals. */
3449 shortcut_cond_expr (tree expr
)
3451 tree pred
= TREE_OPERAND (expr
, 0);
3452 tree then_
= TREE_OPERAND (expr
, 1);
3453 tree else_
= TREE_OPERAND (expr
, 2);
3454 tree true_label
, false_label
, end_label
, t
;
3456 tree
*false_label_p
;
3457 bool emit_end
, emit_false
, jump_over_else
;
3458 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3459 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3461 /* First do simple transformations. */
3464 /* If there is no 'else', turn
3467 if (a) if (b) then c. */
3468 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3470 /* Keep the original source location on the first 'if'. */
3471 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3472 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3473 /* Set the source location of the && on the second 'if'. */
3474 if (EXPR_HAS_LOCATION (pred
))
3475 SET_EXPR_LOCATION (expr
, EXPR_LOCATION (pred
));
3476 then_
= shortcut_cond_expr (expr
);
3477 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3478 pred
= TREE_OPERAND (pred
, 0);
3479 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
3480 SET_EXPR_LOCATION (expr
, locus
);
3486 /* If there is no 'then', turn
3489 if (a); else if (b); else d. */
3490 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3492 /* Keep the original source location on the first 'if'. */
3493 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3494 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3495 /* Set the source location of the || on the second 'if'. */
3496 if (EXPR_HAS_LOCATION (pred
))
3497 SET_EXPR_LOCATION (expr
, EXPR_LOCATION (pred
));
3498 else_
= shortcut_cond_expr (expr
);
3499 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3500 pred
= TREE_OPERAND (pred
, 0);
3501 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
3502 SET_EXPR_LOCATION (expr
, locus
);
3506 /* If we're done, great. */
3507 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
3508 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
3511 /* Otherwise we need to mess with gotos. Change
3514 if (a); else goto no;
3517 and recursively gimplify the condition. */
3519 true_label
= false_label
= end_label
= NULL_TREE
;
3521 /* If our arms just jump somewhere, hijack those labels so we don't
3522 generate jumps to jumps. */
3525 && TREE_CODE (then_
) == GOTO_EXPR
3526 && TREE_CODE (GOTO_DESTINATION (then_
)) == LABEL_DECL
)
3528 true_label
= GOTO_DESTINATION (then_
);
3534 && TREE_CODE (else_
) == GOTO_EXPR
3535 && TREE_CODE (GOTO_DESTINATION (else_
)) == LABEL_DECL
)
3537 false_label
= GOTO_DESTINATION (else_
);
3542 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3544 true_label_p
= &true_label
;
3546 true_label_p
= NULL
;
3548 /* The 'else' branch also needs a label if it contains interesting code. */
3549 if (false_label
|| else_se
)
3550 false_label_p
= &false_label
;
3552 false_label_p
= NULL
;
3554 /* If there was nothing else in our arms, just forward the label(s). */
3555 if (!then_se
&& !else_se
)
3556 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3557 EXPR_LOC_OR_LOC (expr
, input_location
));
3559 /* If our last subexpression already has a terminal label, reuse it. */
3561 t
= expr_last (else_
);
3563 t
= expr_last (then_
);
3566 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
3567 end_label
= LABEL_EXPR_LABEL (t
);
3569 /* If we don't care about jumping to the 'else' branch, jump to the end
3570 if the condition is false. */
3572 false_label_p
= &end_label
;
3574 /* We only want to emit these labels if we aren't hijacking them. */
3575 emit_end
= (end_label
== NULL_TREE
);
3576 emit_false
= (false_label
== NULL_TREE
);
3578 /* We only emit the jump over the else clause if we have to--if the
3579 then clause may fall through. Otherwise we can wind up with a
3580 useless jump and a useless label at the end of gimplified code,
3581 which will cause us to think that this conditional as a whole
3582 falls through even if it doesn't. If we then inline a function
3583 which ends with such a condition, that can cause us to issue an
3584 inappropriate warning about control reaching the end of a
3585 non-void function. */
3586 jump_over_else
= block_may_fallthru (then_
);
3588 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3589 EXPR_LOC_OR_LOC (expr
, input_location
));
3592 append_to_statement_list (pred
, &expr
);
3594 append_to_statement_list (then_
, &expr
);
3599 tree last
= expr_last (expr
);
3600 t
= build_and_jump (&end_label
);
3601 if (EXPR_HAS_LOCATION (last
))
3602 SET_EXPR_LOCATION (t
, EXPR_LOCATION (last
));
3603 append_to_statement_list (t
, &expr
);
3607 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
3608 append_to_statement_list (t
, &expr
);
3610 append_to_statement_list (else_
, &expr
);
3612 if (emit_end
&& end_label
)
3614 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
3615 append_to_statement_list (t
, &expr
);
3621 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3624 gimple_boolify (tree expr
)
3626 tree type
= TREE_TYPE (expr
);
3627 location_t loc
= EXPR_LOCATION (expr
);
3629 if (TREE_CODE (expr
) == NE_EXPR
3630 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
3631 && integer_zerop (TREE_OPERAND (expr
, 1)))
3633 tree call
= TREE_OPERAND (expr
, 0);
3634 tree fn
= get_callee_fndecl (call
);
3636 /* For __builtin_expect ((long) (x), y) recurse into x as well
3637 if x is truth_value_p. */
3639 && DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
3640 && DECL_FUNCTION_CODE (fn
) == BUILT_IN_EXPECT
3641 && call_expr_nargs (call
) == 2)
3643 tree arg
= CALL_EXPR_ARG (call
, 0);
3646 if (TREE_CODE (arg
) == NOP_EXPR
3647 && TREE_TYPE (arg
) == TREE_TYPE (call
))
3648 arg
= TREE_OPERAND (arg
, 0);
3649 if (truth_value_p (TREE_CODE (arg
)))
3651 arg
= gimple_boolify (arg
);
3652 CALL_EXPR_ARG (call
, 0)
3653 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
3659 switch (TREE_CODE (expr
))
3661 case TRUTH_AND_EXPR
:
3663 case TRUTH_XOR_EXPR
:
3664 case TRUTH_ANDIF_EXPR
:
3665 case TRUTH_ORIF_EXPR
:
3666 /* Also boolify the arguments of truth exprs. */
3667 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
3670 case TRUTH_NOT_EXPR
:
3671 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3673 /* These expressions always produce boolean results. */
3674 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3675 TREE_TYPE (expr
) = boolean_type_node
;
3679 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
3681 case annot_expr_ivdep_kind
:
3682 case annot_expr_no_vector_kind
:
3683 case annot_expr_vector_kind
:
3684 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3685 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3686 TREE_TYPE (expr
) = boolean_type_node
;
3693 if (COMPARISON_CLASS_P (expr
))
3695 /* There expressions always prduce boolean results. */
3696 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3697 TREE_TYPE (expr
) = boolean_type_node
;
3700 /* Other expressions that get here must have boolean values, but
3701 might need to be converted to the appropriate mode. */
3702 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
3704 return fold_convert_loc (loc
, boolean_type_node
, expr
);
3708 /* Given a conditional expression *EXPR_P without side effects, gimplify
3709 its operands. New statements are inserted to PRE_P. */
3711 static enum gimplify_status
3712 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
3714 tree expr
= *expr_p
, cond
;
3715 enum gimplify_status ret
, tret
;
3716 enum tree_code code
;
3718 cond
= gimple_boolify (COND_EXPR_COND (expr
));
3720 /* We need to handle && and || specially, as their gimplification
3721 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3722 code
= TREE_CODE (cond
);
3723 if (code
== TRUTH_ANDIF_EXPR
)
3724 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
3725 else if (code
== TRUTH_ORIF_EXPR
)
3726 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
3727 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_condexpr
, fb_rvalue
);
3728 COND_EXPR_COND (*expr_p
) = cond
;
3730 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
3731 is_gimple_val
, fb_rvalue
);
3732 ret
= MIN (ret
, tret
);
3733 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
3734 is_gimple_val
, fb_rvalue
);
3736 return MIN (ret
, tret
);
3739 /* Return true if evaluating EXPR could trap.
3740 EXPR is GENERIC, while tree_could_trap_p can be called
3744 generic_expr_could_trap_p (tree expr
)
3748 if (!expr
|| is_gimple_val (expr
))
3751 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
3754 n
= TREE_OPERAND_LENGTH (expr
);
3755 for (i
= 0; i
< n
; i
++)
3756 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
3762 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3771 The second form is used when *EXPR_P is of type void.
3773 PRE_P points to the list where side effects that must happen before
3774 *EXPR_P should be stored. */
3776 static enum gimplify_status
3777 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
3779 tree expr
= *expr_p
;
3780 tree type
= TREE_TYPE (expr
);
3781 location_t loc
= EXPR_LOCATION (expr
);
3782 tree tmp
, arm1
, arm2
;
3783 enum gimplify_status ret
;
3784 tree label_true
, label_false
, label_cont
;
3785 bool have_then_clause_p
, have_else_clause_p
;
3787 enum tree_code pred_code
;
3788 gimple_seq seq
= NULL
;
3790 /* If this COND_EXPR has a value, copy the values into a temporary within
3792 if (!VOID_TYPE_P (type
))
3794 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
3797 /* If either an rvalue is ok or we do not require an lvalue, create the
3798 temporary. But we cannot do that if the type is addressable. */
3799 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
3800 && !TREE_ADDRESSABLE (type
))
3802 if (gimplify_ctxp
->allow_rhs_cond_expr
3803 /* If either branch has side effects or could trap, it can't be
3804 evaluated unconditionally. */
3805 && !TREE_SIDE_EFFECTS (then_
)
3806 && !generic_expr_could_trap_p (then_
)
3807 && !TREE_SIDE_EFFECTS (else_
)
3808 && !generic_expr_could_trap_p (else_
))
3809 return gimplify_pure_cond_expr (expr_p
, pre_p
);
3811 tmp
= create_tmp_var (type
, "iftmp");
3815 /* Otherwise, only create and copy references to the values. */
3818 type
= build_pointer_type (type
);
3820 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
3821 then_
= build_fold_addr_expr_loc (loc
, then_
);
3823 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
3824 else_
= build_fold_addr_expr_loc (loc
, else_
);
3827 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
3829 tmp
= create_tmp_var (type
, "iftmp");
3830 result
= build_simple_mem_ref_loc (loc
, tmp
);
3833 /* Build the new then clause, `tmp = then_;'. But don't build the
3834 assignment if the value is void; in C++ it can be if it's a throw. */
3835 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
3836 TREE_OPERAND (expr
, 1) = build2 (MODIFY_EXPR
, type
, tmp
, then_
);
3838 /* Similarly, build the new else clause, `tmp = else_;'. */
3839 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
3840 TREE_OPERAND (expr
, 2) = build2 (MODIFY_EXPR
, type
, tmp
, else_
);
3842 TREE_TYPE (expr
) = void_type_node
;
3843 recalculate_side_effects (expr
);
3845 /* Move the COND_EXPR to the prequeue. */
3846 gimplify_stmt (&expr
, pre_p
);
3852 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3853 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
3854 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
3855 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
3857 /* Make sure the condition has BOOLEAN_TYPE. */
3858 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3860 /* Break apart && and || conditions. */
3861 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
3862 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
3864 expr
= shortcut_cond_expr (expr
);
3866 if (expr
!= *expr_p
)
3870 /* We can't rely on gimplify_expr to re-gimplify the expanded
3871 form properly, as cleanups might cause the target labels to be
3872 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3873 set up a conditional context. */
3874 gimple_push_condition ();
3875 gimplify_stmt (expr_p
, &seq
);
3876 gimple_pop_condition (pre_p
);
3877 gimple_seq_add_seq (pre_p
, seq
);
3883 /* Now do the normal gimplification. */
3885 /* Gimplify condition. */
3886 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
, is_gimple_condexpr
,
3888 if (ret
== GS_ERROR
)
3890 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
3892 gimple_push_condition ();
3894 have_then_clause_p
= have_else_clause_p
= false;
3895 if (TREE_OPERAND (expr
, 1) != NULL
3896 && TREE_CODE (TREE_OPERAND (expr
, 1)) == GOTO_EXPR
3897 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr
, 1))) == LABEL_DECL
3898 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr
, 1)))
3899 == current_function_decl
)
3900 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3901 have different locations, otherwise we end up with incorrect
3902 location information on the branches. */
3904 || !EXPR_HAS_LOCATION (expr
)
3905 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr
, 1))
3906 || EXPR_LOCATION (expr
) == EXPR_LOCATION (TREE_OPERAND (expr
, 1))))
3908 label_true
= GOTO_DESTINATION (TREE_OPERAND (expr
, 1));
3909 have_then_clause_p
= true;
3912 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
3913 if (TREE_OPERAND (expr
, 2) != NULL
3914 && TREE_CODE (TREE_OPERAND (expr
, 2)) == GOTO_EXPR
3915 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr
, 2))) == LABEL_DECL
3916 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr
, 2)))
3917 == current_function_decl
)
3918 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3919 have different locations, otherwise we end up with incorrect
3920 location information on the branches. */
3922 || !EXPR_HAS_LOCATION (expr
)
3923 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr
, 2))
3924 || EXPR_LOCATION (expr
) == EXPR_LOCATION (TREE_OPERAND (expr
, 2))))
3926 label_false
= GOTO_DESTINATION (TREE_OPERAND (expr
, 2));
3927 have_else_clause_p
= true;
3930 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
3932 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
3934 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
3936 gimple_set_no_warning (cond_stmt
, TREE_NO_WARNING (COND_EXPR_COND (expr
)));
3937 gimplify_seq_add_stmt (&seq
, cond_stmt
);
3938 gimple_stmt_iterator gsi
= gsi_last (seq
);
3939 maybe_fold_stmt (&gsi
);
3941 label_cont
= NULL_TREE
;
3942 if (!have_then_clause_p
)
3944 /* For if (...) {} else { code; } put label_true after
3946 if (TREE_OPERAND (expr
, 1) == NULL_TREE
3947 && !have_else_clause_p
3948 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
3949 label_cont
= label_true
;
3952 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
3953 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
3954 /* For if (...) { code; } else {} or
3955 if (...) { code; } else goto label; or
3956 if (...) { code; return; } else { ... }
3957 label_cont isn't needed. */
3958 if (!have_else_clause_p
3959 && TREE_OPERAND (expr
, 2) != NULL_TREE
3960 && gimple_seq_may_fallthru (seq
))
3963 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
3965 g
= gimple_build_goto (label_cont
);
3967 /* GIMPLE_COND's are very low level; they have embedded
3968 gotos. This particular embedded goto should not be marked
3969 with the location of the original COND_EXPR, as it would
3970 correspond to the COND_EXPR's condition, not the ELSE or the
3971 THEN arms. To avoid marking it with the wrong location, flag
3972 it as "no location". */
3973 gimple_set_do_not_emit_location (g
);
3975 gimplify_seq_add_stmt (&seq
, g
);
3979 if (!have_else_clause_p
)
3981 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
3982 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
3985 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
3987 gimple_pop_condition (pre_p
);
3988 gimple_seq_add_seq (pre_p
, seq
);
3990 if (ret
== GS_ERROR
)
3992 else if (have_then_clause_p
|| have_else_clause_p
)
3996 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3997 expr
= TREE_OPERAND (expr
, 0);
3998 gimplify_stmt (&expr
, pre_p
);
4005 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4006 to be marked addressable.
4008 We cannot rely on such an expression being directly markable if a temporary
4009 has been created by the gimplification. In this case, we create another
4010 temporary and initialize it with a copy, which will become a store after we
4011 mark it addressable. This can happen if the front-end passed us something
4012 that it could not mark addressable yet, like a Fortran pass-by-reference
4013 parameter (int) floatvar. */
4016 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4018 while (handled_component_p (*expr_p
))
4019 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4020 if (is_gimple_reg (*expr_p
))
4022 /* Do not allow an SSA name as the temporary. */
4023 tree var
= get_initialized_tmp_var (*expr_p
, seq_p
, NULL
, false);
4024 DECL_GIMPLE_REG_P (var
) = 0;
4029 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4030 a call to __builtin_memcpy. */
4032 static enum gimplify_status
4033 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4036 tree t
, to
, to_ptr
, from
, from_ptr
;
4038 location_t loc
= EXPR_LOCATION (*expr_p
);
4040 to
= TREE_OPERAND (*expr_p
, 0);
4041 from
= TREE_OPERAND (*expr_p
, 1);
4043 /* Mark the RHS addressable. Beware that it may not be possible to do so
4044 directly if a temporary has been created by the gimplification. */
4045 prepare_gimple_addressable (&from
, seq_p
);
4047 mark_addressable (from
);
4048 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4049 gimplify_arg (&from_ptr
, seq_p
, loc
);
4051 mark_addressable (to
);
4052 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4053 gimplify_arg (&to_ptr
, seq_p
, loc
);
4055 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4057 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4061 /* tmp = memcpy() */
4062 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4063 gimple_call_set_lhs (gs
, t
);
4064 gimplify_seq_add_stmt (seq_p
, gs
);
4066 *expr_p
= build_simple_mem_ref (t
);
4070 gimplify_seq_add_stmt (seq_p
, gs
);
4075 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4076 a call to __builtin_memset. In this case we know that the RHS is
4077 a CONSTRUCTOR with an empty element list. */
4079 static enum gimplify_status
4080 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4083 tree t
, from
, to
, to_ptr
;
4085 location_t loc
= EXPR_LOCATION (*expr_p
);
4087 /* Assert our assumptions, to abort instead of producing wrong code
4088 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4089 not be immediately exposed. */
4090 from
= TREE_OPERAND (*expr_p
, 1);
4091 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4092 from
= TREE_OPERAND (from
, 0);
4094 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4095 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4098 to
= TREE_OPERAND (*expr_p
, 0);
4100 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4101 gimplify_arg (&to_ptr
, seq_p
, loc
);
4102 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4104 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4108 /* tmp = memset() */
4109 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4110 gimple_call_set_lhs (gs
, t
);
4111 gimplify_seq_add_stmt (seq_p
, gs
);
4113 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4117 gimplify_seq_add_stmt (seq_p
, gs
);
4122 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4123 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4124 assignment. Return non-null if we detect a potential overlap. */
4126 struct gimplify_init_ctor_preeval_data
4128 /* The base decl of the lhs object. May be NULL, in which case we
4129 have to assume the lhs is indirect. */
4132 /* The alias set of the lhs object. */
4133 alias_set_type lhs_alias_set
;
4137 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4139 struct gimplify_init_ctor_preeval_data
*data
4140 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4143 /* If we find the base object, obviously we have overlap. */
4144 if (data
->lhs_base_decl
== t
)
4147 /* If the constructor component is indirect, determine if we have a
4148 potential overlap with the lhs. The only bits of information we
4149 have to go on at this point are addressability and alias sets. */
4150 if ((INDIRECT_REF_P (t
)
4151 || TREE_CODE (t
) == MEM_REF
)
4152 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4153 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
4156 /* If the constructor component is a call, determine if it can hide a
4157 potential overlap with the lhs through an INDIRECT_REF like above.
4158 ??? Ugh - this is completely broken. In fact this whole analysis
4159 doesn't look conservative. */
4160 if (TREE_CODE (t
) == CALL_EXPR
)
4162 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
4164 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
4165 if (POINTER_TYPE_P (TREE_VALUE (type
))
4166 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4167 && alias_sets_conflict_p (data
->lhs_alias_set
,
4169 (TREE_TYPE (TREE_VALUE (type
)))))
4173 if (IS_TYPE_OR_DECL_P (t
))
4178 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4179 force values that overlap with the lhs (as described by *DATA)
4180 into temporaries. */
4183 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4184 struct gimplify_init_ctor_preeval_data
*data
)
4186 enum gimplify_status one
;
4188 /* If the value is constant, then there's nothing to pre-evaluate. */
4189 if (TREE_CONSTANT (*expr_p
))
4191 /* Ensure it does not have side effects, it might contain a reference to
4192 the object we're initializing. */
4193 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
4197 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4198 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
4201 /* Recurse for nested constructors. */
4202 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
4204 unsigned HOST_WIDE_INT ix
;
4205 constructor_elt
*ce
;
4206 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
4208 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
4209 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
4214 /* If this is a variable sized type, we must remember the size. */
4215 maybe_with_size_expr (expr_p
);
4217 /* Gimplify the constructor element to something appropriate for the rhs
4218 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4219 the gimplifier will consider this a store to memory. Doing this
4220 gimplification now means that we won't have to deal with complicated
4221 language-specific trees, nor trees like SAVE_EXPR that can induce
4222 exponential search behavior. */
4223 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
4224 if (one
== GS_ERROR
)
4230 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4231 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4232 always be true for all scalars, since is_gimple_mem_rhs insists on a
4233 temporary variable for them. */
4234 if (DECL_P (*expr_p
))
4237 /* If this is of variable size, we have no choice but to assume it doesn't
4238 overlap since we can't make a temporary for it. */
4239 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
4242 /* Otherwise, we must search for overlap ... */
4243 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
4246 /* ... and if found, force the value into a temporary. */
4247 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
4250 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4251 a RANGE_EXPR in a CONSTRUCTOR for an array.
4255 object[var] = value;
4262 We increment var _after_ the loop exit check because we might otherwise
4263 fail if upper == TYPE_MAX_VALUE (type for upper).
4265 Note that we never have to deal with SAVE_EXPRs here, because this has
4266 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4268 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
4269 gimple_seq
*, bool);
4272 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
4273 tree value
, tree array_elt_type
,
4274 gimple_seq
*pre_p
, bool cleared
)
4276 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
4277 tree var
, var_type
, cref
, tmp
;
4279 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
4280 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
4281 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
4283 /* Create and initialize the index variable. */
4284 var_type
= TREE_TYPE (upper
);
4285 var
= create_tmp_var (var_type
);
4286 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
4288 /* Add the loop entry label. */
4289 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
4291 /* Build the reference. */
4292 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4293 var
, NULL_TREE
, NULL_TREE
);
4295 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4296 the store. Otherwise just assign value to the reference. */
4298 if (TREE_CODE (value
) == CONSTRUCTOR
)
4299 /* NB we might have to call ourself recursively through
4300 gimplify_init_ctor_eval if the value is a constructor. */
4301 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4304 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
4306 /* We exit the loop when the index var is equal to the upper bound. */
4307 gimplify_seq_add_stmt (pre_p
,
4308 gimple_build_cond (EQ_EXPR
, var
, upper
,
4309 loop_exit_label
, fall_thru_label
));
4311 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
4313 /* Otherwise, increment the index var... */
4314 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
4315 fold_convert (var_type
, integer_one_node
));
4316 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
4318 /* ...and jump back to the loop entry. */
4319 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
4321 /* Add the loop exit label. */
4322 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
4325 /* Return true if FDECL is accessing a field that is zero sized. */
4328 zero_sized_field_decl (const_tree fdecl
)
4330 if (TREE_CODE (fdecl
) == FIELD_DECL
&& DECL_SIZE (fdecl
)
4331 && integer_zerop (DECL_SIZE (fdecl
)))
4336 /* Return true if TYPE is zero sized. */
4339 zero_sized_type (const_tree type
)
4341 if (AGGREGATE_TYPE_P (type
) && TYPE_SIZE (type
)
4342 && integer_zerop (TYPE_SIZE (type
)))
4347 /* A subroutine of gimplify_init_constructor. Generate individual
4348 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4349 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4350 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4354 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
4355 gimple_seq
*pre_p
, bool cleared
)
4357 tree array_elt_type
= NULL
;
4358 unsigned HOST_WIDE_INT ix
;
4359 tree purpose
, value
;
4361 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
4362 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
4364 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
4368 /* NULL values are created above for gimplification errors. */
4372 if (cleared
&& initializer_zerop (value
))
4375 /* ??? Here's to hoping the front end fills in all of the indices,
4376 so we don't have to figure out what's missing ourselves. */
4377 gcc_assert (purpose
);
4379 /* Skip zero-sized fields, unless value has side-effects. This can
4380 happen with calls to functions returning a zero-sized type, which
4381 we shouldn't discard. As a number of downstream passes don't
4382 expect sets of zero-sized fields, we rely on the gimplification of
4383 the MODIFY_EXPR we make below to drop the assignment statement. */
4384 if (! TREE_SIDE_EFFECTS (value
) && zero_sized_field_decl (purpose
))
4387 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4389 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4391 tree lower
= TREE_OPERAND (purpose
, 0);
4392 tree upper
= TREE_OPERAND (purpose
, 1);
4394 /* If the lower bound is equal to upper, just treat it as if
4395 upper was the index. */
4396 if (simple_cst_equal (lower
, upper
))
4400 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
4401 array_elt_type
, pre_p
, cleared
);
4408 /* Do not use bitsizetype for ARRAY_REF indices. */
4409 if (TYPE_DOMAIN (TREE_TYPE (object
)))
4411 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
4413 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4414 purpose
, NULL_TREE
, NULL_TREE
);
4418 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
4419 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
4420 unshare_expr (object
), purpose
, NULL_TREE
);
4423 if (TREE_CODE (value
) == CONSTRUCTOR
4424 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
4425 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4429 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
4430 gimplify_and_add (init
, pre_p
);
4436 /* Return the appropriate RHS predicate for this LHS. */
4439 rhs_predicate_for (tree lhs
)
4441 if (is_gimple_reg (lhs
))
4442 return is_gimple_reg_rhs_or_call
;
4444 return is_gimple_mem_rhs_or_call
;
4447 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4448 before the LHS has been gimplified. */
4450 static gimple_predicate
4451 initial_rhs_predicate_for (tree lhs
)
4453 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
4454 return is_gimple_reg_rhs_or_call
;
4456 return is_gimple_mem_rhs_or_call
;
4459 /* Gimplify a C99 compound literal expression. This just means adding
4460 the DECL_EXPR before the current statement and using its anonymous
4463 static enum gimplify_status
4464 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
4465 bool (*gimple_test_f
) (tree
),
4466 fallback_t fallback
)
4468 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
4469 tree decl
= DECL_EXPR_DECL (decl_s
);
4470 tree init
= DECL_INITIAL (decl
);
4471 /* Mark the decl as addressable if the compound literal
4472 expression is addressable now, otherwise it is marked too late
4473 after we gimplify the initialization expression. */
4474 if (TREE_ADDRESSABLE (*expr_p
))
4475 TREE_ADDRESSABLE (decl
) = 1;
4476 /* Otherwise, if we don't need an lvalue and have a literal directly
4477 substitute it. Check if it matches the gimple predicate, as
4478 otherwise we'd generate a new temporary, and we can as well just
4479 use the decl we already have. */
4480 else if (!TREE_ADDRESSABLE (decl
)
4482 && (fallback
& fb_lvalue
) == 0
4483 && gimple_test_f (init
))
4489 /* Preliminarily mark non-addressed complex variables as eligible
4490 for promotion to gimple registers. We'll transform their uses
4492 if ((TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
4493 || TREE_CODE (TREE_TYPE (decl
)) == VECTOR_TYPE
)
4494 && !TREE_THIS_VOLATILE (decl
)
4495 && !needs_to_live_in_memory (decl
))
4496 DECL_GIMPLE_REG_P (decl
) = 1;
4498 /* If the decl is not addressable, then it is being used in some
4499 expression or on the right hand side of a statement, and it can
4500 be put into a readonly data section. */
4501 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
4502 TREE_READONLY (decl
) = 1;
4504 /* This decl isn't mentioned in the enclosing block, so add it to the
4505 list of temps. FIXME it seems a bit of a kludge to say that
4506 anonymous artificial vars aren't pushed, but everything else is. */
4507 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
4508 gimple_add_tmp_var (decl
);
4510 gimplify_and_add (decl_s
, pre_p
);
4515 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4516 return a new CONSTRUCTOR if something changed. */
4519 optimize_compound_literals_in_ctor (tree orig_ctor
)
4521 tree ctor
= orig_ctor
;
4522 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
4523 unsigned int idx
, num
= vec_safe_length (elts
);
4525 for (idx
= 0; idx
< num
; idx
++)
4527 tree value
= (*elts
)[idx
].value
;
4528 tree newval
= value
;
4529 if (TREE_CODE (value
) == CONSTRUCTOR
)
4530 newval
= optimize_compound_literals_in_ctor (value
);
4531 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
4533 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
4534 tree decl
= DECL_EXPR_DECL (decl_s
);
4535 tree init
= DECL_INITIAL (decl
);
4537 if (!TREE_ADDRESSABLE (value
)
4538 && !TREE_ADDRESSABLE (decl
)
4540 && TREE_CODE (init
) == CONSTRUCTOR
)
4541 newval
= optimize_compound_literals_in_ctor (init
);
4543 if (newval
== value
)
4546 if (ctor
== orig_ctor
)
4548 ctor
= copy_node (orig_ctor
);
4549 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
4550 elts
= CONSTRUCTOR_ELTS (ctor
);
4552 (*elts
)[idx
].value
= newval
;
4557 /* A subroutine of gimplify_modify_expr. Break out elements of a
4558 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4560 Note that we still need to clear any elements that don't have explicit
4561 initializers, so if not all elements are initialized we keep the
4562 original MODIFY_EXPR, we just remove all of the constructor elements.
4564 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4565 GS_ERROR if we would have to create a temporary when gimplifying
4566 this constructor. Otherwise, return GS_OK.
4568 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4570 static enum gimplify_status
4571 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4572 bool want_value
, bool notify_temp_creation
)
4574 tree object
, ctor
, type
;
4575 enum gimplify_status ret
;
4576 vec
<constructor_elt
, va_gc
> *elts
;
4578 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
4580 if (!notify_temp_creation
)
4582 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
4583 is_gimple_lvalue
, fb_lvalue
);
4584 if (ret
== GS_ERROR
)
4588 object
= TREE_OPERAND (*expr_p
, 0);
4589 ctor
= TREE_OPERAND (*expr_p
, 1) =
4590 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
4591 type
= TREE_TYPE (ctor
);
4592 elts
= CONSTRUCTOR_ELTS (ctor
);
4595 switch (TREE_CODE (type
))
4599 case QUAL_UNION_TYPE
:
4602 struct gimplify_init_ctor_preeval_data preeval_data
;
4603 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
4604 bool cleared
, complete_p
, valid_const_initializer
;
4606 /* Aggregate types must lower constructors to initialization of
4607 individual elements. The exception is that a CONSTRUCTOR node
4608 with no elements indicates zero-initialization of the whole. */
4609 if (vec_safe_is_empty (elts
))
4611 if (notify_temp_creation
)
4616 /* Fetch information about the constructor to direct later processing.
4617 We might want to make static versions of it in various cases, and
4618 can only do so if it known to be a valid constant initializer. */
4619 valid_const_initializer
4620 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
4621 &num_ctor_elements
, &complete_p
);
4623 /* If a const aggregate variable is being initialized, then it
4624 should never be a lose to promote the variable to be static. */
4625 if (valid_const_initializer
4626 && num_nonzero_elements
> 1
4627 && TREE_READONLY (object
)
4629 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
)))
4631 if (notify_temp_creation
)
4633 DECL_INITIAL (object
) = ctor
;
4634 TREE_STATIC (object
) = 1;
4635 if (!DECL_NAME (object
))
4636 DECL_NAME (object
) = create_tmp_var_name ("C");
4637 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
4639 /* ??? C++ doesn't automatically append a .<number> to the
4640 assembler name, and even when it does, it looks at FE private
4641 data structures to figure out what that number should be,
4642 which are not set for this variable. I suppose this is
4643 important for local statics for inline functions, which aren't
4644 "local" in the object file sense. So in order to get a unique
4645 TU-local symbol, we must invoke the lhd version now. */
4646 lhd_set_decl_assembler_name (object
);
4648 *expr_p
= NULL_TREE
;
4652 /* If there are "lots" of initialized elements, even discounting
4653 those that are not address constants (and thus *must* be
4654 computed at runtime), then partition the constructor into
4655 constant and non-constant parts. Block copy the constant
4656 parts in, then generate code for the non-constant parts. */
4657 /* TODO. There's code in cp/typeck.c to do this. */
4659 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
4660 /* store_constructor will ignore the clearing of variable-sized
4661 objects. Initializers for such objects must explicitly set
4662 every field that needs to be set. */
4664 else if (!complete_p
&& !CONSTRUCTOR_NO_CLEARING (ctor
))
4665 /* If the constructor isn't complete, clear the whole object
4666 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4668 ??? This ought not to be needed. For any element not present
4669 in the initializer, we should simply set them to zero. Except
4670 we'd need to *find* the elements that are not present, and that
4671 requires trickery to avoid quadratic compile-time behavior in
4672 large cases or excessive memory use in small cases. */
4674 else if (num_ctor_elements
- num_nonzero_elements
4675 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
4676 && num_nonzero_elements
< num_ctor_elements
/ 4)
4677 /* If there are "lots" of zeros, it's more efficient to clear
4678 the memory and then set the nonzero elements. */
4683 /* If there are "lots" of initialized elements, and all of them
4684 are valid address constants, then the entire initializer can
4685 be dropped to memory, and then memcpy'd out. Don't do this
4686 for sparse arrays, though, as it's more efficient to follow
4687 the standard CONSTRUCTOR behavior of memset followed by
4688 individual element initialization. Also don't do this for small
4689 all-zero initializers (which aren't big enough to merit
4690 clearing), and don't try to make bitwise copies of
4691 TREE_ADDRESSABLE types.
4693 We cannot apply such transformation when compiling chkp static
4694 initializer because creation of initializer image in the memory
4695 will require static initialization of bounds for it. It should
4696 result in another gimplification of similar initializer and we
4697 may fall into infinite loop. */
4698 if (valid_const_initializer
4699 && !(cleared
|| num_nonzero_elements
== 0)
4700 && !TREE_ADDRESSABLE (type
)
4701 && (!current_function_decl
4702 || !lookup_attribute ("chkp ctor",
4703 DECL_ATTRIBUTES (current_function_decl
))))
4705 HOST_WIDE_INT size
= int_size_in_bytes (type
);
4708 /* ??? We can still get unbounded array types, at least
4709 from the C++ front end. This seems wrong, but attempt
4710 to work around it for now. */
4713 size
= int_size_in_bytes (TREE_TYPE (object
));
4715 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
4718 /* Find the maximum alignment we can assume for the object. */
4719 /* ??? Make use of DECL_OFFSET_ALIGN. */
4720 if (DECL_P (object
))
4721 align
= DECL_ALIGN (object
);
4723 align
= TYPE_ALIGN (type
);
4725 /* Do a block move either if the size is so small as to make
4726 each individual move a sub-unit move on average, or if it
4727 is so large as to make individual moves inefficient. */
4729 && num_nonzero_elements
> 1
4730 && (size
< num_nonzero_elements
4731 || !can_move_by_pieces (size
, align
)))
4733 if (notify_temp_creation
)
4736 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
4737 ctor
= tree_output_constant_def (ctor
);
4738 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
4739 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
4740 TREE_OPERAND (*expr_p
, 1) = ctor
;
4742 /* This is no longer an assignment of a CONSTRUCTOR, but
4743 we still may have processing to do on the LHS. So
4744 pretend we didn't do anything here to let that happen. */
4745 return GS_UNHANDLED
;
4749 /* If the target is volatile, we have non-zero elements and more than
4750 one field to assign, initialize the target from a temporary. */
4751 if (TREE_THIS_VOLATILE (object
)
4752 && !TREE_ADDRESSABLE (type
)
4753 && num_nonzero_elements
> 0
4754 && vec_safe_length (elts
) > 1)
4756 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
4757 TREE_OPERAND (*expr_p
, 0) = temp
;
4758 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
4760 build2 (MODIFY_EXPR
, void_type_node
,
4765 if (notify_temp_creation
)
4768 /* If there are nonzero elements and if needed, pre-evaluate to capture
4769 elements overlapping with the lhs into temporaries. We must do this
4770 before clearing to fetch the values before they are zeroed-out. */
4771 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
4773 preeval_data
.lhs_base_decl
= get_base_address (object
);
4774 if (!DECL_P (preeval_data
.lhs_base_decl
))
4775 preeval_data
.lhs_base_decl
= NULL
;
4776 preeval_data
.lhs_alias_set
= get_alias_set (object
);
4778 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
4779 pre_p
, post_p
, &preeval_data
);
4782 bool ctor_has_side_effects_p
4783 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
4787 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4788 Note that we still have to gimplify, in order to handle the
4789 case of variable sized types. Avoid shared tree structures. */
4790 CONSTRUCTOR_ELTS (ctor
) = NULL
;
4791 TREE_SIDE_EFFECTS (ctor
) = 0;
4792 object
= unshare_expr (object
);
4793 gimplify_stmt (expr_p
, pre_p
);
4796 /* If we have not block cleared the object, or if there are nonzero
4797 elements in the constructor, or if the constructor has side effects,
4798 add assignments to the individual scalar fields of the object. */
4800 || num_nonzero_elements
> 0
4801 || ctor_has_side_effects_p
)
4802 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
4804 *expr_p
= NULL_TREE
;
4812 if (notify_temp_creation
)
4815 /* Extract the real and imaginary parts out of the ctor. */
4816 gcc_assert (elts
->length () == 2);
4817 r
= (*elts
)[0].value
;
4818 i
= (*elts
)[1].value
;
4819 if (r
== NULL
|| i
== NULL
)
4821 tree zero
= build_zero_cst (TREE_TYPE (type
));
4828 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4829 represent creation of a complex value. */
4830 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
4832 ctor
= build_complex (type
, r
, i
);
4833 TREE_OPERAND (*expr_p
, 1) = ctor
;
4837 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
4838 TREE_OPERAND (*expr_p
, 1) = ctor
;
4839 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
4842 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
4850 unsigned HOST_WIDE_INT ix
;
4851 constructor_elt
*ce
;
4853 if (notify_temp_creation
)
4856 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4857 if (TREE_CONSTANT (ctor
))
4859 bool constant_p
= true;
4862 /* Even when ctor is constant, it might contain non-*_CST
4863 elements, such as addresses or trapping values like
4864 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4865 in VECTOR_CST nodes. */
4866 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
4867 if (!CONSTANT_CLASS_P (value
))
4875 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
4879 TREE_CONSTANT (ctor
) = 0;
4882 /* Vector types use CONSTRUCTOR all the way through gimple
4883 compilation as a general initializer. */
4884 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
4886 enum gimplify_status tret
;
4887 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
4889 if (tret
== GS_ERROR
)
4891 else if (TREE_STATIC (ctor
)
4892 && !initializer_constant_valid_p (ce
->value
,
4893 TREE_TYPE (ce
->value
)))
4894 TREE_STATIC (ctor
) = 0;
4896 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
4897 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
4902 /* So how did we get a CONSTRUCTOR for a scalar type? */
4906 if (ret
== GS_ERROR
)
4908 /* If we have gimplified both sides of the initializer but have
4909 not emitted an assignment, do so now. */
4912 tree lhs
= TREE_OPERAND (*expr_p
, 0);
4913 tree rhs
= TREE_OPERAND (*expr_p
, 1);
4914 gassign
*init
= gimple_build_assign (lhs
, rhs
);
4915 gimplify_seq_add_stmt (pre_p
, init
);
4929 /* Given a pointer value OP0, return a simplified version of an
4930 indirection through OP0, or NULL_TREE if no simplification is
4931 possible. This may only be applied to a rhs of an expression.
4932 Note that the resulting type may be different from the type pointed
4933 to in the sense that it is still compatible from the langhooks
4937 gimple_fold_indirect_ref_rhs (tree t
)
4939 return gimple_fold_indirect_ref (t
);
4942 /* Subroutine of gimplify_modify_expr to do simplifications of
4943 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4944 something changes. */
4946 static enum gimplify_status
4947 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
4948 gimple_seq
*pre_p
, gimple_seq
*post_p
,
4951 enum gimplify_status ret
= GS_UNHANDLED
;
4957 switch (TREE_CODE (*from_p
))
4960 /* If we're assigning from a read-only variable initialized with
4961 a constructor, do the direct assignment from the constructor,
4962 but only if neither source nor target are volatile since this
4963 latter assignment might end up being done on a per-field basis. */
4964 if (DECL_INITIAL (*from_p
)
4965 && TREE_READONLY (*from_p
)
4966 && !TREE_THIS_VOLATILE (*from_p
)
4967 && !TREE_THIS_VOLATILE (*to_p
)
4968 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
)
4970 tree old_from
= *from_p
;
4971 enum gimplify_status subret
;
4973 /* Move the constructor into the RHS. */
4974 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
4976 /* Let's see if gimplify_init_constructor will need to put
4978 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
4980 if (subret
== GS_ERROR
)
4982 /* If so, revert the change. */
4994 /* If we have code like
4998 where the type of "x" is a (possibly cv-qualified variant
4999 of "A"), treat the entire expression as identical to "x".
5000 This kind of code arises in C++ when an object is bound
5001 to a const reference, and if "x" is a TARGET_EXPR we want
5002 to take advantage of the optimization below. */
5003 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
5004 tree t
= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0));
5007 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
5010 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
5011 build_fold_addr_expr (t
));
5012 if (REFERENCE_CLASS_P (t
))
5013 TREE_THIS_VOLATILE (t
) = volatile_p
;
5024 /* If we are initializing something from a TARGET_EXPR, strip the
5025 TARGET_EXPR and initialize it directly, if possible. This can't
5026 be done if the initializer is void, since that implies that the
5027 temporary is set in some non-trivial way.
5029 ??? What about code that pulls out the temp and uses it
5030 elsewhere? I think that such code never uses the TARGET_EXPR as
5031 an initializer. If I'm wrong, we'll die because the temp won't
5032 have any RTL. In that case, I guess we'll need to replace
5033 references somehow. */
5034 tree init
= TARGET_EXPR_INITIAL (*from_p
);
5037 && !VOID_TYPE_P (TREE_TYPE (init
)))
5047 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5049 gimplify_compound_expr (from_p
, pre_p
, true);
5055 /* If we already made some changes, let the front end have a
5056 crack at this before we break it down. */
5057 if (ret
!= GS_UNHANDLED
)
5059 /* If we're initializing from a CONSTRUCTOR, break this into
5060 individual MODIFY_EXPRs. */
5061 return gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
5065 /* If we're assigning to a non-register type, push the assignment
5066 down into the branches. This is mandatory for ADDRESSABLE types,
5067 since we cannot generate temporaries for such, but it saves a
5068 copy in other cases as well. */
5069 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
5071 /* This code should mirror the code in gimplify_cond_expr. */
5072 enum tree_code code
= TREE_CODE (*expr_p
);
5073 tree cond
= *from_p
;
5074 tree result
= *to_p
;
5076 ret
= gimplify_expr (&result
, pre_p
, post_p
,
5077 is_gimple_lvalue
, fb_lvalue
);
5078 if (ret
!= GS_ERROR
)
5081 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
5082 TREE_OPERAND (cond
, 1)
5083 = build2 (code
, void_type_node
, result
,
5084 TREE_OPERAND (cond
, 1));
5085 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5086 TREE_OPERAND (cond
, 2)
5087 = build2 (code
, void_type_node
, unshare_expr (result
),
5088 TREE_OPERAND (cond
, 2));
5090 TREE_TYPE (cond
) = void_type_node
;
5091 recalculate_side_effects (cond
);
5095 gimplify_and_add (cond
, pre_p
);
5096 *expr_p
= unshare_expr (result
);
5105 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5106 return slot so that we don't generate a temporary. */
5107 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
5108 && aggregate_value_p (*from_p
, *from_p
))
5112 if (!(rhs_predicate_for (*to_p
))(*from_p
))
5113 /* If we need a temporary, *to_p isn't accurate. */
5115 /* It's OK to use the return slot directly unless it's an NRV. */
5116 else if (TREE_CODE (*to_p
) == RESULT_DECL
5117 && DECL_NAME (*to_p
) == NULL_TREE
5118 && needs_to_live_in_memory (*to_p
))
5120 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
5121 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
5122 /* Don't force regs into memory. */
5124 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
5125 /* It's OK to use the target directly if it's being
5128 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p
)))
5130 /* Always use the target and thus RSO for variable-sized types.
5131 GIMPLE cannot deal with a variable-sized assignment
5132 embedded in a call statement. */
5134 else if (TREE_CODE (*to_p
) != SSA_NAME
5135 && (!is_gimple_variable (*to_p
)
5136 || needs_to_live_in_memory (*to_p
)))
5137 /* Don't use the original target if it's already addressable;
5138 if its address escapes, and the called function uses the
5139 NRV optimization, a conforming program could see *to_p
5140 change before the called function returns; see c++/19317.
5141 When optimizing, the return_slot pass marks more functions
5142 as safe after we have escape info. */
5149 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
5150 mark_addressable (*to_p
);
5155 case WITH_SIZE_EXPR
:
5156 /* Likewise for calls that return an aggregate of non-constant size,
5157 since we would not be able to generate a temporary at all. */
5158 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
5160 *from_p
= TREE_OPERAND (*from_p
, 0);
5161 /* We don't change ret in this case because the
5162 WITH_SIZE_EXPR might have been added in
5163 gimplify_modify_expr, so returning GS_OK would lead to an
5169 /* If we're initializing from a container, push the initialization
5171 case CLEANUP_POINT_EXPR
:
5173 case STATEMENT_LIST
:
5175 tree wrap
= *from_p
;
5178 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
5180 if (ret
!= GS_ERROR
)
5183 t
= voidify_wrapper_expr (wrap
, *expr_p
);
5184 gcc_assert (t
== *expr_p
);
5188 gimplify_and_add (wrap
, pre_p
);
5189 *expr_p
= unshare_expr (*to_p
);
5196 case COMPOUND_LITERAL_EXPR
:
5198 tree complit
= TREE_OPERAND (*expr_p
, 1);
5199 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
5200 tree decl
= DECL_EXPR_DECL (decl_s
);
5201 tree init
= DECL_INITIAL (decl
);
5203 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5204 into struct T x = { 0, 1, 2 } if the address of the
5205 compound literal has never been taken. */
5206 if (!TREE_ADDRESSABLE (complit
)
5207 && !TREE_ADDRESSABLE (decl
)
5210 *expr_p
= copy_node (*expr_p
);
5211 TREE_OPERAND (*expr_p
, 1) = init
;
5226 /* Return true if T looks like a valid GIMPLE statement. */
5229 is_gimple_stmt (tree t
)
5231 const enum tree_code code
= TREE_CODE (t
);
5236 /* The only valid NOP_EXPR is the empty statement. */
5237 return IS_EMPTY_STMT (t
);
5241 /* These are only valid if they're void. */
5242 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
5248 case CASE_LABEL_EXPR
:
5249 case TRY_CATCH_EXPR
:
5250 case TRY_FINALLY_EXPR
:
5251 case EH_FILTER_EXPR
:
5254 case STATEMENT_LIST
:
5258 case OACC_HOST_DATA
:
5261 case OACC_ENTER_DATA
:
5262 case OACC_EXIT_DATA
:
5268 case OMP_DISTRIBUTE
:
5279 case OMP_TARGET_DATA
:
5280 case OMP_TARGET_UPDATE
:
5281 case OMP_TARGET_ENTER_DATA
:
5282 case OMP_TARGET_EXIT_DATA
:
5285 /* These are always void. */
5291 /* These are valid regardless of their type. */
5300 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5301 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5302 DECL_GIMPLE_REG_P set.
5304 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5305 other, unmodified part of the complex object just before the total store.
5306 As a consequence, if the object is still uninitialized, an undefined value
5307 will be loaded into a register, which may result in a spurious exception
5308 if the register is floating-point and the value happens to be a signaling
5309 NaN for example. Then the fully-fledged complex operations lowering pass
5310 followed by a DCE pass are necessary in order to fix things up. */
5312 static enum gimplify_status
5313 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
5316 enum tree_code code
, ocode
;
5317 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
5319 lhs
= TREE_OPERAND (*expr_p
, 0);
5320 rhs
= TREE_OPERAND (*expr_p
, 1);
5321 code
= TREE_CODE (lhs
);
5322 lhs
= TREE_OPERAND (lhs
, 0);
5324 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
5325 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
5326 TREE_NO_WARNING (other
) = 1;
5327 other
= get_formal_tmp_var (other
, pre_p
);
5329 realpart
= code
== REALPART_EXPR
? rhs
: other
;
5330 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
5332 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
5333 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
5335 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
5337 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
5338 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
5343 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5349 PRE_P points to the list where side effects that must happen before
5350 *EXPR_P should be stored.
5352 POST_P points to the list where side effects that must happen after
5353 *EXPR_P should be stored.
5355 WANT_VALUE is nonzero iff we want to use the value of this expression
5356 in another expression. */
5358 static enum gimplify_status
5359 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5362 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
5363 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
5364 enum gimplify_status ret
= GS_UNHANDLED
;
5366 location_t loc
= EXPR_LOCATION (*expr_p
);
5367 gimple_stmt_iterator gsi
;
5369 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
5370 || TREE_CODE (*expr_p
) == INIT_EXPR
);
5372 /* Trying to simplify a clobber using normal logic doesn't work,
5373 so handle it here. */
5374 if (TREE_CLOBBER_P (*from_p
))
5376 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5377 if (ret
== GS_ERROR
)
5379 gcc_assert (!want_value
5380 && (VAR_P (*to_p
) || TREE_CODE (*to_p
) == MEM_REF
));
5381 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
5386 /* Insert pointer conversions required by the middle-end that are not
5387 required by the frontend. This fixes middle-end type checking for
5388 for example gcc.dg/redecl-6.c. */
5389 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
5391 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
5392 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
5393 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
5396 /* See if any simplifications can be done based on what the RHS is. */
5397 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5399 if (ret
!= GS_UNHANDLED
)
5402 /* For zero sized types only gimplify the left hand side and right hand
5403 side as statements and throw away the assignment. Do this after
5404 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5406 if (zero_sized_type (TREE_TYPE (*from_p
)) && !want_value
)
5408 gimplify_stmt (from_p
, pre_p
);
5409 gimplify_stmt (to_p
, pre_p
);
5410 *expr_p
= NULL_TREE
;
5414 /* If the value being copied is of variable width, compute the length
5415 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5416 before gimplifying any of the operands so that we can resolve any
5417 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5418 the size of the expression to be copied, not of the destination, so
5419 that is what we must do here. */
5420 maybe_with_size_expr (from_p
);
5422 /* As a special case, we have to temporarily allow for assignments
5423 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5424 a toplevel statement, when gimplifying the GENERIC expression
5425 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5426 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5428 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5429 prevent gimplify_expr from trying to create a new temporary for
5430 foo's LHS, we tell it that it should only gimplify until it
5431 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5432 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5433 and all we need to do here is set 'a' to be its LHS. */
5435 /* Gimplify the RHS first for C++17 and bug 71104. */
5436 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
5437 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
5438 if (ret
== GS_ERROR
)
5441 /* Then gimplify the LHS. */
5442 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5443 twice we have to make sure to gimplify into non-SSA as otherwise
5444 the abnormal edge added later will make those defs not dominate
5446 ??? Technically this applies only to the registers used in the
5447 resulting non-register *TO_P. */
5448 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
5450 && TREE_CODE (*from_p
) == CALL_EXPR
5451 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
5452 gimplify_ctxp
->into_ssa
= false;
5453 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5454 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
5455 if (ret
== GS_ERROR
)
5458 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5459 guess for the predicate was wrong. */
5460 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
5461 if (final_pred
!= initial_pred
)
5463 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
5464 if (ret
== GS_ERROR
)
5468 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5469 size as argument to the call. */
5470 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5472 tree call
= TREE_OPERAND (*from_p
, 0);
5473 tree vlasize
= TREE_OPERAND (*from_p
, 1);
5475 if (TREE_CODE (call
) == CALL_EXPR
5476 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
5478 int nargs
= call_expr_nargs (call
);
5479 tree type
= TREE_TYPE (call
);
5480 tree ap
= CALL_EXPR_ARG (call
, 0);
5481 tree tag
= CALL_EXPR_ARG (call
, 1);
5482 tree aptag
= CALL_EXPR_ARG (call
, 2);
5483 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
5487 TREE_OPERAND (*from_p
, 0) = newcall
;
5491 /* Now see if the above changed *from_p to something we handle specially. */
5492 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5494 if (ret
!= GS_UNHANDLED
)
5497 /* If we've got a variable sized assignment between two lvalues (i.e. does
5498 not involve a call), then we can make things a bit more straightforward
5499 by converting the assignment to memcpy or memset. */
5500 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5502 tree from
= TREE_OPERAND (*from_p
, 0);
5503 tree size
= TREE_OPERAND (*from_p
, 1);
5505 if (TREE_CODE (from
) == CONSTRUCTOR
)
5506 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
5508 if (is_gimple_addressable (from
))
5511 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
5516 /* Transform partial stores to non-addressable complex variables into
5517 total stores. This allows us to use real instead of virtual operands
5518 for these variables, which improves optimization. */
5519 if ((TREE_CODE (*to_p
) == REALPART_EXPR
5520 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
5521 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
5522 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
5524 /* Try to alleviate the effects of the gimplification creating artificial
5525 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5526 make sure not to create DECL_DEBUG_EXPR links across functions. */
5527 if (!gimplify_ctxp
->into_ssa
5529 && DECL_IGNORED_P (*from_p
)
5531 && !DECL_IGNORED_P (*to_p
)
5532 && decl_function_context (*to_p
) == current_function_decl
)
5534 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
5536 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
5537 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
5538 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
5541 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
5542 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
5544 if (TREE_CODE (*from_p
) == CALL_EXPR
)
5546 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5547 instead of a GIMPLE_ASSIGN. */
5549 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
5551 /* Gimplify internal functions created in the FEs. */
5552 int nargs
= call_expr_nargs (*from_p
), i
;
5553 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
5554 auto_vec
<tree
> vargs (nargs
);
5556 for (i
= 0; i
< nargs
; i
++)
5558 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
5559 EXPR_LOCATION (*from_p
));
5560 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
5562 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
5563 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
5567 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
5568 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
5569 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
5570 tree fndecl
= get_callee_fndecl (*from_p
);
5572 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
5573 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
5574 && call_expr_nargs (*from_p
) == 3)
5575 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
5576 CALL_EXPR_ARG (*from_p
, 0),
5577 CALL_EXPR_ARG (*from_p
, 1),
5578 CALL_EXPR_ARG (*from_p
, 2));
5581 call_stmt
= gimple_build_call_from_tree (*from_p
);
5582 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fnptrtype
));
5585 notice_special_calls (call_stmt
);
5586 if (!gimple_call_noreturn_p (call_stmt
) || !should_remove_lhs_p (*to_p
))
5587 gimple_call_set_lhs (call_stmt
, *to_p
);
5588 else if (TREE_CODE (*to_p
) == SSA_NAME
)
5589 /* The above is somewhat premature, avoid ICEing later for a
5590 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5591 ??? This doesn't make it a default-def. */
5592 SSA_NAME_DEF_STMT (*to_p
) = gimple_build_nop ();
5597 assign
= gimple_build_assign (*to_p
, *from_p
);
5598 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
5599 if (COMPARISON_CLASS_P (*from_p
))
5600 gimple_set_no_warning (assign
, TREE_NO_WARNING (*from_p
));
5603 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
5605 /* We should have got an SSA name from the start. */
5606 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
5607 || ! gimple_in_ssa_p (cfun
));
5610 gimplify_seq_add_stmt (pre_p
, assign
);
5611 gsi
= gsi_last (*pre_p
);
5612 maybe_fold_stmt (&gsi
);
5616 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
5625 /* Gimplify a comparison between two variable-sized objects. Do this
5626 with a call to BUILT_IN_MEMCMP. */
5628 static enum gimplify_status
5629 gimplify_variable_sized_compare (tree
*expr_p
)
5631 location_t loc
= EXPR_LOCATION (*expr_p
);
5632 tree op0
= TREE_OPERAND (*expr_p
, 0);
5633 tree op1
= TREE_OPERAND (*expr_p
, 1);
5634 tree t
, arg
, dest
, src
, expr
;
5636 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
5637 arg
= unshare_expr (arg
);
5638 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
5639 src
= build_fold_addr_expr_loc (loc
, op1
);
5640 dest
= build_fold_addr_expr_loc (loc
, op0
);
5641 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
5642 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
5645 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
5646 SET_EXPR_LOCATION (expr
, loc
);
5652 /* Gimplify a comparison between two aggregate objects of integral scalar
5653 mode as a comparison between the bitwise equivalent scalar values. */
5655 static enum gimplify_status
5656 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
5658 location_t loc
= EXPR_LOCATION (*expr_p
);
5659 tree op0
= TREE_OPERAND (*expr_p
, 0);
5660 tree op1
= TREE_OPERAND (*expr_p
, 1);
5662 tree type
= TREE_TYPE (op0
);
5663 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
5665 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
5666 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
5669 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
/* NOTE(review): line-mangled extraction — the enclosing do/while braces and
   several lines are missing from this view.  Code kept byte-identical.  */
5674 /* Gimplify an expression sequence. This function gimplifies each
5675 expression and rewrites the original expression with the last
5676 expression of the sequence in GIMPLE form.
5678 PRE_P points to the list where the side effects for all the
5679 expressions in the sequence will be emitted.
5681 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5683 static enum gimplify_status
5684 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
5690 tree
*sub_p
= &TREE_OPERAND (t
, 0);
/* Nested COMPOUND_EXPRs on the LHS are flattened recursively with
   want_value == false: only their side effects matter.  */
5692 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
5693 gimplify_compound_expr (sub_p
, pre_p
, false);
5695 gimplify_stmt (sub_p
, pre_p
);
5697 t
= TREE_OPERAND (t
, 1);
5699 while (TREE_CODE (t
) == COMPOUND_EXPR
);
/* When the value is unused, the final expression is gimplified as a
   statement too.  */
5706 gimplify_stmt (expr_p
, pre_p
);
/* NOTE(review): line-mangled extraction — braces and the trailing return are
   missing from this view.  Code kept byte-identical; comments only.  */
5711 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5712 gimplify. After gimplification, EXPR_P will point to a new temporary
5713 that holds the original value of the SAVE_EXPR node.
5715 PRE_P points to the list where side effects that must happen before
5716 *EXPR_P should be stored. */
5718 static enum gimplify_status
5719 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5721 enum gimplify_status ret
= GS_ALL_DONE
;
5724 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
5725 val
= TREE_OPERAND (*expr_p
, 0);
5727 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5728 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
5730 /* The operand may be a void-valued expression such as SAVE_EXPRs
5731 generated by the Java frontend for class initialization. It is
5732 being executed only for its side-effects. */
5733 if (TREE_TYPE (val
) == void_type_node
)
5735 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
5736 is_gimple_stmt
, fb_none
);
5740 /* The temporary may not be an SSA name as later abnormal and EH
5741 control flow may invalidate use/def domination. */
5742 val
= get_initialized_tmp_var (val
, pre_p
, post_p
, false);
/* Cache the temporary back into the SAVE_EXPR so subsequent visits
   reuse it rather than re-evaluating the operand.  */
5744 TREE_OPERAND (*expr_p
, 0) = val
;
5745 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
/* NOTE(review): line-mangled extraction — case labels (INDIRECT_REF,
   COMPONENT_REF/default), braces, returns and the do_indirect_ref label
   target are missing from this view.  Code kept byte-identical.  */
5753 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5760 PRE_P points to the list where side effects that must happen before
5761 *EXPR_P should be stored.
5763 POST_P points to the list where side effects that must happen after
5764 *EXPR_P should be stored. */
5766 static enum gimplify_status
5767 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5769 tree expr
= *expr_p
;
5770 tree op0
= TREE_OPERAND (expr
, 0);
5771 enum gimplify_status ret
;
5772 location_t loc
= EXPR_LOCATION (*expr_p
);
5774 switch (TREE_CODE (op0
))
5778 /* Check if we are dealing with an expression of the form '&*ptr'.
5779 While the front end folds away '&*ptr' into 'ptr', these
5780 expressions may be generated internally by the compiler (e.g.,
5781 builtins like __builtin_va_end). */
5782 /* Caution: the silent array decomposition semantics we allow for
5783 ADDR_EXPR means we can't always discard the pair. */
5784 /* Gimplification of the ADDR_EXPR operand may drop
5785 cv-qualification conversions, so make sure we add them if
5788 tree op00
= TREE_OPERAND (op0
, 0);
5789 tree t_expr
= TREE_TYPE (expr
);
5790 tree t_op00
= TREE_TYPE (op00
);
5792 if (!useless_type_conversion_p (t_expr
, t_op00
))
5793 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
5799 case VIEW_CONVERT_EXPR
:
5800 /* Take the address of our operand and then convert it to the type of
5803 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5804 all clear. The impact of this transformation is even less clear. */
5806 /* If the operand is a useless conversion, look through it. Doing so
5807 guarantees that the ADDR_EXPR and its operand will remain of the
5809 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
5810 op0
= TREE_OPERAND (op0
, 0);
5812 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
5813 build_fold_addr_expr_loc (loc
,
5814 TREE_OPERAND (op0
, 0)));
/* Presumably inside a MEM_REF case here: a zero offset lets the
   MEM_REF be treated as a plain indirect ref — TODO confirm against
   upstream; the case label is missing from this extraction.  */
5819 if (integer_zerop (TREE_OPERAND (op0
, 1)))
5820 goto do_indirect_ref
;
5825 /* If we see a call to a declared builtin or see its address
5826 being taken (we can unify those cases here) then we can mark
5827 the builtin for implicit generation by GCC. */
5828 if (TREE_CODE (op0
) == FUNCTION_DECL
5829 && DECL_BUILT_IN_CLASS (op0
) == BUILT_IN_NORMAL
5830 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
5831 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
5833 /* We use fb_either here because the C frontend sometimes takes
5834 the address of a call that returns a struct; see
5835 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5836 the implied temporary explicit. */
5838 /* Make the operand addressable. */
5839 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
5840 is_gimple_addressable
, fb_either
);
5841 if (ret
== GS_ERROR
)
5844 /* Then mark it. Beware that it may not be possible to do so directly
5845 if a temporary has been created by the gimplification. */
5846 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
5848 op0
= TREE_OPERAND (expr
, 0);
5850 /* For various reasons, the gimplification of the expression
5851 may have made a new INDIRECT_REF. */
5852 if (TREE_CODE (op0
) == INDIRECT_REF
)
5853 goto do_indirect_ref
;
5855 mark_addressable (TREE_OPERAND (expr
, 0));
5857 /* The FEs may end up building ADDR_EXPRs early on a decl with
5858 an incomplete type. Re-build ADDR_EXPRs in canonical form
5860 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
5861 *expr_p
= build_fold_addr_expr (op0
);
5863 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5864 recompute_tree_invariant_for_addr_expr (*expr_p
);
5866 /* If we re-built the ADDR_EXPR add a conversion to the original type
5868 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
5869 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
/* NOTE(review): line-mangled extraction — many original lines (braces, some
   declarations such as 'expr', 'i', 'link', 'buf', 'stmt', error-path
   returns) are missing from this view.  Code kept byte-identical; comments
   only.  Also note a mojibake artifact below: '&reg_p' appears as an HTML
   entity ('®_p') — must be restored when recovering the real source.  */
5877 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5878 value; output operands should be a gimple lvalue. */
5880 static enum gimplify_status
5881 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5885 const char **oconstraints
;
5888 const char *constraint
;
5889 bool allows_mem
, allows_reg
, is_inout
;
5890 enum gimplify_status ret
, tret
;
5892 vec
<tree
, va_gc
> *inputs
;
5893 vec
<tree
, va_gc
> *outputs
;
5894 vec
<tree
, va_gc
> *clobbers
;
5895 vec
<tree
, va_gc
> *labels
;
5899 noutputs
= list_length (ASM_OUTPUTS (expr
));
5900 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
/* --- Pass 1: gimplify output operands to lvalues. --- */
5908 link_next
= NULL_TREE
;
5909 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
5912 size_t constraint_len
;
5914 link_next
= TREE_CHAIN (link
);
5918 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
5919 constraint_len
= strlen (constraint
);
5920 if (constraint_len
== 0)
5923 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
5924 &allows_mem
, &allows_reg
, &is_inout
);
5931 if (!allows_reg
&& allows_mem
)
5932 mark_addressable (TREE_VALUE (link
));
5934 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
5935 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
5936 fb_lvalue
| fb_mayfail
);
5937 if (tret
== GS_ERROR
)
5939 error ("invalid lvalue in asm output %d", i
);
5943 /* If the constraint does not allow memory make sure we gimplify
5944 it to a register if it is not already but its base is. This
5945 happens for complex and vector components. */
5948 tree op
= TREE_VALUE (link
);
5949 if (! is_gimple_val (op
)
5950 && is_gimple_reg_type (TREE_TYPE (op
))
5951 && is_gimple_reg (get_base_address (op
)))
5953 tree tem
= create_tmp_reg (TREE_TYPE (op
));
/* Copy through a temporary register: load before the asm (pre_p),
   store the result back after it (post_p).  */
5957 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
5958 tem
, unshare_expr (op
));
5959 gimplify_and_add (ass
, pre_p
);
5961 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
5962 gimplify_and_add (ass
, post_p
);
5964 TREE_VALUE (link
) = tem
;
5969 vec_safe_push (outputs
, link
);
5970 TREE_CHAIN (link
) = NULL_TREE
;
5974 /* An input/output operand. To give the optimizers more
5975 flexibility, split it into separate input and output
5978 /* Buffer big enough to format a 32-bit UINT_MAX into. */
5981 /* Turn the in/out constraint into an output constraint. */
5982 char *p
= xstrdup (constraint
);
5984 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
5986 /* And add a matching input constraint. */
5989 sprintf (buf
, "%u", i
);
5991 /* If there are multiple alternatives in the constraint,
5992 handle each of them individually. Those that allow register
5993 will be replaced with operand number, the others will stay
5995 if (strchr (p
, ',') != NULL
)
/* First pass over alternatives: compute the length needed for the
   rewritten constraint string.  */
5997 size_t len
= 0, buflen
= strlen (buf
);
5998 char *beg
, *end
, *str
, *dst
;
6002 end
= strchr (beg
, ',');
6004 end
= strchr (beg
, '\0');
6005 if ((size_t) (end
- beg
) < buflen
)
6008 len
+= end
- beg
+ 1;
/* Second pass: build the new constraint, substituting the matching
   operand number for alternatives that allow a register.  */
6015 str
= (char *) alloca (len
);
6016 for (beg
= p
+ 1, dst
= str
;;)
6019 bool mem_p
, reg_p
, inout_p
;
6021 end
= strchr (beg
, ',');
6026 parse_output_constraint (&tem
, i
, 0, 0,
6027 &mem_p
, ®_p
, &inout_p
);
6032 memcpy (dst
, buf
, buflen
);
6041 memcpy (dst
, beg
, len
);
6050 input
= build_string (dst
- str
, str
);
6053 input
= build_string (strlen (buf
), buf
);
6056 input
= build_string (constraint_len
- 1, constraint
+ 1);
6060 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
6061 unshare_expr (TREE_VALUE (link
)));
6062 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
/* --- Pass 2: gimplify input operands. --- */
6066 link_next
= NULL_TREE
;
6067 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
6069 link_next
= TREE_CHAIN (link
);
6070 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6071 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6072 oconstraints
, &allows_mem
, &allows_reg
);
6074 /* If we can't make copies, we can only accept memory. */
6075 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link
))))
6081 error ("impossible constraint in %<asm%>");
6082 error ("non-memory input %d must stay in memory", i
);
6087 /* If the operand is a memory input, it should be an lvalue. */
6088 if (!allows_reg
&& allows_mem
)
6090 tree inputv
= TREE_VALUE (link
);
6091 STRIP_NOPS (inputv
);
6092 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
6093 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
6094 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
6095 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
6096 || TREE_CODE (inputv
) == MODIFY_EXPR
)
6097 TREE_VALUE (link
) = error_mark_node
;
6098 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6099 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
6100 if (tret
!= GS_ERROR
)
6102 /* Unlike output operands, memory inputs are not guaranteed
6103 to be lvalues by the FE, and while the expressions are
6104 marked addressable there, if it is e.g. a statement
6105 expression, temporaries in it might not end up being
6106 addressable. They might be already used in the IL and thus
6107 it is too late to make them addressable now though. */
6108 tree x
= TREE_VALUE (link
);
6109 while (handled_component_p (x
))
6110 x
= TREE_OPERAND (x
, 0);
6111 if (TREE_CODE (x
) == MEM_REF
6112 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
6113 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
6115 || TREE_CODE (x
) == PARM_DECL
6116 || TREE_CODE (x
) == RESULT_DECL
)
6117 && !TREE_ADDRESSABLE (x
)
6118 && is_gimple_reg (x
))
6120 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
6122 "memory input %d is not directly addressable",
6124 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
6127 mark_addressable (TREE_VALUE (link
));
6128 if (tret
== GS_ERROR
)
6130 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
6131 "memory input %d is not directly addressable", i
);
/* Register-allowed input: gimplify to an rvalue suitable for asm.  */
6137 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6138 is_gimple_asm_val
, fb_rvalue
);
6139 if (tret
== GS_ERROR
)
6143 TREE_CHAIN (link
) = NULL_TREE
;
6144 vec_safe_push (inputs
, link
);
/* --- Collect clobbers and goto labels unchanged. --- */
6147 link_next
= NULL_TREE
;
6148 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
6150 link_next
= TREE_CHAIN (link
);
6151 TREE_CHAIN (link
) = NULL_TREE
;
6152 vec_safe_push (clobbers
, link
);
6155 link_next
= NULL_TREE
;
6156 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
6158 link_next
= TREE_CHAIN (link
);
6159 TREE_CHAIN (link
) = NULL_TREE
;
6160 vec_safe_push (labels
, link
);
6163 /* Do not add ASMs with errors to the gimple IL stream. */
6164 if (ret
!= GS_ERROR
)
6166 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
6167 inputs
, outputs
, clobbers
, labels
);
/* An asm without outputs is treated as volatile, matching GCC's
   documented extended-asm semantics.  */
6169 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
6170 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
6172 gimplify_seq_add_stmt (pre_p
, stmt
);
/* NOTE(review): line-mangled extraction — braces, the gsi_next else-arm and
   the trailing return are missing from this view.  Code kept byte-identical;
   comments only.  */
6178 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6179 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6180 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6181 return to this function.
6183 FIXME should we complexify the prequeue handling instead? Or use flags
6184 for all the cleanups and let the optimizer tighten them up? The current
6185 code seems pretty fragile; it will break on a cleanup within any
6186 non-conditional nesting. But any such nesting would be broken, anyway;
6187 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6188 and continues out of it. We can do that at the RTL level, though, so
6189 having an optimizer to tighten up try/finally regions would be a Good
6192 static enum gimplify_status
6193 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
6195 gimple_stmt_iterator iter
;
6196 gimple_seq body_sequence
= NULL
;
6198 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
6200 /* We only care about the number of conditions between the innermost
6201 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6202 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6203 int old_conds
= gimplify_ctxp
->conditions
;
6204 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
6205 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
6206 gimplify_ctxp
->conditions
= 0;
6207 gimplify_ctxp
->conditional_cleanups
= NULL
;
6208 gimplify_ctxp
->in_cleanup_point_expr
= true;
6210 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
/* Restore the saved gimplify context state after gimplifying the body.  */
6212 gimplify_ctxp
->conditions
= old_conds
;
6213 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
6214 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
/* Scan the body and turn each GIMPLE_WITH_CLEANUP_EXPR into a
   GIMPLE_TRY wrapping the remainder of the sequence.  */
6216 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
6218 gimple
*wce
= gsi_stmt (iter
);
6220 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
6222 if (gsi_one_before_end_p (iter
))
6224 /* Note that gsi_insert_seq_before and gsi_remove do not
6225 scan operands, unlike some other sequence mutators. */
6226 if (!gimple_wce_cleanup_eh_only (wce
))
6227 gsi_insert_seq_before_without_update (&iter
,
6228 gimple_wce_cleanup (wce
),
6230 gsi_remove (&iter
, true);
6237 enum gimple_try_flags kind
;
6239 if (gimple_wce_cleanup_eh_only (wce
))
6240 kind
= GIMPLE_TRY_CATCH
;
6242 kind
= GIMPLE_TRY_FINALLY
;
6243 seq
= gsi_split_seq_after (iter
);
6245 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
6246 /* Do not use gsi_replace here, as it may scan operands.
6247 We want to do a simple structural modification only. */
6248 gsi_set_stmt (&iter
, gtry
);
6249 iter
= gsi_start (gtry
->eval
);
6256 gimplify_seq_add_seq (pre_p
, body_sequence
);
/* NOTE(review): line-mangled extraction — return type line, braces, the
   error-bail on seen_error(), and the 'gimple *wce' declaration are missing
   from this view.  Code kept byte-identical; comments only.  */
6269 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6270 is the cleanup action required. EH_ONLY is true if the cleanup should
6271 only be executed if an exception is thrown, not on normal exit. */
6274 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
)
6277 gimple_seq cleanup_stmts
= NULL
;
6279 /* Errors can result in improperly nested cleanups. Which results in
6280 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6284 if (gimple_conditional_context ())
6286 /* If we're in a conditional context, this is more complex. We only
6287 want to run the cleanup if we actually ran the initialization that
6288 necessitates it, but we want to run it after the end of the
6289 conditional context. So we wrap the try/finally around the
6290 condition and use a flag to determine whether or not to actually
6291 run the destructor. Thus
6295 becomes (approximately)
6299 if (test) { A::A(temp); flag = 1; val = f(temp); }
6302 if (flag) A::~A(temp);
6306 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
6307 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
6308 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
/* Guard the cleanup on the flag so it only runs if initialization ran.  */
6310 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
6311 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6312 wce
= gimple_build_wce (cleanup_stmts
);
6314 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
6315 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6316 gimplify_seq_add_stmt (pre_p
, ftrue
);
6318 /* Because of this manipulation, and the EH edges that jump
6319 threading cannot redirect, the temporary (VAR) will appear
6320 to be used uninitialized. Don't warn. */
6321 TREE_NO_WARNING (var
) = 1;
/* Unconditional context: emit the cleanup marker directly.  */
6325 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6326 wce
= gimple_build_wce (cleanup_stmts
);
6327 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
6328 gimplify_seq_add_stmt (pre_p
, wce
);
/* NOTE(review): line-mangled extraction — braces, else-arms, constructor
   second argument (line 6406), COMPOUND_EXPR second operand (line 6411) and
   returns are missing from this view.  Code kept byte-identical.  */
6332 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6334 static enum gimplify_status
6335 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6337 tree targ
= *expr_p
;
6338 tree temp
= TARGET_EXPR_SLOT (targ
);
6339 tree init
= TARGET_EXPR_INITIAL (targ
);
6340 enum gimplify_status ret
;
6342 bool unpoison_empty_seq
= false;
6343 gimple_stmt_iterator unpoison_it
;
6347 tree cleanup
= NULL_TREE
;
6349 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6350 to the temps list. Handle also variable length TARGET_EXPRs. */
6351 if (TREE_CODE (DECL_SIZE (temp
)) != INTEGER_CST
)
6353 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
6354 gimplify_type_sizes (TREE_TYPE (temp
), pre_p
);
6355 gimplify_vla_decl (temp
, pre_p
);
6359 /* Save location where we need to place unpoisoning. It's possible
6360 that a variable will be converted to needs_to_live_in_memory. */
6361 unpoison_it
= gsi_last (*pre_p
);
6362 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
6364 gimple_add_tmp_var (temp
);
6367 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6368 expression is supposed to initialize the slot. */
6369 if (VOID_TYPE_P (TREE_TYPE (init
)))
6370 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6373 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
6375 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6377 ggc_free (init_expr
);
6379 if (ret
== GS_ERROR
)
6381 /* PR c++/28266 Make sure this is expanded only once. */
6382 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6386 gimplify_and_add (init
, pre_p
);
6388 /* If needed, push the cleanup for the temp. */
6389 if (TARGET_EXPR_CLEANUP (targ
))
6391 if (CLEANUP_EH_ONLY (targ
))
6392 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
6393 CLEANUP_EH_ONLY (targ
), pre_p
);
6395 cleanup
= TARGET_EXPR_CLEANUP (targ
);
6398 /* Add a clobber for the temporary going out of scope, like
6399 gimplify_bind_expr. */
6400 if (gimplify_ctxp
->in_cleanup_point_expr
6401 && needs_to_live_in_memory (temp
))
6403 if (flag_stack_reuse
== SR_ALL
)
6405 tree clobber
= build_constructor (TREE_TYPE (temp
),
6407 TREE_THIS_VOLATILE (clobber
) = true;
6408 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
6410 cleanup
= build2 (COMPOUND_EXPR
, void_type_node
, cleanup
,
/* ASan use-after-scope: poison the slot at end of scope, with the
   matching unpoison placed at the saved insertion point.  */
6415 if (asan_sanitize_use_after_scope ()
6416 && dbg_cnt (asan_use_after_scope
))
6418 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
6421 if (unpoison_empty_seq
)
6422 unpoison_it
= gsi_start (*pre_p
);
6424 asan_poison_variable (temp
, false, &unpoison_it
,
6425 unpoison_empty_seq
);
6426 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
6431 gimple_push_cleanup (temp
, cleanup
, false, pre_p
);
6433 /* Only expand this once. */
6434 TREE_OPERAND (targ
, 3) = init
;
6435 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6438 /* We should have expanded this before. */
6439 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
/* NOTE(review): line-mangled extraction — the 'bool' return-type line and
   braces are missing from this view.  Code kept byte-identical.  */
6445 /* Gimplification of expression trees. */
6447 /* Gimplify an expression which appears at statement context. The
6448 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6449 NULL, a new sequence is allocated.
6451 Return true if we actually added a statement to the queue. */
6454 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
6456 gimple_seq_node last
;
/* Compare the sequence tail before and after gimplification to detect
   whether anything was emitted.  */
6458 last
= gimple_seq_last (*seq_p
);
6459 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
6460 return last
!= gimple_seq_last (*seq_p
);
/* NOTE(review): line-mangled extraction — the 'void' return-type line, the
   enclosing do/while loop structure, braces and the early return are missing
   from this view.  Code kept byte-identical; comments only.  */
6463 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6464 to CTX. If entries already exist, force them to be some flavor of private.
6465 If there is no enclosing parallel, do nothing. */
6468 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
6472 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
/* Walk outward through enclosing OMP contexts (loop body; the loop
   header is missing from this extraction).  */
6477 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6480 if (n
->value
& GOVD_SHARED
)
6481 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
6482 else if (n
->value
& GOVD_MAP
)
6483 n
->value
|= GOVD_MAP_TO_ONLY
;
6487 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
6489 if (ctx
->target_map_scalars_firstprivate
)
6490 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6492 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
6494 else if (ctx
->region_type
!= ORT_WORKSHARE
6495 && ctx
->region_type
!= ORT_SIMD
6496 && ctx
->region_type
!= ORT_ACC
6497 && !(ctx
->region_type
& ORT_TARGET_DATA
))
6498 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6500 ctx
= ctx
->outer_context
;
/* NOTE(review): line-mangled extraction — the 'static void' line, braces,
   several case labels (INTEGER_TYPE, ENUMERAL_TYPE, ARRAY_TYPE,
   RECORD_TYPE/UNION_TYPE, POINTER_TYPE, default) and break statements are
   missing from this view.  Code kept byte-identical; comments only.  */
6505 /* Similarly for each of the type sizes of TYPE. */
6508 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
6510 if (type
== NULL
|| type
== error_mark_node
)
6512 type
= TYPE_MAIN_VARIANT (type
);
/* 'add' returns true if TYPE was already in the set — bail to avoid
   privatizing the same type's sizes twice (and to avoid recursion).  */
6514 if (ctx
->privatized_types
->add (type
))
6517 switch (TREE_CODE (type
))
6523 case FIXED_POINT_TYPE
:
6524 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
6525 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
6529 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6530 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
6535 case QUAL_UNION_TYPE
:
6538 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
6539 if (TREE_CODE (field
) == FIELD_DECL
)
6541 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
6542 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
6548 case REFERENCE_TYPE
:
6549 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
/* After the switch: privatize the type's own size expressions and give
   the frontend a chance to handle language-specific sizes.  */
6556 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
6557 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
6558 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
/* NOTE(review): line-mangled extraction — the 'static void' line, braces,
   several declarations ('splay_tree_node n', 'tree t'), returns and some
   condition lines are missing from this view.  Code kept byte-identical;
   comments only.  */
6561 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6564 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
6567 unsigned int nflags
;
6570 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
6573 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6574 there are constructors involved somewhere. */
6575 if (TREE_ADDRESSABLE (TREE_TYPE (decl
))
6576 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
)))
6579 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6580 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
6582 /* We shouldn't be re-adding the decl with the same data
6584 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
6585 nflags
= n
->value
| flags
;
6586 /* The only combination of data sharing classes we should see is
6587 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6588 reduction variables to be used in data sharing clauses. */
6589 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
6590 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
6591 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
6592 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
6597 /* When adding a variable-sized variable, we have to handle all sorts
6598 of additional bits of data: the pointer replacement variable, and
6599 the parameters of the type. */
6600 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
6602 /* Add the pointer replacement variable as PRIVATE if the variable
6603 replacement is private, else FIRSTPRIVATE since we'll need the
6604 address of the original variable either for SHARED, or for the
6605 copy into or out of the context. */
6606 if (!(flags
& GOVD_LOCAL
))
6608 if (flags
& GOVD_MAP
)
6609 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
6610 else if (flags
& GOVD_PRIVATE
)
6611 nflags
= GOVD_PRIVATE
;
6612 else if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
6613 && (flags
& GOVD_FIRSTPRIVATE
))
6614 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
6616 nflags
= GOVD_FIRSTPRIVATE
;
6617 nflags
|= flags
& GOVD_SEEN
;
/* A VLA's DECL_VALUE_EXPR is expected to be *ptr; recurse on the
   pointer replacement variable itself.  */
6618 t
= DECL_VALUE_EXPR (decl
);
6619 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
6620 t
= TREE_OPERAND (t
, 0);
6621 gcc_assert (DECL_P (t
));
6622 omp_add_variable (ctx
, t
, nflags
);
6625 /* Add all of the variable and type parameters (which should have
6626 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6627 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
6628 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
6629 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
6631 /* The variable-sized variable itself is never SHARED, only some form
6632 of PRIVATE. The sharing would take place via the pointer variable
6633 which we remapped above. */
6634 if (flags
& GOVD_SHARED
)
6635 flags
= GOVD_PRIVATE
| GOVD_DEBUG_PRIVATE
6636 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
6638 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6639 alloca statement we generate for the variable, so make sure it
6640 is available. This isn't automatically needed for the SHARED
6641 case, since we won't be allocating local storage then.
6642 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6643 in this case omp_notice_variable will be called later
6644 on when it is gimplified. */
6645 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
6646 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
6647 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
6649 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
6650 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
6652 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
6654 /* Similar to the direct variable sized case above, we'll need the
6655 size of references being privatized. */
6656 if ((flags
& GOVD_SHARED
) == 0)
6658 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
6660 omp_notice_variable (ctx
, t
, true);
/* Insert or update the decl's entry in this context.  */
6667 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
6669 /* For reductions clauses in OpenACC loop directives, by default create a
6670 copy clause on the enclosing parallel construct for carrying back the
6672 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
6674 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
/* Walk outward looking for the enclosing OpenACC compute construct
   (loop structure partially missing from this extraction).  */
6677 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
6680 /* Ignore local variables and explicitly declared clauses. */
6681 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
6683 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
6685 /* According to the OpenACC spec, such a reduction variable
6686 should already have a copy map on a kernels construct,
6687 verify that here. */
6688 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
6689 && (n
->value
& GOVD_MAP
));
6691 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
6693 /* Remove firstprivate and make it a copy map. */
6694 n
->value
&= ~GOVD_FIRSTPRIVATE
;
6695 n
->value
|= GOVD_MAP
;
6698 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
6700 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
6701 GOVD_MAP
| GOVD_SEEN
);
6704 outer_ctx
= outer_ctx
->outer_context
;
/* NOTE(review): line-mangled extraction — the 'static bool' line, the DECL2
   parameter line, braces, 'n == NULL' guards and return statements are
   missing from this view.  Code kept byte-identical; comments only.  */
6709 /* Notice a threadprivate variable DECL used in OMP context CTX.
6710 This just prints out diagnostics about threadprivate variable uses
6711 in untied tasks. If DECL2 is non-NULL, prevent this warning
6712 on that variable. */
6715 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
6719 struct gimplify_omp_ctx
*octx
;
/* Threadprivate variables are not allowed in target regions at all;
   diagnose once per context by inserting a dummy entry.  */
6721 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
6722 if ((octx
->region_type
& ORT_TARGET
) != 0)
6724 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
6727 error ("threadprivate variable %qE used in target region",
6729 error_at (octx
->location
, "enclosing target region");
6730 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
6733 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
6736 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
/* In an untied task, warn once per decl as well.  */
6738 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6741 error ("threadprivate variable %qE used in untied task",
6743 error_at (ctx
->location
, "enclosing task");
6744 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
6747 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
/* NOTE(review): line-mangled extraction — the 'static bool' line, the
   attr == NULL early return, braces and the return statements are missing
   from this view.  Code kept byte-identical; comments only.  */
6751 /* Return true if global var DECL is device resident. */
6754 device_resident_p (tree decl
)
6756 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
/* Scan the attribute's clause chain for a GOMP_MAP_DEVICE_RESIDENT map.
   Note the walk advances via TREE_PURPOSE, matching upstream.  */
6761 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
6763 tree c
= TREE_VALUE (t
);
6764 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
/* NOTE(review): line-mangled extraction — the 'static unsigned' line, the
   'rtype' variable and its assignments ("parallel"/"task"/"teams"), braces,
   break statements and the trailing return are missing from this view.
   Code kept byte-identical; comments only.  */
6771 /* Determine outer default flags for DECL mentioned in an OMP region
6772 but not declared in an enclosing clause.
6774 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6775 remapped firstprivate instead of shared. To some extent this is
6776 addressed in omp_firstprivatize_type_sizes, but not
6780 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
6781 bool in_code
, unsigned flags
)
6783 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
6784 enum omp_clause_default_kind kind
;
/* A frontend-predetermined sharing overrides the region's default.  */
6786 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
6787 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
6788 default_kind
= kind
;
6790 switch (default_kind
)
6792 case OMP_CLAUSE_DEFAULT_NONE
:
/* default(none): name the enclosing construct kind in the error.  */
6796 if (ctx
->region_type
& ORT_PARALLEL
)
6798 else if (ctx
->region_type
& ORT_TASK
)
6800 else if (ctx
->region_type
& ORT_TEAMS
)
6805 error ("%qE not specified in enclosing %s",
6806 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
6807 error_at (ctx
->location
, "enclosing %s", rtype
);
6810 case OMP_CLAUSE_DEFAULT_SHARED
:
6811 flags
|= GOVD_SHARED
;
6813 case OMP_CLAUSE_DEFAULT_PRIVATE
:
6814 flags
|= GOVD_PRIVATE
;
6816 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
6817 flags
|= GOVD_FIRSTPRIVATE
;
6819 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
6820 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6821 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
6822 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
6824 omp_notice_variable (octx
, decl
, in_code
);
6825 for (; octx
; octx
= octx
->outer_context
)
/* Walk outward to see how the decl is treated in enclosing regions:
   shared there => shared here; otherwise firstprivate.  */
6829 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
6830 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
6831 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
6833 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
6835 flags
|= GOVD_FIRSTPRIVATE
;
6838 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
6840 flags
|= GOVD_SHARED
;
/* No enclosing context gave an answer: locals and parms default to
   firstprivate in a task, globals to shared.  */
6846 if (TREE_CODE (decl
) == PARM_DECL
6847 || (!is_global_var (decl
)
6848 && DECL_CONTEXT (decl
) == current_function_decl
))
6849 flags
|= GOVD_FIRSTPRIVATE
;
6851 flags
|= GOVD_SHARED
;
/* NOTE(review): lossy extract — original lines are split/elided (the
   embedded numbers are the original file's line numbers).  Comments
   annotate only what the visible tokens establish.  */
6863 /* Determine outer default flags for DECL mentioned in an OACC region
6864 but not declared in an enclosing clause. */
6867 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
6870 bool on_device
= false;
6871 tree type
= TREE_TYPE (decl
);
/* For by-reference privatization, look through the reference type.  */
6873 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
6874 type
= TREE_TYPE (type
);
/* Device-resident globals in parallel/kernels regions are mapped
   to-only (no copy back).  */
6876 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
6877 && is_global_var (decl
)
6878 && device_resident_p (decl
))
6881 flags
|= GOVD_MAP_TO_ONLY
;
6884 switch (ctx
->region_type
)
6889 case ORT_ACC_KERNELS
:
6890 /* Scalars are default 'copy' under kernels, non-scalars are default
6891 'present_or_copy'. */
6893 if (!AGGREGATE_TYPE_P (type
))
6894 flags
|= GOVD_MAP_FORCE
;
6899 case ORT_ACC_PARALLEL
:
6901 if (on_device
|| AGGREGATE_TYPE_P (type
))
6902 /* Aggregates default to 'present_or_copy'. */
6905 /* Scalars default to 'firstprivate'. */
6906 flags
|= GOVD_FIRSTPRIVATE
;
/* Diagnose default(none): compiler-generated decls are exempt;
   rkind (the region name) is set on elided lines.  */
6912 if (DECL_ARTIFICIAL (decl
))
6913 ; /* We can get compiler-generated decls, and should not complain
6915 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
6917 error ("%qE not specified in enclosing OpenACC %qs construct",
6918 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rkind
);
6919 inform (ctx
->location
, "enclosing OpenACC %qs construct", rkind
);
6922 gcc_checking_assert (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
/* NOTE(review): lossy extract — original lines are split/elided (the
   embedded numbers are the original file's line numbers); several
   conditions and braces are missing.  Comments annotate only what the
   visible tokens establish.  */
6927 /* Record the fact that DECL was used within the OMP context CTX.
6928 IN_CODE is true when real code uses DECL, and false when we should
6929 merely emit default(none) errors. Return true if DECL is going to
6930 be remapped and thus DECL shouldn't be gimplified into its
6931 DECL_VALUE_EXPR (if any). */
6934 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
6937 unsigned flags
= in_code
? GOVD_SEEN
: 0;
6938 bool ret
= false, shared
;
6940 if (error_operand_p (decl
))
/* Outside any OMP region, just ask the frontend whether the
   DECL_VALUE_EXPR should be disregarded.  */
6943 if (ctx
->region_type
== ORT_NONE
)
6944 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
6946 if (is_global_var (decl
))
6948 /* Threadprivate variables are predetermined. */
6949 if (DECL_THREAD_LOCAL_P (decl
))
6950 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
6952 if (DECL_HAS_VALUE_EXPR_P (decl
))
6954 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
6956 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
6957 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
/* Inside an OpenACC 'routine' function, globals need a 'declare'
   directive; diagnose 'link'-clause and undeclared uses.  */
6960 if (gimplify_omp_ctxp
->outer_context
== NULL
6962 && get_oacc_fn_attrib (current_function_decl
))
6964 location_t loc
= DECL_SOURCE_LOCATION (decl
);
6966 if (lookup_attribute ("omp declare target link",
6967 DECL_ATTRIBUTES (decl
)))
6970 "%qE with %<link%> clause used in %<routine%> function",
6974 else if (!lookup_attribute ("omp declare target",
6975 DECL_ATTRIBUTES (decl
)))
6978 "%qE requires a %<declare%> directive for use "
6979 "in a %<routine%> function", DECL_NAME (decl
));
/* Look DECL up in this context's splay tree of noticed variables.  */
6985 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
/* Target (offload) regions: decide how DECL gets mapped.  */
6986 if ((ctx
->region_type
& ORT_TARGET
) != 0)
6988 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, true);
6991 unsigned nflags
= flags
;
6992 if (ctx
->target_map_pointers_as_0len_arrays
6993 || ctx
->target_map_scalars_firstprivate
)
6995 bool is_declare_target
= false;
6996 bool is_scalar
= false;
/* Offloadable globals: scan outer contexts to see whether DECL is
   already given a data-sharing class somewhere.  */
6997 if (is_global_var (decl
)
6998 && varpool_node::get_create (decl
)->offloadable
)
7000 struct gimplify_omp_ctx
*octx
;
7001 for (octx
= ctx
->outer_context
;
7002 octx
; octx
= octx
->outer_context
)
7004 n
= splay_tree_lookup (octx
->variables
,
7005 (splay_tree_key
)decl
);
7007 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
7008 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7011 is_declare_target
= octx
== NULL
;
7013 if (!is_declare_target
&& ctx
->target_map_scalars_firstprivate
)
7014 is_scalar
= lang_hooks
.decls
.omp_scalar_p (decl
);
7015 if (is_declare_target
)
/* Pointers (and references to pointers) map as zero-length arrays;
   otherwise scalars become firstprivate.  */
7017 else if (ctx
->target_map_pointers_as_0len_arrays
7018 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
7019 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
7020 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
7022 nflags
|= GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
7024 nflags
|= GOVD_FIRSTPRIVATE
;
7027 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
7028 if ((ctx
->region_type
& ORT_ACC
) && octx
)
7030 /* Look in outer OpenACC contexts, to see if there's a
7031 data attribute for this variable. */
7032 omp_notice_variable (octx
, decl
, in_code
);
7034 for (; octx
; octx
= octx
->outer_context
)
7036 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
7039 = splay_tree_lookup (octx
->variables
,
7040 (splay_tree_key
) decl
);
7043 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
7044 error ("variable %qE declared in enclosing "
7045 "%<host_data%> region", DECL_NAME (decl
));
7047 if (octx
->region_type
== ORT_ACC_DATA
7048 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
7049 nflags
|= GOVD_MAP_0LEN_ARRAY
;
/* Verify the (possibly dereferenced) type is mappable before
   defaulting to an explicit map.  */
7056 tree type
= TREE_TYPE (decl
);
7059 && gimplify_omp_ctxp
->target_firstprivatize_array_bases
7060 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7061 type
= TREE_TYPE (type
);
7063 && !lang_hooks
.types
.omp_mappable_type (type
))
7065 error ("%qD referenced in target region does not have "
7066 "a mappable type", decl
);
7067 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
7069 else if (nflags
== flags
)
7071 if ((ctx
->region_type
& ORT_ACC
) != 0)
7072 nflags
= oacc_default_clause (ctx
, decl
, flags
);
7078 omp_add_variable (ctx
, decl
, nflags
);
7082 /* If nothing changed, there's nothing left to do. */
7083 if ((n
->value
& flags
) == flags
)
/* Non-target regions: compute default data sharing if applicable.  */
7093 if (ctx
->region_type
== ORT_WORKSHARE
7094 || ctx
->region_type
== ORT_SIMD
7095 || ctx
->region_type
== ORT_ACC
7096 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
7099 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
7101 if ((flags
& GOVD_PRIVATE
)
7102 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
7103 flags
|= GOVD_PRIVATE_OUTER_REF
;
7105 omp_add_variable (ctx
, decl
, flags
);
7107 shared
= (flags
& GOVD_SHARED
) != 0;
7108 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
/* First real use of DECL here: mark any variable-sized-decl helper
   (via DECL_VALUE_EXPR) or reference size unit as GOVD_SEEN too.  */
7112 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
7113 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
7114 && DECL_SIZE (decl
))
7116 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7119 tree t
= DECL_VALUE_EXPR (decl
);
7120 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7121 t
= TREE_OPERAND (t
, 0);
7122 gcc_assert (DECL_P (t
));
7123 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7124 n2
->value
|= GOVD_SEEN
;
7126 else if (lang_hooks
.decls
.omp_privatize_by_reference (decl
)
7127 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
7128 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
7132 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7133 gcc_assert (DECL_P (t
));
7134 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7136 n2
->value
|= GOVD_SEEN
;
7140 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
7141 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7143 /* If nothing changed, there's nothing left to do. */
7144 if ((n
->value
& flags
) == flags
)
7150 /* If the variable is private in the current context, then we don't
7151 need to propagate anything to an outer context. */
7152 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
7154 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7155 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7157 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
7158 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7159 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
/* Propagate the notice recursively to the enclosing context.  */
7161 if (ctx
->outer_context
7162 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
/* NOTE(review): lossy extract — original lines are split/elided (the
   embedded numbers are the original file's line numbers); diagnostic
   argument lines and returns are among the elided parts.  */
7167 /* Verify that DECL is private within CTX. If there's specific information
7168 to the contrary in the innermost scope, generate an error. */
7171 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
7175 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
/* DECL already noticed as shared here: diagnose (in the innermost
   scope) and force it private.  */
7178 if (n
->value
& GOVD_SHARED
)
7180 if (ctx
== gimplify_omp_ctxp
)
7183 error ("iteration variable %qE is predetermined linear",
7186 error ("iteration variable %qE should be private",
7188 n
->value
= GOVD_PRIVATE
;
/* Explicit clause on this construct (or on a combined parallel whose
   inner construct is the current context): check the clause kind is
   compatible with an iteration variable.  SIMD selects which checks
   apply (0 = non-simd, 1 = simd, 2 appears to be a third variant —
   TODO confirm against full source).  */
7194 else if ((n
->value
& GOVD_EXPLICIT
) != 0
7195 && (ctx
== gimplify_omp_ctxp
7196 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7197 && gimplify_omp_ctxp
->outer_context
== ctx
)))
7199 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
7200 error ("iteration variable %qE should not be firstprivate",
7202 else if ((n
->value
& GOVD_REDUCTION
) != 0)
7203 error ("iteration variable %qE should not be reduction",
7205 else if (simd
== 0 && (n
->value
& GOVD_LINEAR
) != 0)
7206 error ("iteration variable %qE should not be linear",
7208 else if (simd
== 1 && (n
->value
& GOVD_LASTPRIVATE
) != 0)
7209 error ("iteration variable %qE should not be lastprivate",
7211 else if (simd
&& (n
->value
& GOVD_PRIVATE
) != 0)
7212 error ("iteration variable %qE should not be private",
7214 else if (simd
== 2 && (n
->value
& GOVD_LINEAR
) != 0)
7215 error ("iteration variable %qE is predetermined linear",
7218 return (ctx
== gimplify_omp_ctxp
7219 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7220 && gimplify_omp_ctxp
->outer_context
== ctx
));
/* Otherwise recurse to the enclosing workshare/simd/acc context.  */
7223 if (ctx
->region_type
!= ORT_WORKSHARE
7224 && ctx
->region_type
!= ORT_SIMD
7225 && ctx
->region_type
!= ORT_ACC
)
7227 else if (ctx
->outer_context
)
7228 return omp_is_private (ctx
->outer_context
, decl
, simd
);
/* NOTE(review): lossy extract — original lines are split/elided (the
   embedded numbers are the original file's line numbers); the loop
   body's opening `do` and several returns are among the elided parts.  */
7232 /* Return true if DECL is private within a parallel region
7233 that binds to the current construct's context or in parallel
7234 region's REDUCTION clause. */
7237 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
/* Walk outward through workshare/simd/acc contexts (do/while below).  */
7243 ctx
= ctx
->outer_context
;
7246 if (is_global_var (decl
))
7249 /* References might be private, but might be shared too,
7250 when checking for copyprivate, assume they might be
7251 private, otherwise assume they might be shared. */
7255 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7258 /* Treat C++ privatized non-static data members outside
7259 of the privatization the same. */
7260 if (omp_member_access_dummy_var (decl
))
7266 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
7268 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7269 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7274 if ((n
->value
& GOVD_LOCAL
) != 0
7275 && omp_member_access_dummy_var (decl
))
/* DECL is private iff it has no shared data-sharing class here.  */
7277 return (n
->value
& GOVD_SHARED
) == 0;
7280 while (ctx
->region_type
== ORT_WORKSHARE
7281 || ctx
->region_type
== ORT_SIMD
7282 || ctx
->region_type
== ORT_ACC
);
/* NOTE(review): lossy extract — the declaration of `t` (presumably
   `tree t = *tp;`), the return statements, and the setting of
   *walk_subtrees are among the elided original lines.  */
7286 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7289 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
7293 /* If this node has been visited, unmark it and keep looking. */
/* Match: a DECL_EXPR whose declared decl is DATA (passed through
   walk_tree's opaque pointer).  */
7294 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
/* Types and decls need not be walked into.  */
7297 if (IS_TYPE_OR_DECL_P (t
))
7302 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7303 and previous omp contexts. */
7306 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
7307 enum omp_region_type region_type
,
7308 enum tree_code code
)
7310 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
7312 hash_map
<tree
, tree
> *struct_map_to_clause
= NULL
;
7313 tree
*prev_list_p
= NULL
;
7315 ctx
= new_omp_context (region_type
);
7316 outer_ctx
= ctx
->outer_context
;
7317 if (code
== OMP_TARGET
)
7319 if (!lang_GNU_Fortran ())
7320 ctx
->target_map_pointers_as_0len_arrays
= true;
7321 ctx
->target_map_scalars_firstprivate
= true;
7323 if (!lang_GNU_Fortran ())
7327 case OMP_TARGET_DATA
:
7328 case OMP_TARGET_ENTER_DATA
:
7329 case OMP_TARGET_EXIT_DATA
:
7330 case OACC_HOST_DATA
:
7331 ctx
->target_firstprivatize_array_bases
= true;
7336 while ((c
= *list_p
) != NULL
)
7338 bool remove
= false;
7339 bool notice_outer
= true;
7340 const char *check_non_private
= NULL
;
7344 switch (OMP_CLAUSE_CODE (c
))
7346 case OMP_CLAUSE_PRIVATE
:
7347 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
7348 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
7350 flags
|= GOVD_PRIVATE_OUTER_REF
;
7351 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
7354 notice_outer
= false;
7356 case OMP_CLAUSE_SHARED
:
7357 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
7359 case OMP_CLAUSE_FIRSTPRIVATE
:
7360 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
7361 check_non_private
= "firstprivate";
7363 case OMP_CLAUSE_LASTPRIVATE
:
7364 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
7365 check_non_private
= "lastprivate";
7366 decl
= OMP_CLAUSE_DECL (c
);
7367 if (error_operand_p (decl
))
7370 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
7371 || outer_ctx
->region_type
== ORT_COMBINED_TEAMS
)
7372 && splay_tree_lookup (outer_ctx
->variables
,
7373 (splay_tree_key
) decl
) == NULL
)
7375 omp_add_variable (outer_ctx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
7376 if (outer_ctx
->outer_context
)
7377 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
7380 && (outer_ctx
->region_type
& ORT_TASK
) != 0
7381 && outer_ctx
->combined_loop
7382 && splay_tree_lookup (outer_ctx
->variables
,
7383 (splay_tree_key
) decl
) == NULL
)
7385 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
7386 if (outer_ctx
->outer_context
)
7387 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
7390 && (outer_ctx
->region_type
== ORT_WORKSHARE
7391 || outer_ctx
->region_type
== ORT_ACC
)
7392 && outer_ctx
->combined_loop
7393 && splay_tree_lookup (outer_ctx
->variables
,
7394 (splay_tree_key
) decl
) == NULL
7395 && !omp_check_private (outer_ctx
, decl
, false))
7397 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
7398 if (outer_ctx
->outer_context
7399 && (outer_ctx
->outer_context
->region_type
7400 == ORT_COMBINED_PARALLEL
)
7401 && splay_tree_lookup (outer_ctx
->outer_context
->variables
,
7402 (splay_tree_key
) decl
) == NULL
)
7404 struct gimplify_omp_ctx
*octx
= outer_ctx
->outer_context
;
7405 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
7406 if (octx
->outer_context
)
7408 octx
= octx
->outer_context
;
7409 if (octx
->region_type
== ORT_WORKSHARE
7410 && octx
->combined_loop
7411 && splay_tree_lookup (octx
->variables
,
7412 (splay_tree_key
) decl
) == NULL
7413 && !omp_check_private (octx
, decl
, false))
7415 omp_add_variable (octx
, decl
,
7416 GOVD_LASTPRIVATE
| GOVD_SEEN
);
7417 octx
= octx
->outer_context
;
7419 && octx
->region_type
== ORT_COMBINED_TEAMS
7420 && (splay_tree_lookup (octx
->variables
,
7421 (splay_tree_key
) decl
)
7424 omp_add_variable (octx
, decl
,
7425 GOVD_SHARED
| GOVD_SEEN
);
7426 octx
= octx
->outer_context
;
7430 omp_notice_variable (octx
, decl
, true);
7433 else if (outer_ctx
->outer_context
)
7434 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
7437 case OMP_CLAUSE_REDUCTION
:
7438 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
7439 /* OpenACC permits reductions on private variables. */
7440 if (!(region_type
& ORT_ACC
))
7441 check_non_private
= "reduction";
7442 decl
= OMP_CLAUSE_DECL (c
);
7443 if (TREE_CODE (decl
) == MEM_REF
)
7445 tree type
= TREE_TYPE (decl
);
7446 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
7447 NULL
, is_gimple_val
, fb_rvalue
, false)
7453 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7456 omp_firstprivatize_variable (ctx
, v
);
7457 omp_notice_variable (ctx
, v
, true);
7459 decl
= TREE_OPERAND (decl
, 0);
7460 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
7462 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
7463 NULL
, is_gimple_val
, fb_rvalue
, false)
7469 v
= TREE_OPERAND (decl
, 1);
7472 omp_firstprivatize_variable (ctx
, v
);
7473 omp_notice_variable (ctx
, v
, true);
7475 decl
= TREE_OPERAND (decl
, 0);
7477 if (TREE_CODE (decl
) == ADDR_EXPR
7478 || TREE_CODE (decl
) == INDIRECT_REF
)
7479 decl
= TREE_OPERAND (decl
, 0);
7482 case OMP_CLAUSE_LINEAR
:
7483 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
7484 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
7491 if (code
== OMP_SIMD
7492 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
7494 struct gimplify_omp_ctx
*octx
= outer_ctx
;
7496 && octx
->region_type
== ORT_WORKSHARE
7497 && octx
->combined_loop
7498 && !octx
->distribute
)
7500 if (octx
->outer_context
7501 && (octx
->outer_context
->region_type
7502 == ORT_COMBINED_PARALLEL
))
7503 octx
= octx
->outer_context
->outer_context
;
7505 octx
= octx
->outer_context
;
7508 && octx
->region_type
== ORT_WORKSHARE
7509 && octx
->combined_loop
7510 && octx
->distribute
)
7512 error_at (OMP_CLAUSE_LOCATION (c
),
7513 "%<linear%> clause for variable other than "
7514 "loop iterator specified on construct "
7515 "combined with %<distribute%>");
7520 /* For combined #pragma omp parallel for simd, need to put
7521 lastprivate and perhaps firstprivate too on the
7522 parallel. Similarly for #pragma omp for simd. */
7523 struct gimplify_omp_ctx
*octx
= outer_ctx
;
7527 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
7528 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
7530 decl
= OMP_CLAUSE_DECL (c
);
7531 if (error_operand_p (decl
))
7537 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
7538 flags
|= GOVD_FIRSTPRIVATE
;
7539 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
7540 flags
|= GOVD_LASTPRIVATE
;
7542 && octx
->region_type
== ORT_WORKSHARE
7543 && octx
->combined_loop
)
7545 if (octx
->outer_context
7546 && (octx
->outer_context
->region_type
7547 == ORT_COMBINED_PARALLEL
))
7548 octx
= octx
->outer_context
;
7549 else if (omp_check_private (octx
, decl
, false))
7553 && (octx
->region_type
& ORT_TASK
) != 0
7554 && octx
->combined_loop
)
7557 && octx
->region_type
== ORT_COMBINED_PARALLEL
7558 && ctx
->region_type
== ORT_WORKSHARE
7559 && octx
== outer_ctx
)
7560 flags
= GOVD_SEEN
| GOVD_SHARED
;
7562 && octx
->region_type
== ORT_COMBINED_TEAMS
)
7563 flags
= GOVD_SEEN
| GOVD_SHARED
;
7565 && octx
->region_type
== ORT_COMBINED_TARGET
)
7567 flags
&= ~GOVD_LASTPRIVATE
;
7568 if (flags
== GOVD_SEEN
)
7574 = splay_tree_lookup (octx
->variables
,
7575 (splay_tree_key
) decl
);
7576 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7581 omp_add_variable (octx
, decl
, flags
);
7582 if (octx
->outer_context
== NULL
)
7584 octx
= octx
->outer_context
;
7589 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
7590 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
7591 omp_notice_variable (octx
, decl
, true);
7593 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
7594 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
7595 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
7597 notice_outer
= false;
7598 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
7602 case OMP_CLAUSE_MAP
:
7603 decl
= OMP_CLAUSE_DECL (c
);
7604 if (error_operand_p (decl
))
7611 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
7614 case OMP_TARGET_DATA
:
7615 case OMP_TARGET_ENTER_DATA
:
7616 case OMP_TARGET_EXIT_DATA
:
7617 case OACC_ENTER_DATA
:
7618 case OACC_EXIT_DATA
:
7619 case OACC_HOST_DATA
:
7620 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7621 || (OMP_CLAUSE_MAP_KIND (c
)
7622 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7623 /* For target {,enter ,exit }data only the array slice is
7624 mapped, but not the pointer to it. */
7632 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
7634 struct gimplify_omp_ctx
*octx
;
7635 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
7637 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
7640 = splay_tree_lookup (octx
->variables
,
7641 (splay_tree_key
) decl
);
7643 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
7644 "declared in enclosing %<host_data%> region",
7648 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
7649 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
7650 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
7651 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
7652 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
7657 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7658 || (OMP_CLAUSE_MAP_KIND (c
)
7659 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7660 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
7663 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
7665 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
7666 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
7671 if (TREE_CODE (d
) == ARRAY_REF
)
7673 while (TREE_CODE (d
) == ARRAY_REF
)
7674 d
= TREE_OPERAND (d
, 0);
7675 if (TREE_CODE (d
) == COMPONENT_REF
7676 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
7679 pd
= &OMP_CLAUSE_DECL (c
);
7681 && TREE_CODE (decl
) == INDIRECT_REF
7682 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
7683 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
7686 pd
= &TREE_OPERAND (decl
, 0);
7687 decl
= TREE_OPERAND (decl
, 0);
7689 if (TREE_CODE (decl
) == COMPONENT_REF
)
7691 while (TREE_CODE (decl
) == COMPONENT_REF
)
7692 decl
= TREE_OPERAND (decl
, 0);
7693 if (TREE_CODE (decl
) == INDIRECT_REF
7694 && DECL_P (TREE_OPERAND (decl
, 0))
7695 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
7697 decl
= TREE_OPERAND (decl
, 0);
7699 if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
)
7707 if (error_operand_p (decl
))
7713 tree stype
= TREE_TYPE (decl
);
7714 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
7715 stype
= TREE_TYPE (stype
);
7716 if (TYPE_SIZE_UNIT (stype
) == NULL
7717 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
7719 error_at (OMP_CLAUSE_LOCATION (c
),
7720 "mapping field %qE of variable length "
7721 "structure", OMP_CLAUSE_DECL (c
));
7726 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
7728 /* Error recovery. */
7729 if (prev_list_p
== NULL
)
7734 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
7736 tree ch
= OMP_CLAUSE_CHAIN (*prev_list_p
);
7737 if (ch
== NULL_TREE
|| OMP_CLAUSE_CHAIN (ch
) != c
)
7746 HOST_WIDE_INT bitsize
, bitpos
;
7748 int unsignedp
, reversep
, volatilep
= 0;
7749 tree base
= OMP_CLAUSE_DECL (c
);
7750 while (TREE_CODE (base
) == ARRAY_REF
)
7751 base
= TREE_OPERAND (base
, 0);
7752 if (TREE_CODE (base
) == INDIRECT_REF
)
7753 base
= TREE_OPERAND (base
, 0);
7754 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7755 &mode
, &unsignedp
, &reversep
,
7757 tree orig_base
= base
;
7758 if ((TREE_CODE (base
) == INDIRECT_REF
7759 || (TREE_CODE (base
) == MEM_REF
7760 && integer_zerop (TREE_OPERAND (base
, 1))))
7761 && DECL_P (TREE_OPERAND (base
, 0))
7762 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
7764 base
= TREE_OPERAND (base
, 0);
7765 gcc_assert (base
== decl
7766 && (offset
== NULL_TREE
7767 || TREE_CODE (offset
) == INTEGER_CST
));
7770 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7771 bool ptr
= (OMP_CLAUSE_MAP_KIND (c
)
7772 == GOMP_MAP_ALWAYS_POINTER
);
7773 if (n
== NULL
|| (n
->value
& GOVD_MAP
) == 0)
7775 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7777 OMP_CLAUSE_SET_MAP_KIND (l
, GOMP_MAP_STRUCT
);
7778 if (orig_base
!= base
)
7779 OMP_CLAUSE_DECL (l
) = unshare_expr (orig_base
);
7781 OMP_CLAUSE_DECL (l
) = decl
;
7782 OMP_CLAUSE_SIZE (l
) = size_int (1);
7783 if (struct_map_to_clause
== NULL
)
7784 struct_map_to_clause
= new hash_map
<tree
, tree
>;
7785 struct_map_to_clause
->put (decl
, l
);
7788 enum gomp_map_kind mkind
7789 = code
== OMP_TARGET_EXIT_DATA
7790 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
7791 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7793 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
7794 OMP_CLAUSE_DECL (c2
)
7795 = unshare_expr (OMP_CLAUSE_DECL (c
));
7796 OMP_CLAUSE_CHAIN (c2
) = *prev_list_p
;
7797 OMP_CLAUSE_SIZE (c2
)
7798 = TYPE_SIZE_UNIT (ptr_type_node
);
7799 OMP_CLAUSE_CHAIN (l
) = c2
;
7800 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
7802 tree c4
= OMP_CLAUSE_CHAIN (*prev_list_p
);
7804 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7806 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
7807 OMP_CLAUSE_DECL (c3
)
7808 = unshare_expr (OMP_CLAUSE_DECL (c4
));
7809 OMP_CLAUSE_SIZE (c3
)
7810 = TYPE_SIZE_UNIT (ptr_type_node
);
7811 OMP_CLAUSE_CHAIN (c3
) = *prev_list_p
;
7812 OMP_CLAUSE_CHAIN (c2
) = c3
;
7819 OMP_CLAUSE_CHAIN (l
) = c
;
7821 list_p
= &OMP_CLAUSE_CHAIN (l
);
7823 if (orig_base
!= base
&& code
== OMP_TARGET
)
7825 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7827 enum gomp_map_kind mkind
7828 = GOMP_MAP_FIRSTPRIVATE_REFERENCE
;
7829 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
7830 OMP_CLAUSE_DECL (c2
) = decl
;
7831 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
7832 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
7833 OMP_CLAUSE_CHAIN (l
) = c2
;
7835 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
7836 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) || ptr
)
7842 tree
*osc
= struct_map_to_clause
->get (decl
);
7843 tree
*sc
= NULL
, *scp
= NULL
;
7844 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) || ptr
)
7845 n
->value
|= GOVD_SEEN
;
7848 o1
= wi::to_offset (offset
);
7852 o1
= o1
+ bitpos
/ BITS_PER_UNIT
;
7853 sc
= &OMP_CLAUSE_CHAIN (*osc
);
7855 && (OMP_CLAUSE_MAP_KIND (*sc
)
7856 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7857 sc
= &OMP_CLAUSE_CHAIN (*sc
);
7858 for (; *sc
!= c
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
7859 if (ptr
&& sc
== prev_list_p
)
7861 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
7863 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
7865 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
7871 HOST_WIDE_INT bitsize2
, bitpos2
;
7872 base
= OMP_CLAUSE_DECL (*sc
);
7873 if (TREE_CODE (base
) == ARRAY_REF
)
7875 while (TREE_CODE (base
) == ARRAY_REF
)
7876 base
= TREE_OPERAND (base
, 0);
7877 if (TREE_CODE (base
) != COMPONENT_REF
7878 || (TREE_CODE (TREE_TYPE (base
))
7882 else if (TREE_CODE (base
) == INDIRECT_REF
7883 && (TREE_CODE (TREE_OPERAND (base
, 0))
7885 && (TREE_CODE (TREE_TYPE
7886 (TREE_OPERAND (base
, 0)))
7888 base
= TREE_OPERAND (base
, 0);
7889 base
= get_inner_reference (base
, &bitsize2
,
7892 &reversep
, &volatilep
);
7893 if ((TREE_CODE (base
) == INDIRECT_REF
7894 || (TREE_CODE (base
) == MEM_REF
7895 && integer_zerop (TREE_OPERAND (base
,
7897 && DECL_P (TREE_OPERAND (base
, 0))
7898 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
,
7901 base
= TREE_OPERAND (base
, 0);
7906 gcc_assert (offset
== NULL_TREE
7907 || TREE_CODE (offset
) == INTEGER_CST
);
7908 tree d1
= OMP_CLAUSE_DECL (*sc
);
7909 tree d2
= OMP_CLAUSE_DECL (c
);
7910 while (TREE_CODE (d1
) == ARRAY_REF
)
7911 d1
= TREE_OPERAND (d1
, 0);
7912 while (TREE_CODE (d2
) == ARRAY_REF
)
7913 d2
= TREE_OPERAND (d2
, 0);
7914 if (TREE_CODE (d1
) == INDIRECT_REF
)
7915 d1
= TREE_OPERAND (d1
, 0);
7916 if (TREE_CODE (d2
) == INDIRECT_REF
)
7917 d2
= TREE_OPERAND (d2
, 0);
7918 while (TREE_CODE (d1
) == COMPONENT_REF
)
7919 if (TREE_CODE (d2
) == COMPONENT_REF
7920 && TREE_OPERAND (d1
, 1)
7921 == TREE_OPERAND (d2
, 1))
7923 d1
= TREE_OPERAND (d1
, 0);
7924 d2
= TREE_OPERAND (d2
, 0);
7930 error_at (OMP_CLAUSE_LOCATION (c
),
7931 "%qE appears more than once in map "
7932 "clauses", OMP_CLAUSE_DECL (c
));
7937 o2
= wi::to_offset (offset2
);
7941 o2
= o2
+ bitpos2
/ BITS_PER_UNIT
;
7942 if (wi::ltu_p (o1
, o2
)
7943 || (wi::eq_p (o1
, o2
) && bitpos
< bitpos2
))
7953 OMP_CLAUSE_SIZE (*osc
)
7954 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
),
7958 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7960 tree cl
= NULL_TREE
;
7961 enum gomp_map_kind mkind
7962 = code
== OMP_TARGET_EXIT_DATA
7963 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
7964 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
7965 OMP_CLAUSE_DECL (c2
)
7966 = unshare_expr (OMP_CLAUSE_DECL (c
));
7967 OMP_CLAUSE_CHAIN (c2
) = scp
? *scp
: *prev_list_p
;
7968 OMP_CLAUSE_SIZE (c2
)
7969 = TYPE_SIZE_UNIT (ptr_type_node
);
7970 cl
= scp
? *prev_list_p
: c2
;
7971 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
7973 tree c4
= OMP_CLAUSE_CHAIN (*prev_list_p
);
7975 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7977 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
7978 OMP_CLAUSE_DECL (c3
)
7979 = unshare_expr (OMP_CLAUSE_DECL (c4
));
7980 OMP_CLAUSE_SIZE (c3
)
7981 = TYPE_SIZE_UNIT (ptr_type_node
);
7982 OMP_CLAUSE_CHAIN (c3
) = *prev_list_p
;
7984 OMP_CLAUSE_CHAIN (c2
) = c3
;
7990 if (sc
== prev_list_p
)
7997 *prev_list_p
= OMP_CLAUSE_CHAIN (c
);
7998 list_p
= prev_list_p
;
8000 OMP_CLAUSE_CHAIN (c
) = *sc
;
8007 *list_p
= OMP_CLAUSE_CHAIN (c
);
8008 OMP_CLAUSE_CHAIN (c
) = *sc
;
8015 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
8016 && OMP_CLAUSE_CHAIN (c
)
8017 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
8018 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
8019 == GOMP_MAP_ALWAYS_POINTER
))
8020 prev_list_p
= list_p
;
8023 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
8024 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
8025 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
8026 flags
|= GOVD_MAP_ALWAYS_TO
;
8029 case OMP_CLAUSE_DEPEND
:
8030 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8032 tree deps
= OMP_CLAUSE_DECL (c
);
8033 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
8035 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
8036 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
8037 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
8038 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
8039 deps
= TREE_CHAIN (deps
);
8043 else if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
8045 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8047 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8048 NULL
, is_gimple_val
, fb_rvalue
);
8049 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8051 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8056 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8057 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8058 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8066 case OMP_CLAUSE_FROM
:
8067 case OMP_CLAUSE__CACHE_
:
8068 decl
= OMP_CLAUSE_DECL (c
);
8069 if (error_operand_p (decl
))
8074 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
8075 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
8076 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8077 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
8078 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8085 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
8086 NULL
, is_gimple_lvalue
, fb_lvalue
)
8096 case OMP_CLAUSE_USE_DEVICE_PTR
:
8097 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8099 case OMP_CLAUSE_IS_DEVICE_PTR
:
8100 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8104 decl
= OMP_CLAUSE_DECL (c
);
8106 if (error_operand_p (decl
))
8111 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
8113 tree t
= omp_member_access_dummy_var (decl
);
8116 tree v
= DECL_VALUE_EXPR (decl
);
8117 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
8119 omp_notice_variable (outer_ctx
, t
, true);
8122 if (code
== OACC_DATA
8123 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
8124 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
8125 flags
|= GOVD_MAP_0LEN_ARRAY
;
8126 omp_add_variable (ctx
, decl
, flags
);
8127 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
8128 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8130 omp_add_variable (ctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
8131 GOVD_LOCAL
| GOVD_SEEN
);
8132 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
8133 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
8135 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
8137 omp_add_variable (ctx
,
8138 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
8139 GOVD_LOCAL
| GOVD_SEEN
);
8140 gimplify_omp_ctxp
= ctx
;
8141 push_gimplify_context ();
8143 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
8144 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8146 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
8147 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
8148 pop_gimplify_context
8149 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
8150 push_gimplify_context ();
8151 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
8152 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8153 pop_gimplify_context
8154 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
8155 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
8156 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
8158 gimplify_omp_ctxp
= outer_ctx
;
8160 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
8161 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
8163 gimplify_omp_ctxp
= ctx
;
8164 push_gimplify_context ();
8165 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
8167 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
8169 TREE_SIDE_EFFECTS (bind
) = 1;
8170 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
8171 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
8173 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
8174 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
8175 pop_gimplify_context
8176 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
8177 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
8179 gimplify_omp_ctxp
= outer_ctx
;
8181 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
8182 && OMP_CLAUSE_LINEAR_STMT (c
))
8184 gimplify_omp_ctxp
= ctx
;
8185 push_gimplify_context ();
8186 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
8188 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
8190 TREE_SIDE_EFFECTS (bind
) = 1;
8191 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
8192 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
8194 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
8195 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
8196 pop_gimplify_context
8197 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
8198 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
8200 gimplify_omp_ctxp
= outer_ctx
;
8206 case OMP_CLAUSE_COPYIN
:
8207 case OMP_CLAUSE_COPYPRIVATE
:
8208 decl
= OMP_CLAUSE_DECL (c
);
8209 if (error_operand_p (decl
))
8214 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
8216 && !omp_check_private (ctx
, decl
, true))
8219 if (is_global_var (decl
))
8221 if (DECL_THREAD_LOCAL_P (decl
))
8223 else if (DECL_HAS_VALUE_EXPR_P (decl
))
8225 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
8229 && DECL_THREAD_LOCAL_P (value
))
8234 error_at (OMP_CLAUSE_LOCATION (c
),
8235 "copyprivate variable %qE is not threadprivate"
8236 " or private in outer context", DECL_NAME (decl
));
8240 omp_notice_variable (outer_ctx
, decl
, true);
8241 if (check_non_private
8242 && region_type
== ORT_WORKSHARE
8243 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
8244 || decl
== OMP_CLAUSE_DECL (c
)
8245 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
8246 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
8248 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
8249 == POINTER_PLUS_EXPR
8250 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8251 (OMP_CLAUSE_DECL (c
), 0), 0))
8253 && omp_check_private (ctx
, decl
, false))
8255 error ("%s variable %qE is private in outer context",
8256 check_non_private
, DECL_NAME (decl
));
8262 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
8263 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
8266 for (int i
= 0; i
< 2; i
++)
8267 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
8269 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
8270 case OMP_TASK
: p
[i
] = "task"; break;
8271 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
8272 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
8273 case OMP_TARGET
: p
[i
] = "target"; break;
8274 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
8275 case OMP_TARGET_ENTER_DATA
:
8276 p
[i
] = "target enter data"; break;
8277 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
8278 default: gcc_unreachable ();
8280 error_at (OMP_CLAUSE_LOCATION (c
),
8281 "expected %qs %<if%> clause modifier rather than %qs",
8287 case OMP_CLAUSE_FINAL
:
8288 OMP_CLAUSE_OPERAND (c
, 0)
8289 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
8292 case OMP_CLAUSE_SCHEDULE
:
8293 case OMP_CLAUSE_NUM_THREADS
:
8294 case OMP_CLAUSE_NUM_TEAMS
:
8295 case OMP_CLAUSE_THREAD_LIMIT
:
8296 case OMP_CLAUSE_DIST_SCHEDULE
:
8297 case OMP_CLAUSE_DEVICE
:
8298 case OMP_CLAUSE_PRIORITY
:
8299 case OMP_CLAUSE_GRAINSIZE
:
8300 case OMP_CLAUSE_NUM_TASKS
:
8301 case OMP_CLAUSE_HINT
:
8302 case OMP_CLAUSE__CILK_FOR_COUNT_
:
8303 case OMP_CLAUSE_ASYNC
:
8304 case OMP_CLAUSE_WAIT
:
8305 case OMP_CLAUSE_NUM_GANGS
:
8306 case OMP_CLAUSE_NUM_WORKERS
:
8307 case OMP_CLAUSE_VECTOR_LENGTH
:
8308 case OMP_CLAUSE_WORKER
:
8309 case OMP_CLAUSE_VECTOR
:
8310 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
8311 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8315 case OMP_CLAUSE_GANG
:
8316 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
8317 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8319 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
8320 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8324 case OMP_CLAUSE_TILE
:
8325 for (tree list
= OMP_CLAUSE_TILE_LIST (c
); !remove
&& list
;
8326 list
= TREE_CHAIN (list
))
8328 if (gimplify_expr (&TREE_VALUE (list
), pre_p
, NULL
,
8329 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8334 case OMP_CLAUSE_NOWAIT
:
8335 case OMP_CLAUSE_ORDERED
:
8336 case OMP_CLAUSE_UNTIED
:
8337 case OMP_CLAUSE_COLLAPSE
:
8338 case OMP_CLAUSE_AUTO
:
8339 case OMP_CLAUSE_SEQ
:
8340 case OMP_CLAUSE_INDEPENDENT
:
8341 case OMP_CLAUSE_MERGEABLE
:
8342 case OMP_CLAUSE_PROC_BIND
:
8343 case OMP_CLAUSE_SAFELEN
:
8344 case OMP_CLAUSE_SIMDLEN
:
8345 case OMP_CLAUSE_NOGROUP
:
8346 case OMP_CLAUSE_THREADS
:
8347 case OMP_CLAUSE_SIMD
:
8350 case OMP_CLAUSE_DEFAULTMAP
:
8351 ctx
->target_map_scalars_firstprivate
= false;
8354 case OMP_CLAUSE_ALIGNED
:
8355 decl
= OMP_CLAUSE_DECL (c
);
8356 if (error_operand_p (decl
))
8361 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
8362 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8367 if (!is_global_var (decl
)
8368 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
8369 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
8372 case OMP_CLAUSE_DEFAULT
:
8373 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
8380 if (code
== OACC_DATA
8381 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
8382 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
8385 *list_p
= OMP_CLAUSE_CHAIN (c
);
8387 list_p
= &OMP_CLAUSE_CHAIN (c
);
8390 gimplify_omp_ctxp
= ctx
;
8391 if (struct_map_to_clause
)
8392 delete struct_map_to_clause
;
8395 /* Return true if DECL is a candidate for shared to firstprivate
8396 optimization. We only consider non-addressable scalars, not
8397 too big, and not references. */
8400 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
8402 if (TREE_ADDRESSABLE (decl
))
8404 tree type
= TREE_TYPE (decl
);
8405 if (!is_gimple_reg_type (type
)
8406 || TREE_CODE (type
) == REFERENCE_TYPE
8407 || TREE_ADDRESSABLE (type
))
8409 /* Don't optimize too large decls, as each thread/task will have
8411 HOST_WIDE_INT len
= int_size_in_bytes (type
);
8412 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
8414 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
8419 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8420 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8421 GOVD_WRITTEN in outer contexts. */
8424 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
8426 for (; ctx
; ctx
= ctx
->outer_context
)
8428 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
8429 (splay_tree_key
) decl
);
8432 else if (n
->value
& GOVD_SHARED
)
8434 n
->value
|= GOVD_WRITTEN
;
8437 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
8442 /* Helper callback for walk_gimple_seq to discover possible stores
8443 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8444 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8448 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
8450 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
8459 if (handled_component_p (op
))
8460 op
= TREE_OPERAND (op
, 0);
8461 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
8462 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
8463 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
8468 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
8471 omp_mark_stores (gimplify_omp_ctxp
, op
);
8475 /* Helper callback for walk_gimple_seq to discover possible stores
8476 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8477 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8481 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
8482 bool *handled_ops_p
,
8483 struct walk_stmt_info
*wi
)
8485 gimple
*stmt
= gsi_stmt (*gsi_p
);
8486 switch (gimple_code (stmt
))
8488 /* Don't recurse on OpenMP constructs for which
8489 gimplify_adjust_omp_clauses already handled the bodies,
8490 except handle gimple_omp_for_pre_body. */
8491 case GIMPLE_OMP_FOR
:
8492 *handled_ops_p
= true;
8493 if (gimple_omp_for_pre_body (stmt
))
8494 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
8495 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
8497 case GIMPLE_OMP_PARALLEL
:
8498 case GIMPLE_OMP_TASK
:
8499 case GIMPLE_OMP_SECTIONS
:
8500 case GIMPLE_OMP_SINGLE
:
8501 case GIMPLE_OMP_TARGET
:
8502 case GIMPLE_OMP_TEAMS
:
8503 case GIMPLE_OMP_CRITICAL
:
8504 *handled_ops_p
= true;
8512 struct gimplify_adjust_omp_clauses_data
8518 /* For all variables that were not actually used within the context,
8519 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8522 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
8524 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
8526 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
8527 tree decl
= (tree
) n
->key
;
8528 unsigned flags
= n
->value
;
8529 enum omp_clause_code code
;
8533 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
8535 if ((flags
& GOVD_SEEN
) == 0)
8537 if (flags
& GOVD_DEBUG_PRIVATE
)
8539 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_PRIVATE
);
8540 private_debug
= true;
8542 else if (flags
& GOVD_MAP
)
8543 private_debug
= false;
8546 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
8547 !!(flags
& GOVD_SHARED
));
8549 code
= OMP_CLAUSE_PRIVATE
;
8550 else if (flags
& GOVD_MAP
)
8552 code
= OMP_CLAUSE_MAP
;
8553 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
8554 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
8556 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
8560 else if (flags
& GOVD_SHARED
)
8562 if (is_global_var (decl
))
8564 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
8568 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8569 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
8570 | GOVD_PRIVATE
| GOVD_REDUCTION
8571 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
8573 ctx
= ctx
->outer_context
;
8578 code
= OMP_CLAUSE_SHARED
;
8580 else if (flags
& GOVD_PRIVATE
)
8581 code
= OMP_CLAUSE_PRIVATE
;
8582 else if (flags
& GOVD_FIRSTPRIVATE
)
8584 code
= OMP_CLAUSE_FIRSTPRIVATE
;
8585 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
8586 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
8587 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
8589 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8590 "%<target%> construct", decl
);
8594 else if (flags
& GOVD_LASTPRIVATE
)
8595 code
= OMP_CLAUSE_LASTPRIVATE
;
8596 else if (flags
& GOVD_ALIGNED
)
8601 if (((flags
& GOVD_LASTPRIVATE
)
8602 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
8603 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8604 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
8606 tree chain
= *list_p
;
8607 clause
= build_omp_clause (input_location
, code
);
8608 OMP_CLAUSE_DECL (clause
) = decl
;
8609 OMP_CLAUSE_CHAIN (clause
) = chain
;
8611 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
8612 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
8613 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
8614 else if (code
== OMP_CLAUSE_SHARED
8615 && (flags
& GOVD_WRITTEN
) == 0
8616 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8617 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
8618 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
8619 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
8620 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
8622 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
8623 OMP_CLAUSE_DECL (nc
) = decl
;
8624 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
8625 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
8626 OMP_CLAUSE_DECL (clause
)
8627 = build_simple_mem_ref_loc (input_location
, decl
);
8628 OMP_CLAUSE_DECL (clause
)
8629 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
8630 build_int_cst (build_pointer_type (char_type_node
), 0));
8631 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
8632 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
8633 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
8634 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
8635 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
8636 OMP_CLAUSE_CHAIN (nc
) = chain
;
8637 OMP_CLAUSE_CHAIN (clause
) = nc
;
8638 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8639 gimplify_omp_ctxp
= ctx
->outer_context
;
8640 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
8641 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
8642 gimplify_omp_ctxp
= ctx
;
8644 else if (code
== OMP_CLAUSE_MAP
)
8646 int kind
= (flags
& GOVD_MAP_TO_ONLY
8649 if (flags
& GOVD_MAP_FORCE
)
8650 kind
|= GOMP_MAP_FLAG_FORCE
;
8651 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
8652 if (DECL_SIZE (decl
)
8653 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
8655 tree decl2
= DECL_VALUE_EXPR (decl
);
8656 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
8657 decl2
= TREE_OPERAND (decl2
, 0);
8658 gcc_assert (DECL_P (decl2
));
8659 tree mem
= build_simple_mem_ref (decl2
);
8660 OMP_CLAUSE_DECL (clause
) = mem
;
8661 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8662 if (gimplify_omp_ctxp
->outer_context
)
8664 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
8665 omp_notice_variable (ctx
, decl2
, true);
8666 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
8668 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
8670 OMP_CLAUSE_DECL (nc
) = decl
;
8671 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
8672 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
8673 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
8675 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
8676 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
8677 OMP_CLAUSE_CHAIN (clause
) = nc
;
8679 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
8680 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
8682 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
8683 OMP_CLAUSE_SIZE (clause
)
8684 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
8685 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8686 gimplify_omp_ctxp
= ctx
->outer_context
;
8687 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
8688 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
8689 gimplify_omp_ctxp
= ctx
;
8690 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
8692 OMP_CLAUSE_DECL (nc
) = decl
;
8693 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
8694 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
8695 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
8696 OMP_CLAUSE_CHAIN (clause
) = nc
;
8699 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
8701 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
8703 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
8704 OMP_CLAUSE_DECL (nc
) = decl
;
8705 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
8706 OMP_CLAUSE_CHAIN (nc
) = chain
;
8707 OMP_CLAUSE_CHAIN (clause
) = nc
;
8708 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8709 gimplify_omp_ctxp
= ctx
->outer_context
;
8710 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
8711 gimplify_omp_ctxp
= ctx
;
8714 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8715 gimplify_omp_ctxp
= ctx
->outer_context
;
8716 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
);
8717 if (gimplify_omp_ctxp
)
8718 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
8719 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
8720 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
8721 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
8723 gimplify_omp_ctxp
= ctx
;
8728 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
8729 enum tree_code code
)
8731 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8736 struct gimplify_omp_ctx
*octx
;
8737 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
8738 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
8742 struct walk_stmt_info wi
;
8743 memset (&wi
, 0, sizeof (wi
));
8744 walk_gimple_seq (body
, omp_find_stores_stmt
,
8745 omp_find_stores_op
, &wi
);
8748 while ((c
= *list_p
) != NULL
)
8751 bool remove
= false;
8753 switch (OMP_CLAUSE_CODE (c
))
8755 case OMP_CLAUSE_FIRSTPRIVATE
:
8756 if ((ctx
->region_type
& ORT_TARGET
)
8757 && (ctx
->region_type
& ORT_ACC
) == 0
8758 && TYPE_ATOMIC (strip_array_types
8759 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
8761 error_at (OMP_CLAUSE_LOCATION (c
),
8762 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8763 "%<target%> construct", OMP_CLAUSE_DECL (c
));
8768 case OMP_CLAUSE_PRIVATE
:
8769 case OMP_CLAUSE_SHARED
:
8770 case OMP_CLAUSE_LINEAR
:
8771 decl
= OMP_CLAUSE_DECL (c
);
8772 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8773 remove
= !(n
->value
& GOVD_SEEN
);
8776 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
8777 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
8778 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
8780 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
8781 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
8783 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
8784 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
8786 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8787 && (n
->value
& GOVD_WRITTEN
) == 0
8789 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8790 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
8791 else if (DECL_P (decl
)
8792 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8793 && (n
->value
& GOVD_WRITTEN
) != 1)
8794 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
8795 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
8796 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8797 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
8801 case OMP_CLAUSE_LASTPRIVATE
:
8802 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8803 accurately reflect the presence of a FIRSTPRIVATE clause. */
8804 decl
= OMP_CLAUSE_DECL (c
);
8805 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8806 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
8807 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
8808 if (code
== OMP_DISTRIBUTE
8809 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
8812 error_at (OMP_CLAUSE_LOCATION (c
),
8813 "same variable used in %<firstprivate%> and "
8814 "%<lastprivate%> clauses on %<distribute%> "
8818 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
8820 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8821 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
8824 case OMP_CLAUSE_ALIGNED
:
8825 decl
= OMP_CLAUSE_DECL (c
);
8826 if (!is_global_var (decl
))
8828 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8829 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
8830 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
8832 struct gimplify_omp_ctx
*octx
;
8834 && (n
->value
& (GOVD_DATA_SHARE_CLASS
8835 & ~GOVD_FIRSTPRIVATE
)))
8838 for (octx
= ctx
->outer_context
; octx
;
8839 octx
= octx
->outer_context
)
8841 n
= splay_tree_lookup (octx
->variables
,
8842 (splay_tree_key
) decl
);
8845 if (n
->value
& GOVD_LOCAL
)
8847 /* We have to avoid assigning a shared variable
8848 to itself when trying to add
8849 __builtin_assume_aligned. */
8850 if (n
->value
& GOVD_SHARED
)
8858 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
8860 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8861 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
8866 case OMP_CLAUSE_MAP
:
8867 if (code
== OMP_TARGET_EXIT_DATA
8868 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
8873 decl
= OMP_CLAUSE_DECL (c
);
8874 /* Data clauses associated with acc parallel reductions must be
8875 compatible with present_or_copy. Warn and adjust the clause
8876 if that is not the case. */
8877 if (ctx
->region_type
== ORT_ACC_PARALLEL
)
8879 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
8883 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
8885 if (n
&& (n
->value
& GOVD_REDUCTION
))
8887 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
8889 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
8890 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
8891 && kind
!= GOMP_MAP_FORCE_PRESENT
8892 && kind
!= GOMP_MAP_POINTER
)
8894 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
8895 "incompatible data clause with reduction "
8896 "on %qE; promoting to present_or_copy",
8898 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
8904 if ((ctx
->region_type
& ORT_TARGET
) != 0
8905 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
8907 if (TREE_CODE (decl
) == INDIRECT_REF
8908 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
8909 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
8911 decl
= TREE_OPERAND (decl
, 0);
8912 if (TREE_CODE (decl
) == COMPONENT_REF
)
8914 while (TREE_CODE (decl
) == COMPONENT_REF
)
8915 decl
= TREE_OPERAND (decl
, 0);
8918 n
= splay_tree_lookup (ctx
->variables
,
8919 (splay_tree_key
) decl
);
8920 if (!(n
->value
& GOVD_SEEN
))
8927 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8928 if ((ctx
->region_type
& ORT_TARGET
) != 0
8929 && !(n
->value
& GOVD_SEEN
)
8930 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
8931 && !lookup_attribute ("omp declare target link",
8932 DECL_ATTRIBUTES (decl
)))
8935 /* For struct element mapping, if struct is never referenced
8936 in target block and none of the mapping has always modifier,
8937 remove all the struct element mappings, which immediately
8938 follow the GOMP_MAP_STRUCT map clause. */
8939 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
8941 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
8943 OMP_CLAUSE_CHAIN (c
)
8944 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
8947 else if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
8948 && code
== OMP_TARGET_EXIT_DATA
)
8950 else if (DECL_SIZE (decl
)
8951 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
8952 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
8953 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
8954 && (OMP_CLAUSE_MAP_KIND (c
)
8955 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8957 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
8958 for these, TREE_CODE (DECL_SIZE (decl)) will always be
8960 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
8962 tree decl2
= DECL_VALUE_EXPR (decl
);
8963 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
8964 decl2
= TREE_OPERAND (decl2
, 0);
8965 gcc_assert (DECL_P (decl2
));
8966 tree mem
= build_simple_mem_ref (decl2
);
8967 OMP_CLAUSE_DECL (c
) = mem
;
8968 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8969 if (ctx
->outer_context
)
8971 omp_notice_variable (ctx
->outer_context
, decl2
, true);
8972 omp_notice_variable (ctx
->outer_context
,
8973 OMP_CLAUSE_SIZE (c
), true);
8975 if (((ctx
->region_type
& ORT_TARGET
) != 0
8976 || !ctx
->target_firstprivatize_array_bases
)
8977 && ((n
->value
& GOVD_SEEN
) == 0
8978 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
8980 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8982 OMP_CLAUSE_DECL (nc
) = decl
;
8983 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
8984 if (ctx
->target_firstprivatize_array_bases
)
8985 OMP_CLAUSE_SET_MAP_KIND (nc
,
8986 GOMP_MAP_FIRSTPRIVATE_POINTER
);
8988 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
8989 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
8990 OMP_CLAUSE_CHAIN (c
) = nc
;
8996 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
8997 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
8998 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
8999 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
9005 case OMP_CLAUSE_FROM
:
9006 case OMP_CLAUSE__CACHE_
:
9007 decl
= OMP_CLAUSE_DECL (c
);
9010 if (DECL_SIZE (decl
)
9011 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
9013 tree decl2
= DECL_VALUE_EXPR (decl
);
9014 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
9015 decl2
= TREE_OPERAND (decl2
, 0);
9016 gcc_assert (DECL_P (decl2
));
9017 tree mem
= build_simple_mem_ref (decl2
);
9018 OMP_CLAUSE_DECL (c
) = mem
;
9019 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9020 if (ctx
->outer_context
)
9022 omp_notice_variable (ctx
->outer_context
, decl2
, true);
9023 omp_notice_variable (ctx
->outer_context
,
9024 OMP_CLAUSE_SIZE (c
), true);
9027 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9028 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
9031 case OMP_CLAUSE_REDUCTION
:
9032 decl
= OMP_CLAUSE_DECL (c
);
9033 /* OpenACC reductions need a present_or_copy data clause.
9034 Add one if necessary. Error is the reduction is private. */
9035 if (ctx
->region_type
== ORT_ACC_PARALLEL
)
9037 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9038 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
9039 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
9040 "reduction on %qE", DECL_NAME (decl
));
9041 else if ((n
->value
& GOVD_MAP
) == 0)
9043 tree next
= OMP_CLAUSE_CHAIN (c
);
9044 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
9045 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
9046 OMP_CLAUSE_DECL (nc
) = decl
;
9047 OMP_CLAUSE_CHAIN (c
) = nc
;
9048 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
9051 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
9052 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
9054 nc
= OMP_CLAUSE_CHAIN (nc
);
9056 OMP_CLAUSE_CHAIN (nc
) = next
;
9057 n
->value
|= GOVD_MAP
;
9061 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9062 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
9064 case OMP_CLAUSE_COPYIN
:
9065 case OMP_CLAUSE_COPYPRIVATE
:
9067 case OMP_CLAUSE_NUM_THREADS
:
9068 case OMP_CLAUSE_NUM_TEAMS
:
9069 case OMP_CLAUSE_THREAD_LIMIT
:
9070 case OMP_CLAUSE_DIST_SCHEDULE
:
9071 case OMP_CLAUSE_DEVICE
:
9072 case OMP_CLAUSE_SCHEDULE
:
9073 case OMP_CLAUSE_NOWAIT
:
9074 case OMP_CLAUSE_ORDERED
:
9075 case OMP_CLAUSE_DEFAULT
:
9076 case OMP_CLAUSE_UNTIED
:
9077 case OMP_CLAUSE_COLLAPSE
:
9078 case OMP_CLAUSE_FINAL
:
9079 case OMP_CLAUSE_MERGEABLE
:
9080 case OMP_CLAUSE_PROC_BIND
:
9081 case OMP_CLAUSE_SAFELEN
:
9082 case OMP_CLAUSE_SIMDLEN
:
9083 case OMP_CLAUSE_DEPEND
:
9084 case OMP_CLAUSE_PRIORITY
:
9085 case OMP_CLAUSE_GRAINSIZE
:
9086 case OMP_CLAUSE_NUM_TASKS
:
9087 case OMP_CLAUSE_NOGROUP
:
9088 case OMP_CLAUSE_THREADS
:
9089 case OMP_CLAUSE_SIMD
:
9090 case OMP_CLAUSE_HINT
:
9091 case OMP_CLAUSE_DEFAULTMAP
:
9092 case OMP_CLAUSE_USE_DEVICE_PTR
:
9093 case OMP_CLAUSE_IS_DEVICE_PTR
:
9094 case OMP_CLAUSE__CILK_FOR_COUNT_
:
9095 case OMP_CLAUSE_ASYNC
:
9096 case OMP_CLAUSE_WAIT
:
9097 case OMP_CLAUSE_INDEPENDENT
:
9098 case OMP_CLAUSE_NUM_GANGS
:
9099 case OMP_CLAUSE_NUM_WORKERS
:
9100 case OMP_CLAUSE_VECTOR_LENGTH
:
9101 case OMP_CLAUSE_GANG
:
9102 case OMP_CLAUSE_WORKER
:
9103 case OMP_CLAUSE_VECTOR
:
9104 case OMP_CLAUSE_AUTO
:
9105 case OMP_CLAUSE_SEQ
:
9108 case OMP_CLAUSE_TILE
:
9109 /* We're not yet making use of the information provided by OpenACC
9110 tile clauses. Discard these here, to simplify later middle end
9120 *list_p
= OMP_CLAUSE_CHAIN (c
);
9122 list_p
= &OMP_CLAUSE_CHAIN (c
);
9125 /* Add in any implicit data sharing. */
9126 struct gimplify_adjust_omp_clauses_data data
;
9127 data
.list_p
= list_p
;
9129 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
9131 gimplify_omp_ctxp
= ctx
->outer_context
;
9132 delete_omp_context (ctx
);
9135 /* Gimplify OACC_CACHE. */
9138 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
9140 tree expr
= *expr_p
;
9142 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
9144 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
9147 /* TODO: Do something sensible with this information. */
9149 *expr_p
= NULL_TREE
;
9152 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9153 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9154 kind. The entry kind will replace the one in CLAUSE, while the exit
9155 kind will be used in a new omp_clause and returned to the caller. */
9158 gimplify_oacc_declare_1 (tree clause
)
9160 HOST_WIDE_INT kind
, new_op
;
9164 kind
= OMP_CLAUSE_MAP_KIND (clause
);
9168 case GOMP_MAP_ALLOC
:
9169 case GOMP_MAP_FORCE_ALLOC
:
9170 case GOMP_MAP_FORCE_TO
:
9171 new_op
= GOMP_MAP_DELETE
;
9175 case GOMP_MAP_FORCE_FROM
:
9176 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
9177 new_op
= GOMP_MAP_FORCE_FROM
;
9181 case GOMP_MAP_FORCE_TOFROM
:
9182 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_TO
);
9183 new_op
= GOMP_MAP_FORCE_FROM
;
9188 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
9189 new_op
= GOMP_MAP_FROM
;
9193 case GOMP_MAP_TOFROM
:
9194 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
9195 new_op
= GOMP_MAP_FROM
;
9199 case GOMP_MAP_DEVICE_RESIDENT
:
9200 case GOMP_MAP_FORCE_DEVICEPTR
:
9201 case GOMP_MAP_FORCE_PRESENT
:
9203 case GOMP_MAP_POINTER
:
9214 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
9215 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
9216 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
9222 /* Gimplify OACC_DECLARE. */
9225 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
9227 tree expr
= *expr_p
;
9231 clauses
= OACC_DECLARE_CLAUSES (expr
);
9233 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
9235 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
9237 tree decl
= OMP_CLAUSE_DECL (t
);
9239 if (TREE_CODE (decl
) == MEM_REF
)
9243 && !is_global_var (decl
)
9244 && DECL_CONTEXT (decl
) == current_function_decl
)
9246 tree c
= gimplify_oacc_declare_1 (t
);
9249 if (oacc_declare_returns
== NULL
)
9250 oacc_declare_returns
= new hash_map
<tree
, tree
>;
9252 oacc_declare_returns
->put (decl
, c
);
9256 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
9259 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
9262 gimplify_seq_add_stmt (pre_p
, stmt
);
9264 *expr_p
= NULL_TREE
;
9267 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9268 gimplification of the body, as well as scanning the body for used
9269 variables. We need to do this scan now, because variable-sized
9270 decls will be decomposed during gimplification. */
/* NOTE(review): extraction artifact — statements below are shredded
   across physical lines with original line numbers fused in; numbering
   gaps (e.g. 9274, 9285, 9293-9294) mark missing original lines, which
   include the return type, local decls, and the trailing arguments of
   gimplify_adjust_omp_clauses.  Tokens preserved byte-for-byte.  */
9273 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
9275 tree expr
= *expr_p
;
9277 gimple_seq body
= NULL
;
/* Scan clauses, distinguishing a combined parallel (e.g. parallel for)
   from a standalone one via the region type.  */
9279 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
9280 OMP_PARALLEL_COMBINED (expr
)
9281 ? ORT_COMBINED_PARALLEL
9282 : ORT_PARALLEL
, OMP_PARALLEL
);
/* Gimplify the body in its own gimplify context; pop with the bind
   stmt when one was produced, else pop with NULL.  */
9284 push_gimplify_context ();
9286 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
9287 if (gimple_code (g
) == GIMPLE_BIND
)
9288 pop_gimplify_context (g
);
9290 pop_gimplify_context (NULL
);
9292 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
/* Build the GIMPLE_OMP_PARALLEL, tag combined constructs, queue it,
   and clear the original tree node.  */
9295 g
= gimple_build_omp_parallel (body
,
9296 OMP_PARALLEL_CLAUSES (expr
),
9297 NULL_TREE
, NULL_TREE
);
9298 if (OMP_PARALLEL_COMBINED (expr
))
9299 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
9300 gimplify_seq_add_stmt (pre_p
, g
);
9301 *expr_p
= NULL_TREE
;
9304 /* Gimplify the contents of an OMP_TASK statement. This involves
9305 gimplification of the body, as well as scanning the body for used
9306 variables. We need to do this scan now, because variable-sized
9307 decls will be decomposed during gimplification. */
/* NOTE(review): extraction artifact — statements shredded across lines
   with original line numbers fused in; numbering gaps mark missing
   original lines (return type, locals, the OMP_CLAUSE_UNTIED argument
   on line 9318, the tail of gimplify_adjust_omp_clauses).  Tokens
   preserved byte-for-byte.  */
9310 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
9312 tree expr
= *expr_p
;
9314 gimple_seq body
= NULL
;
/* Scan clauses; region type depends on presence of an untied clause
   (the find_omp_clause argument naming it is on a missing line).  */
9316 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
9317 find_omp_clause (OMP_TASK_CLAUSES (expr
),
9319 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
/* Gimplify the task body in its own context, mirroring
   gimplify_omp_parallel above.  */
9321 push_gimplify_context ();
9323 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
9324 if (gimple_code (g
) == GIMPLE_BIND
)
9325 pop_gimplify_context (g
);
9327 pop_gimplify_context (NULL
);
9329 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
/* Build the GIMPLE_OMP_TASK (child fn / data args left NULL here),
   queue it, and clear the original tree node.  */
9332 g
= gimple_build_omp_task (body
,
9333 OMP_TASK_CLAUSES (expr
),
9334 NULL_TREE
, NULL_TREE
,
9335 NULL_TREE
, NULL_TREE
, NULL_TREE
);
9336 gimplify_seq_add_stmt (pre_p
, g
);
9337 *expr_p
= NULL_TREE
;
9340 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9341 with non-NULL OMP_FOR_INIT. */
/* NOTE(review): walk_tree callback; most of its body (return type,
   case labels before line 9353, return statements, braces) is on
   original lines missing from this extract — only the switch skeleton
   survives.  Tokens preserved byte-for-byte.  */
9344 find_combined_omp_for (tree
*tp
, int *walk_subtrees
, void *)
9347 switch (TREE_CODE (*tp
))
/* Presumably matches OMP_FOR/OMP_SIMD here and returns *tp when its
   OMP_FOR_INIT is set — TODO confirm against the missing lines.  */
9353 if (OMP_FOR_INIT (*tp
) != NULL_TREE
)
9357 case STATEMENT_LIST
:
9367 /* Gimplify the gross structure of an OMP_FOR statement. */
9369 static enum gimplify_status
9370 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
9372 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
9373 enum gimplify_status ret
= GS_ALL_DONE
;
9374 enum gimplify_status tret
;
9376 gimple_seq for_body
, for_pre_body
;
9378 bitmap has_decl_expr
= NULL
;
9379 enum omp_region_type ort
= ORT_WORKSHARE
;
9381 orig_for_stmt
= for_stmt
= *expr_p
;
9383 switch (TREE_CODE (for_stmt
))
9387 case OMP_DISTRIBUTE
:
9393 if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
9394 ort
= ORT_UNTIED_TASK
;
9406 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9407 clause for the IV. */
9408 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
9410 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
9411 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
9412 decl
= TREE_OPERAND (t
, 0);
9413 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9414 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9415 && OMP_CLAUSE_DECL (c
) == decl
)
9417 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
9422 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
9424 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
9425 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
9426 find_combined_omp_for
, NULL
, NULL
);
9427 if (inner_for_stmt
== NULL_TREE
)
9429 gcc_assert (seen_error ());
9430 *expr_p
= NULL_TREE
;
9435 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
9436 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
9437 TREE_CODE (for_stmt
));
9439 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
9440 gimplify_omp_ctxp
->distribute
= true;
9442 /* Handle OMP_FOR_INIT. */
9443 for_pre_body
= NULL
;
9444 if (ort
== ORT_SIMD
&& OMP_FOR_PRE_BODY (for_stmt
))
9446 has_decl_expr
= BITMAP_ALLOC (NULL
);
9447 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
9448 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
9451 t
= OMP_FOR_PRE_BODY (for_stmt
);
9452 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
9454 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
9456 tree_stmt_iterator si
;
9457 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
9461 if (TREE_CODE (t
) == DECL_EXPR
9462 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
9463 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
9467 if (OMP_FOR_PRE_BODY (for_stmt
))
9469 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
9470 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
9473 struct gimplify_omp_ctx ctx
;
9474 memset (&ctx
, 0, sizeof (ctx
));
9475 ctx
.region_type
= ORT_NONE
;
9476 gimplify_omp_ctxp
= &ctx
;
9477 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
9478 gimplify_omp_ctxp
= NULL
;
9481 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
9483 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
9484 for_stmt
= inner_for_stmt
;
9486 /* For taskloop, need to gimplify the start, end and step before the
9487 taskloop, outside of the taskloop omp context. */
9488 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
9490 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
9492 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
9493 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
9496 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
9497 pre_p
, NULL
, false);
9498 tree c
= build_omp_clause (input_location
,
9499 OMP_CLAUSE_FIRSTPRIVATE
);
9500 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
9501 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
9502 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
9505 /* Handle OMP_FOR_COND. */
9506 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
9507 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
9510 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
9511 gimple_seq_empty_p (for_pre_body
)
9512 ? pre_p
: &for_pre_body
, NULL
,
9514 tree c
= build_omp_clause (input_location
,
9515 OMP_CLAUSE_FIRSTPRIVATE
);
9516 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
9517 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
9518 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
9521 /* Handle OMP_FOR_INCR. */
9522 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
9523 if (TREE_CODE (t
) == MODIFY_EXPR
)
9525 decl
= TREE_OPERAND (t
, 0);
9526 t
= TREE_OPERAND (t
, 1);
9527 tree
*tp
= &TREE_OPERAND (t
, 1);
9528 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
9529 tp
= &TREE_OPERAND (t
, 0);
9531 if (!is_gimple_constant (*tp
))
9533 gimple_seq
*seq
= gimple_seq_empty_p (for_pre_body
)
9534 ? pre_p
: &for_pre_body
;
9535 *tp
= get_initialized_tmp_var (*tp
, seq
, NULL
, false);
9536 tree c
= build_omp_clause (input_location
,
9537 OMP_CLAUSE_FIRSTPRIVATE
);
9538 OMP_CLAUSE_DECL (c
) = *tp
;
9539 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
9540 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
9545 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
9549 if (orig_for_stmt
!= for_stmt
)
9550 gimplify_omp_ctxp
->combined_loop
= true;
9553 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
9554 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
9555 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
9556 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
9558 tree c
= find_omp_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
9559 bool is_doacross
= false;
9560 if (c
&& OMP_CLAUSE_ORDERED_EXPR (c
))
9563 gimplify_omp_ctxp
->loop_iter_var
.create (TREE_VEC_LENGTH
9564 (OMP_FOR_INIT (for_stmt
))
9568 c
= find_omp_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
9570 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
9571 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
9573 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
9574 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
9575 decl
= TREE_OPERAND (t
, 0);
9576 gcc_assert (DECL_P (decl
));
9577 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
9578 || POINTER_TYPE_P (TREE_TYPE (decl
)));
9581 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
9582 gimplify_omp_ctxp
->loop_iter_var
.quick_push
9583 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
));
9585 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
9586 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
9589 /* Make sure the iteration variable is private. */
9591 tree c2
= NULL_TREE
;
9592 if (orig_for_stmt
!= for_stmt
)
9593 /* Do this only on innermost construct for combined ones. */;
9594 else if (ort
== ORT_SIMD
)
9596 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
9597 (splay_tree_key
) decl
);
9598 omp_is_private (gimplify_omp_ctxp
, decl
,
9599 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
9601 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
9602 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
9603 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
9605 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
9606 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
9607 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
9609 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
9611 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
9612 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
9614 struct gimplify_omp_ctx
*outer
9615 = gimplify_omp_ctxp
->outer_context
;
9616 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9618 if (outer
->region_type
== ORT_WORKSHARE
9619 && outer
->combined_loop
)
9621 n
= splay_tree_lookup (outer
->variables
,
9622 (splay_tree_key
)decl
);
9623 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
9625 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
9626 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
9630 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
9632 && octx
->region_type
== ORT_COMBINED_PARALLEL
9633 && octx
->outer_context
9634 && (octx
->outer_context
->region_type
9636 && octx
->outer_context
->combined_loop
)
9638 octx
= octx
->outer_context
;
9639 n
= splay_tree_lookup (octx
->variables
,
9640 (splay_tree_key
)decl
);
9641 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
9643 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
9644 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
9651 OMP_CLAUSE_DECL (c
) = decl
;
9652 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
9653 OMP_FOR_CLAUSES (for_stmt
) = c
;
9654 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
9655 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9657 if (outer
->region_type
== ORT_WORKSHARE
9658 && outer
->combined_loop
)
9660 if (outer
->outer_context
9661 && (outer
->outer_context
->region_type
9662 == ORT_COMBINED_PARALLEL
))
9663 outer
= outer
->outer_context
;
9664 else if (omp_check_private (outer
, decl
, false))
9667 else if (((outer
->region_type
& ORT_TASK
) != 0)
9668 && outer
->combined_loop
9669 && !omp_check_private (gimplify_omp_ctxp
,
9672 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
9674 omp_notice_variable (outer
, decl
, true);
9679 n
= splay_tree_lookup (outer
->variables
,
9680 (splay_tree_key
)decl
);
9681 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9683 omp_add_variable (outer
, decl
,
9684 GOVD_LASTPRIVATE
| GOVD_SEEN
);
9685 if (outer
->region_type
== ORT_COMBINED_PARALLEL
9686 && outer
->outer_context
9687 && (outer
->outer_context
->region_type
9689 && outer
->outer_context
->combined_loop
)
9691 outer
= outer
->outer_context
;
9692 n
= splay_tree_lookup (outer
->variables
,
9693 (splay_tree_key
)decl
);
9694 if (omp_check_private (outer
, decl
, false))
9697 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
9699 omp_add_variable (outer
, decl
,
9705 if (outer
&& outer
->outer_context
9706 && (outer
->outer_context
->region_type
9707 == ORT_COMBINED_TEAMS
))
9709 outer
= outer
->outer_context
;
9710 n
= splay_tree_lookup (outer
->variables
,
9711 (splay_tree_key
)decl
);
9713 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9714 omp_add_variable (outer
, decl
,
9715 GOVD_SHARED
| GOVD_SEEN
);
9719 if (outer
&& outer
->outer_context
)
9720 omp_notice_variable (outer
->outer_context
, decl
,
9730 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
9731 struct gimplify_omp_ctx
*outer
9732 = gimplify_omp_ctxp
->outer_context
;
9733 if (outer
&& lastprivate
)
9735 if (outer
->region_type
== ORT_WORKSHARE
9736 && outer
->combined_loop
)
9738 n
= splay_tree_lookup (outer
->variables
,
9739 (splay_tree_key
)decl
);
9740 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
9742 lastprivate
= false;
9745 else if (outer
->outer_context
9746 && (outer
->outer_context
->region_type
9747 == ORT_COMBINED_PARALLEL
))
9748 outer
= outer
->outer_context
;
9749 else if (omp_check_private (outer
, decl
, false))
9752 else if (((outer
->region_type
& ORT_TASK
) != 0)
9753 && outer
->combined_loop
9754 && !omp_check_private (gimplify_omp_ctxp
,
9757 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
9759 omp_notice_variable (outer
, decl
, true);
9764 n
= splay_tree_lookup (outer
->variables
,
9765 (splay_tree_key
)decl
);
9766 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9768 omp_add_variable (outer
, decl
,
9769 GOVD_LASTPRIVATE
| GOVD_SEEN
);
9770 if (outer
->region_type
== ORT_COMBINED_PARALLEL
9771 && outer
->outer_context
9772 && (outer
->outer_context
->region_type
9774 && outer
->outer_context
->combined_loop
)
9776 outer
= outer
->outer_context
;
9777 n
= splay_tree_lookup (outer
->variables
,
9778 (splay_tree_key
)decl
);
9779 if (omp_check_private (outer
, decl
, false))
9782 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
9784 omp_add_variable (outer
, decl
,
9790 if (outer
&& outer
->outer_context
9791 && (outer
->outer_context
->region_type
9792 == ORT_COMBINED_TEAMS
))
9794 outer
= outer
->outer_context
;
9795 n
= splay_tree_lookup (outer
->variables
,
9796 (splay_tree_key
)decl
);
9798 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9799 omp_add_variable (outer
, decl
,
9800 GOVD_SHARED
| GOVD_SEEN
);
9804 if (outer
&& outer
->outer_context
)
9805 omp_notice_variable (outer
->outer_context
, decl
,
9811 c
= build_omp_clause (input_location
,
9812 lastprivate
? OMP_CLAUSE_LASTPRIVATE
9813 : OMP_CLAUSE_PRIVATE
);
9814 OMP_CLAUSE_DECL (c
) = decl
;
9815 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
9816 OMP_FOR_CLAUSES (for_stmt
) = c
;
9817 omp_add_variable (gimplify_omp_ctxp
, decl
,
9818 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
9819 | GOVD_EXPLICIT
| GOVD_SEEN
);
9823 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
9824 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
9826 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
9828 /* If DECL is not a gimple register, create a temporary variable to act
9829 as an iteration counter. This is valid, since DECL cannot be
9830 modified in the body of the loop. Similarly for any iteration vars
9831 in simd with collapse > 1 where the iterator vars must be
9833 if (orig_for_stmt
!= for_stmt
)
9835 else if (!is_gimple_reg (decl
)
9837 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1))
9839 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9840 /* Make sure omp_add_variable is not called on it prematurely.
9841 We call it ourselves a few lines later. */
9842 gimplify_omp_ctxp
= NULL
;
9843 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
9844 gimplify_omp_ctxp
= ctx
;
9845 TREE_OPERAND (t
, 0) = var
;
9847 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
9850 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
9852 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
9853 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
9854 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
9855 OMP_CLAUSE_DECL (c2
) = var
;
9856 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
9857 OMP_FOR_CLAUSES (for_stmt
) = c2
;
9858 omp_add_variable (gimplify_omp_ctxp
, var
,
9859 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
9867 omp_add_variable (gimplify_omp_ctxp
, var
,
9868 GOVD_PRIVATE
| GOVD_SEEN
);
9873 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
9874 is_gimple_val
, fb_rvalue
, false);
9875 ret
= MIN (ret
, tret
);
9876 if (ret
== GS_ERROR
)
9879 /* Handle OMP_FOR_COND. */
9880 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
9881 gcc_assert (COMPARISON_CLASS_P (t
));
9882 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
9884 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
9885 is_gimple_val
, fb_rvalue
, false);
9886 ret
= MIN (ret
, tret
);
9888 /* Handle OMP_FOR_INCR. */
9889 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
9890 switch (TREE_CODE (t
))
9892 case PREINCREMENT_EXPR
:
9893 case POSTINCREMENT_EXPR
:
9895 tree decl
= TREE_OPERAND (t
, 0);
9896 /* c_omp_for_incr_canonicalize_ptr() should have been
9897 called to massage things appropriately. */
9898 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
9900 if (orig_for_stmt
!= for_stmt
)
9902 t
= build_int_cst (TREE_TYPE (decl
), 1);
9904 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
9905 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
9906 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
9907 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
9911 case PREDECREMENT_EXPR
:
9912 case POSTDECREMENT_EXPR
:
9913 /* c_omp_for_incr_canonicalize_ptr() should have been
9914 called to massage things appropriately. */
9915 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
9916 if (orig_for_stmt
!= for_stmt
)
9918 t
= build_int_cst (TREE_TYPE (decl
), -1);
9920 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
9921 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
9922 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
9923 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
9927 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
9928 TREE_OPERAND (t
, 0) = var
;
9930 t
= TREE_OPERAND (t
, 1);
9931 switch (TREE_CODE (t
))
9934 if (TREE_OPERAND (t
, 1) == decl
)
9936 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
9937 TREE_OPERAND (t
, 0) = var
;
9943 case POINTER_PLUS_EXPR
:
9944 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
9945 TREE_OPERAND (t
, 0) = var
;
9951 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
9952 is_gimple_val
, fb_rvalue
, false);
9953 ret
= MIN (ret
, tret
);
9956 tree step
= TREE_OPERAND (t
, 1);
9957 tree stept
= TREE_TYPE (decl
);
9958 if (POINTER_TYPE_P (stept
))
9960 step
= fold_convert (stept
, step
);
9961 if (TREE_CODE (t
) == MINUS_EXPR
)
9962 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
9963 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
9964 if (step
!= TREE_OPERAND (t
, 1))
9966 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
9967 &for_pre_body
, NULL
,
9968 is_gimple_val
, fb_rvalue
, false);
9969 ret
= MIN (ret
, tret
);
9981 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
9984 if ((var
!= decl
|| collapse
> 1) && orig_for_stmt
== for_stmt
)
9986 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9987 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
9988 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
9989 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9990 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
9991 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
9992 && OMP_CLAUSE_DECL (c
) == decl
)
9994 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
9998 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
9999 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
10000 gcc_assert (TREE_OPERAND (t
, 0) == var
);
10001 t
= TREE_OPERAND (t
, 1);
10002 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
10003 || TREE_CODE (t
) == MINUS_EXPR
10004 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
10005 gcc_assert (TREE_OPERAND (t
, 0) == var
);
10006 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
10007 is_doacross
? var
: decl
,
10008 TREE_OPERAND (t
, 1));
10011 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
10012 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
10014 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
10015 gimplify_assign (decl
, t
, seq
);
10020 BITMAP_FREE (has_decl_expr
);
10022 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10024 push_gimplify_context ();
10025 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
10027 OMP_FOR_BODY (orig_for_stmt
)
10028 = build3 (BIND_EXPR
, void_type_node
, NULL
,
10029 OMP_FOR_BODY (orig_for_stmt
), NULL
);
10030 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
10034 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
10037 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10039 if (gimple_code (g
) == GIMPLE_BIND
)
10040 pop_gimplify_context (g
);
10042 pop_gimplify_context (NULL
);
10045 if (orig_for_stmt
!= for_stmt
)
10046 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
10048 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
10049 decl
= TREE_OPERAND (t
, 0);
10050 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10051 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10052 gimplify_omp_ctxp
= ctx
->outer_context
;
10053 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
10054 gimplify_omp_ctxp
= ctx
;
10055 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
10056 TREE_OPERAND (t
, 0) = var
;
10057 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10058 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
10059 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
10062 gimplify_adjust_omp_clauses (pre_p
, for_body
,
10063 &OMP_FOR_CLAUSES (orig_for_stmt
),
10064 TREE_CODE (orig_for_stmt
));
10067 switch (TREE_CODE (orig_for_stmt
))
10069 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
10070 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
10071 case CILK_SIMD
: kind
= GF_OMP_FOR_KIND_CILKSIMD
; break;
10072 case CILK_FOR
: kind
= GF_OMP_FOR_KIND_CILKFOR
; break;
10073 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
10074 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
10075 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
10077 gcc_unreachable ();
10079 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
10080 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
10082 if (orig_for_stmt
!= for_stmt
)
10083 gimple_omp_for_set_combined_p (gfor
, true);
10084 if (gimplify_omp_ctxp
10085 && (gimplify_omp_ctxp
->combined_loop
10086 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
10087 && gimplify_omp_ctxp
->outer_context
10088 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
10090 gimple_omp_for_set_combined_into_p (gfor
, true);
10091 if (gimplify_omp_ctxp
->combined_loop
)
10092 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
10094 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
10097 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
10099 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
10100 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
10101 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
10102 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
10103 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
10104 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
10105 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10106 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
10109 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10110 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10111 The outer taskloop stands for computing the number of iterations,
10112 counts for collapsed loops and holding taskloop specific clauses.
10113 The task construct stands for the effect of data sharing on the
10114 explicit task it creates and the inner taskloop stands for expansion
10115 of the static loop inside of the explicit task construct. */
10116 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10118 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
10119 tree task_clauses
= NULL_TREE
;
10120 tree c
= *gfor_clauses_ptr
;
10121 tree
*gtask_clauses_ptr
= &task_clauses
;
10122 tree outer_for_clauses
= NULL_TREE
;
10123 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
10124 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
10125 switch (OMP_CLAUSE_CODE (c
))
10127 /* These clauses are allowed on task, move them there. */
10128 case OMP_CLAUSE_SHARED
:
10129 case OMP_CLAUSE_FIRSTPRIVATE
:
10130 case OMP_CLAUSE_DEFAULT
:
10131 case OMP_CLAUSE_IF
:
10132 case OMP_CLAUSE_UNTIED
:
10133 case OMP_CLAUSE_FINAL
:
10134 case OMP_CLAUSE_MERGEABLE
:
10135 case OMP_CLAUSE_PRIORITY
:
10136 *gtask_clauses_ptr
= c
;
10137 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10139 case OMP_CLAUSE_PRIVATE
:
10140 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
10142 /* We want private on outer for and firstprivate
10145 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10146 OMP_CLAUSE_FIRSTPRIVATE
);
10147 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10148 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
10149 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
10150 *gforo_clauses_ptr
= c
;
10151 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10155 *gtask_clauses_ptr
= c
;
10156 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10159 /* These clauses go into outer taskloop clauses. */
10160 case OMP_CLAUSE_GRAINSIZE
:
10161 case OMP_CLAUSE_NUM_TASKS
:
10162 case OMP_CLAUSE_NOGROUP
:
10163 *gforo_clauses_ptr
= c
;
10164 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10166 /* Taskloop clause we duplicate on both taskloops. */
10167 case OMP_CLAUSE_COLLAPSE
:
10168 *gfor_clauses_ptr
= c
;
10169 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10170 *gforo_clauses_ptr
= copy_node (c
);
10171 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
10173 /* For lastprivate, keep the clause on inner taskloop, and add
10174 a shared clause on task. If the same decl is also firstprivate,
10175 add also firstprivate clause on the inner taskloop. */
10176 case OMP_CLAUSE_LASTPRIVATE
:
10177 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
10179 /* For taskloop C++ lastprivate IVs, we want:
10180 1) private on outer taskloop
10181 2) firstprivate and shared on task
10182 3) lastprivate on inner taskloop */
10184 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10185 OMP_CLAUSE_FIRSTPRIVATE
);
10186 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10187 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
10188 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
10189 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
10190 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10191 OMP_CLAUSE_PRIVATE
);
10192 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10193 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
10194 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
10195 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
10197 *gfor_clauses_ptr
= c
;
10198 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10200 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
10201 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10202 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
10203 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
10205 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
10208 gcc_unreachable ();
10210 *gfor_clauses_ptr
= NULL_TREE
;
10211 *gtask_clauses_ptr
= NULL_TREE
;
10212 *gforo_clauses_ptr
= NULL_TREE
;
10213 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
10214 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
10215 NULL_TREE
, NULL_TREE
, NULL_TREE
);
10216 gimple_omp_task_set_taskloop_p (g
, true);
10217 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
10219 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
10220 gimple_omp_for_collapse (gfor
),
10221 gimple_omp_for_pre_body (gfor
));
10222 gimple_omp_for_set_pre_body (gfor
, NULL
);
10223 gimple_omp_for_set_combined_p (gforo
, true);
10224 gimple_omp_for_set_combined_into_p (gfor
, true);
10225 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
10227 t
= unshare_expr (gimple_omp_for_index (gfor
, i
));
10228 gimple_omp_for_set_index (gforo
, i
, t
);
10229 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
10230 gimple_omp_for_set_initial (gforo
, i
, t
);
10231 gimple_omp_for_set_cond (gforo
, i
,
10232 gimple_omp_for_cond (gfor
, i
));
10233 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
10234 gimple_omp_for_set_final (gforo
, i
, t
);
10235 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
10236 gimple_omp_for_set_incr (gforo
, i
, t
);
10238 gimplify_seq_add_stmt (pre_p
, gforo
);
10241 gimplify_seq_add_stmt (pre_p
, gfor
);
10242 if (ret
!= GS_ALL_DONE
)
10244 *expr_p
= NULL_TREE
;
10245 return GS_ALL_DONE
;
10248 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10249 of OMP_TARGET's body. */
/* NOTE(review): walk_tree callback; defaults to not walking subtrees,
   re-enables walking for statement lists.  The case label(s) around
   line 10256-10259 (presumably OMP_TEAMS, returning *tp — confirm) are
   on missing original lines.  Tokens preserved byte-for-byte.  */
10252 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
10254 *walk_subtrees
= 0;
10255 switch (TREE_CODE (*tp
))
10260 case STATEMENT_LIST
:
10261 *walk_subtrees
= 1;
10269 /* Helper function of optimize_target_teams, determine if the expression
10270 can be computed safely before the target construct on the host. */
/* NOTE(review): extraction artifact — statements shredded across lines
   with original line numbers fused in; numbering gaps mark missing
   original lines (return type, case labels, the return values of most
   branches, closing braces).  Tokens preserved byte-for-byte.  Walk
   callback: a non-NULL-ish result presumably means "NOT computable on
   the host" — TODO confirm against the missing return statements.  */
10273 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
10279 *walk_subtrees
= 0;
10282 switch (TREE_CODE (*tp
))
/* Decl case: reject error operands, non-integral types, value-exprs,
   TLS, side effects and volatiles outright.  */
10287 *walk_subtrees
= 0;
10288 if (error_operand_p (*tp
)
10289 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
10290 || DECL_HAS_VALUE_EXPR_P (*tp
)
10291 || DECL_THREAD_LOCAL_P (*tp
)
10292 || TREE_SIDE_EFFECTS (*tp
)
10293 || TREE_THIS_VOLATILE (*tp
))
/* Globals marked "omp declare target" (or the link variant) are only
   present on the device; handled specially here (result line missing).  */
10295 if (is_global_var (*tp
)
10296 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
10297 || lookup_attribute ("omp declare target link",
10298 DECL_ATTRIBUTES (*tp
))))
/* Function-local decls not seen in a bind expr: consult the gimplify
   OMP context's data-sharing classification.  */
10301 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
10302 && !is_global_var (*tp
)
10303 && decl_function_context (*tp
) == current_function_decl
)
10305 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
10306 (splay_tree_key
) *tp
);
/* Classification branches: scalars mapped firstprivate by default,
   GOVD_LOCAL, explicit GOVD_FIRSTPRIVATE, or map(always, to:)/
   map(always, tofrom:) — the per-branch results are on missing lines.  */
10309 if (gimplify_omp_ctxp
->target_map_scalars_firstprivate
)
10313 else if (n
->value
& GOVD_LOCAL
)
10315 else if (n
->value
& GOVD_FIRSTPRIVATE
)
10317 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
10318 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
10322 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
/* TARGET_EXPR: only acceptable when it has no initializer and its slot
   is a plain VAR_DECL; then recurse on the slot.  */
10326 if (TARGET_EXPR_INITIAL (*tp
)
10327 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
10329 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
10330 walk_subtrees
, NULL
);
10331 /* Allow some reasonable subset of integral arithmetics. */
10335 case TRUNC_DIV_EXPR
:
10336 case CEIL_DIV_EXPR
:
10337 case FLOOR_DIV_EXPR
:
10338 case ROUND_DIV_EXPR
:
10339 case TRUNC_MOD_EXPR
:
10340 case CEIL_MOD_EXPR
:
10341 case FLOOR_MOD_EXPR
:
10342 case ROUND_MOD_EXPR
:
10344 case EXACT_DIV_EXPR
:
10355 case NON_LVALUE_EXPR
:
10357 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
10360 /* And disallow anything else, except for comparisons. */
10362 if (COMPARISON_CLASS_P (*tp
))
10368 /* Try to determine if the num_teams and/or thread_limit expressions
10369 can have their values determined already before entering the
10371 INTEGER_CSTs trivially are,
10372 integral decls that are firstprivate (explicitly or implicitly)
10373 or explicitly map(always, to:) or map(always, tofrom:) on the target
10374 region too, and expressions involving simple arithmetics on those
10375 too, function calls are not ok, dereferencing something neither etc.
10376 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10377 EXPR based on what we find:
10378 0 stands for clause not specified at all, use implementation default
10379 -1 stands for value that can't be determined easily before entering
10380 the target construct.
10381 If teams construct is not present at all, use 1 for num_teams
10382 and 0 for thread_limit (only one team is involved, and the thread
10383 limit is implementation defined. */
/* NOTE(review): extraction artifact — statements shredded across lines
   with original line numbers fused in; numbering gaps mark missing
   original lines (return type, some locals such as `c`, `expr` and `p`,
   several branch bodies, closing braces).  Tokens byte-for-byte.  */
10386 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
10388 tree body
= OMP_BODY (target
);
/* Locate the teams construct (if any) inside the target body.  */
10389 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
/* Defaults per the head comment: 0 == "clause not specified".  */
10390 tree num_teams
= integer_zero_node
;
10391 tree thread_limit
= integer_zero_node
;
10392 location_t num_teams_loc
= EXPR_LOCATION (target
);
10393 location_t thread_limit_loc
= EXPR_LOCATION (target
);
10395 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
/* No teams construct: exactly one team runs.  */
10397 if (teams
== NULL_TREE
)
10398 num_teams
= integer_one_node
;
/* Otherwise inspect the teams clauses for num_teams / thread_limit;
   the assignments into `p` (lines 10403-10404, 10408-10409) are on
   missing lines.  */
10400 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10402 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
10405 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
10407 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
10410 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
10414 expr
= OMP_CLAUSE_OPERAND (c
, 0);
/* INTEGER_CST is trivially host-computable (branch body missing).  */
10415 if (TREE_CODE (expr
) == INTEGER_CST
)
/* Non-NULL from the walk means the expression is not safely
   host-computable: record -1.  */
10420 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
10422 *p
= integer_minus_one_node
;
/* Gimplify the expression outside the target context; on failure,
   restore the context and record -1.  */
10426 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
10427 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
10430 gimplify_omp_ctxp
= target_ctx
;
10431 *p
= integer_minus_one_node
;
10434 gimplify_omp_ctxp
= target_ctx
;
10435 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
10436 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
/* Prepend THREAD_LIMIT then NUM_TEAMS clauses onto the target.  */
10438 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
10439 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
10440 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
10441 OMP_TARGET_CLAUSES (target
) = c
;
10442 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
10443 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
10444 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
10445 OMP_TARGET_CLAUSES (target
) = c
;
10448 /* Gimplify the gross structure of several OMP constructs. */
10451 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
10453 tree expr
= *expr_p
;
10455 gimple_seq body
= NULL
;
10456 enum omp_region_type ort
;
10458 switch (TREE_CODE (expr
))
10462 ort
= ORT_WORKSHARE
;
10465 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
10468 ort
= ORT_ACC_KERNELS
;
10470 case OACC_PARALLEL
:
10471 ort
= ORT_ACC_PARALLEL
;
10474 ort
= ORT_ACC_DATA
;
10476 case OMP_TARGET_DATA
:
10477 ort
= ORT_TARGET_DATA
;
10480 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
10482 case OACC_HOST_DATA
:
10483 ort
= ORT_ACC_HOST_DATA
;
10486 gcc_unreachable ();
10488 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
10490 if (TREE_CODE (expr
) == OMP_TARGET
)
10491 optimize_target_teams (expr
, pre_p
);
10492 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0)
10494 push_gimplify_context ();
10495 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
10496 if (gimple_code (g
) == GIMPLE_BIND
)
10497 pop_gimplify_context (g
);
10499 pop_gimplify_context (NULL
);
10500 if ((ort
& ORT_TARGET_DATA
) != 0)
10502 enum built_in_function end_ix
;
10503 switch (TREE_CODE (expr
))
10506 case OACC_HOST_DATA
:
10507 end_ix
= BUILT_IN_GOACC_DATA_END
;
10509 case OMP_TARGET_DATA
:
10510 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
10513 gcc_unreachable ();
10515 tree fn
= builtin_decl_explicit (end_ix
);
10516 g
= gimple_build_call (fn
, 0);
10517 gimple_seq cleanup
= NULL
;
10518 gimple_seq_add_stmt (&cleanup
, g
);
10519 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
10521 gimple_seq_add_stmt (&body
, g
);
10525 gimplify_and_add (OMP_BODY (expr
), &body
);
10526 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
10529 switch (TREE_CODE (expr
))
10532 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
10533 OMP_CLAUSES (expr
));
10536 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
10537 OMP_CLAUSES (expr
));
10539 case OACC_HOST_DATA
:
10540 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
10541 OMP_CLAUSES (expr
));
10543 case OACC_PARALLEL
:
10544 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
10545 OMP_CLAUSES (expr
));
10548 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
10551 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
10554 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
10555 OMP_CLAUSES (expr
));
10557 case OMP_TARGET_DATA
:
10558 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
10559 OMP_CLAUSES (expr
));
10562 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
10565 gcc_unreachable ();
10568 gimplify_seq_add_stmt (pre_p
, stmt
);
10569 *expr_p
= NULL_TREE
;
10572 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10573 target update constructs. */
10576 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
10578 tree expr
= *expr_p
;
10581 enum omp_region_type ort
= ORT_WORKSHARE
;
10583 switch (TREE_CODE (expr
))
10585 case OACC_ENTER_DATA
:
10586 case OACC_EXIT_DATA
:
10587 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
;
10591 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
10594 case OMP_TARGET_UPDATE
:
10595 kind
= GF_OMP_TARGET_KIND_UPDATE
;
10597 case OMP_TARGET_ENTER_DATA
:
10598 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
10600 case OMP_TARGET_EXIT_DATA
:
10601 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
10604 gcc_unreachable ();
10606 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
10607 ort
, TREE_CODE (expr
));
10608 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
10610 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
10612 gimplify_seq_add_stmt (pre_p
, stmt
);
10613 *expr_p
= NULL_TREE
;
10616 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10617 stabilized the lhs of the atomic operation as *ADDR. Return true if
10618 EXPR is this stabilized form. */
10621 goa_lhs_expr_p (tree expr
, tree addr
)
10623 /* Also include casts to other type variants. The C front end is fond
10624 of adding these for e.g. volatile variables. This is like
10625 STRIP_TYPE_NOPS but includes the main variant lookup. */
10626 STRIP_USELESS_TYPE_CONVERSION (expr
);
10628 if (TREE_CODE (expr
) == INDIRECT_REF
)
10630 expr
= TREE_OPERAND (expr
, 0);
10631 while (expr
!= addr
10632 && (CONVERT_EXPR_P (expr
)
10633 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
10634 && TREE_CODE (expr
) == TREE_CODE (addr
)
10635 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
10637 expr
= TREE_OPERAND (expr
, 0);
10638 addr
= TREE_OPERAND (addr
, 0);
10642 return (TREE_CODE (addr
) == ADDR_EXPR
10643 && TREE_CODE (expr
) == ADDR_EXPR
10644 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
10646 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
10651 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10652 expression does not involve the lhs, evaluate it into a temporary.
10653 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10654 or -1 if an error was encountered. */
10657 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
10660 tree expr
= *expr_p
;
10663 if (goa_lhs_expr_p (expr
, lhs_addr
))
10668 if (is_gimple_val (expr
))
10672 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
10675 case tcc_comparison
:
10676 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
10680 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
10683 case tcc_expression
:
10684 switch (TREE_CODE (expr
))
10686 case TRUTH_ANDIF_EXPR
:
10687 case TRUTH_ORIF_EXPR
:
10688 case TRUTH_AND_EXPR
:
10689 case TRUTH_OR_EXPR
:
10690 case TRUTH_XOR_EXPR
:
10691 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
10692 lhs_addr
, lhs_var
);
10694 case TRUTH_NOT_EXPR
:
10695 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
10696 lhs_addr
, lhs_var
);
10698 case COMPOUND_EXPR
:
10699 /* Break out any preevaluations from cp_build_modify_expr. */
10700 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
10701 expr
= TREE_OPERAND (expr
, 1))
10702 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
10704 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
);
10715 enum gimplify_status gs
;
10716 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
10717 if (gs
!= GS_ALL_DONE
)
10724 /* Gimplify an OMP_ATOMIC statement. */
10726 static enum gimplify_status
10727 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
10729 tree addr
= TREE_OPERAND (*expr_p
, 0);
10730 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
10731 ? NULL
: TREE_OPERAND (*expr_p
, 1);
10732 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
10734 gomp_atomic_load
*loadstmt
;
10735 gomp_atomic_store
*storestmt
;
10737 tmp_load
= create_tmp_reg (type
);
10738 if (rhs
&& goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
) < 0)
10741 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
10745 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
);
10746 gimplify_seq_add_stmt (pre_p
, loadstmt
);
10747 if (rhs
&& gimplify_expr (&rhs
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
10751 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
10753 storestmt
= gimple_build_omp_atomic_store (rhs
);
10754 gimplify_seq_add_stmt (pre_p
, storestmt
);
10755 if (OMP_ATOMIC_SEQ_CST (*expr_p
))
10757 gimple_omp_atomic_set_seq_cst (loadstmt
);
10758 gimple_omp_atomic_set_seq_cst (storestmt
);
10760 switch (TREE_CODE (*expr_p
))
10762 case OMP_ATOMIC_READ
:
10763 case OMP_ATOMIC_CAPTURE_OLD
:
10764 *expr_p
= tmp_load
;
10765 gimple_omp_atomic_set_need_value (loadstmt
);
10767 case OMP_ATOMIC_CAPTURE_NEW
:
10769 gimple_omp_atomic_set_need_value (storestmt
);
10776 return GS_ALL_DONE
;
10779 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10780 body, and adding some EH bits. */
10782 static enum gimplify_status
10783 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
10785 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
10787 gtransaction
*trans_stmt
;
10788 gimple_seq body
= NULL
;
10791 /* Wrap the transaction body in a BIND_EXPR so we have a context
10792 where to put decls for OMP. */
10793 if (TREE_CODE (tbody
) != BIND_EXPR
)
10795 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
10796 TREE_SIDE_EFFECTS (bind
) = 1;
10797 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
10798 TRANSACTION_EXPR_BODY (expr
) = bind
;
10801 push_gimplify_context ();
10802 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
10804 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
10805 pop_gimplify_context (body_stmt
);
10807 trans_stmt
= gimple_build_transaction (body
);
10808 if (TRANSACTION_EXPR_OUTER (expr
))
10809 subcode
= GTMA_IS_OUTER
;
10810 else if (TRANSACTION_EXPR_RELAXED (expr
))
10811 subcode
= GTMA_IS_RELAXED
;
10812 gimple_transaction_set_subcode (trans_stmt
, subcode
);
10814 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
10822 *expr_p
= NULL_TREE
;
10823 return GS_ALL_DONE
;
10826 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10827 is the OMP_BODY of the original EXPR (which has already been
10828 gimplified so it's not present in the EXPR).
10830 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10833 gimplify_omp_ordered (tree expr
, gimple_seq body
)
10838 tree source_c
= NULL_TREE
;
10839 tree sink_c
= NULL_TREE
;
10841 if (gimplify_omp_ctxp
)
10843 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10844 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
10845 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ()
10846 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
10847 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
))
10849 error_at (OMP_CLAUSE_LOCATION (c
),
10850 "%<ordered%> construct with %<depend%> clause must be "
10851 "closely nested inside a loop with %<ordered%> clause "
10852 "with a parameter");
10855 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
10856 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
10859 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
10860 decls
&& TREE_CODE (decls
) == TREE_LIST
;
10861 decls
= TREE_CHAIN (decls
), ++i
)
10862 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
10864 else if (TREE_VALUE (decls
)
10865 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
10867 error_at (OMP_CLAUSE_LOCATION (c
),
10868 "variable %qE is not an iteration "
10869 "of outermost loop %d, expected %qE",
10870 TREE_VALUE (decls
), i
+ 1,
10871 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
10877 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
10878 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
10880 error_at (OMP_CLAUSE_LOCATION (c
),
10881 "number of variables in %<depend(sink)%> "
10882 "clause does not match number of "
10883 "iteration variables");
10888 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
10889 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
10893 error_at (OMP_CLAUSE_LOCATION (c
),
10894 "more than one %<depend(source)%> clause on an "
10895 "%<ordered%> construct");
10902 if (source_c
&& sink_c
)
10904 error_at (OMP_CLAUSE_LOCATION (source_c
),
10905 "%<depend(source)%> clause specified together with "
10906 "%<depend(sink:)%> clauses on the same construct");
10911 return gimple_build_nop ();
10912 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
10915 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
10916 expression produces a value to be used as an operand inside a GIMPLE
10917 statement, the value will be stored back in *EXPR_P. This value will
10918 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
10919 an SSA_NAME. The corresponding sequence of GIMPLE statements is
10920 emitted in PRE_P and POST_P.
10922 Additionally, this process may overwrite parts of the input
10923 expression during gimplification. Ideally, it should be
10924 possible to do non-destructive gimplification.
10926 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
10927 the expression needs to evaluate to a value to be used as
10928 an operand in a GIMPLE statement, this value will be stored in
10929 *EXPR_P on exit. This happens when the caller specifies one
10930 of fb_lvalue or fb_rvalue fallback flags.
10932 PRE_P will contain the sequence of GIMPLE statements corresponding
10933 to the evaluation of EXPR and all the side-effects that must
10934 be executed before the main expression. On exit, the last
10935 statement of PRE_P is the core statement being gimplified. For
10936 instance, when gimplifying 'if (++a)' the last statement in
10937 PRE_P will be 'if (t.1)' where t.1 is the result of
10938 pre-incrementing 'a'.
10940 POST_P will contain the sequence of GIMPLE statements corresponding
10941 to the evaluation of all the side-effects that must be executed
10942 after the main expression. If this is NULL, the post
10943 side-effects are stored at the end of PRE_P.
10945 The reason why the output is split in two is to handle post
10946 side-effects explicitly. In some cases, an expression may have
10947 inner and outer post side-effects which need to be emitted in
10948 an order different from the one given by the recursive
10949 traversal. For instance, for the expression (*p--)++ the post
10950 side-effects of '--' must actually occur *after* the post
10951 side-effects of '++'. However, gimplification will first visit
10952 the inner expression, so if a separate POST sequence was not
10953 used, the resulting sequence would be:
10960 However, the post-decrement operation in line #2 must not be
10961 evaluated until after the store to *p at line #4, so the
10962 correct sequence should be:
10969 So, by specifying a separate post queue, it is possible
10970 to emit the post side-effects in the correct order.
10971 If POST_P is NULL, an internal queue will be used. Before
10972 returning to the caller, the sequence POST_P is appended to
10973 the main output sequence PRE_P.
10975 GIMPLE_TEST_F points to a function that takes a tree T and
10976 returns nonzero if T is in the GIMPLE form requested by the
10977 caller. The GIMPLE predicates are in gimple.c.
10979 FALLBACK tells the function what sort of a temporary we want if
10980 gimplification cannot produce an expression that complies with
10983 fb_none means that no temporary should be generated
10984 fb_rvalue means that an rvalue is OK to generate
10985 fb_lvalue means that an lvalue is OK to generate
10986 fb_either means that either is OK, but an lvalue is preferable.
10987 fb_mayfail means that gimplification may fail (in which case
10988 GS_ERROR will be returned)
10990 The return value is either GS_ERROR or GS_ALL_DONE, since this
10991 function iterates until EXPR is completely gimplified or an error
10994 enum gimplify_status
10995 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
10996 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
10999 gimple_seq internal_pre
= NULL
;
11000 gimple_seq internal_post
= NULL
;
11003 location_t saved_location
;
11004 enum gimplify_status ret
;
11005 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
11008 save_expr
= *expr_p
;
11009 if (save_expr
== NULL_TREE
)
11010 return GS_ALL_DONE
;
11012 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11013 is_statement
= gimple_test_f
== is_gimple_stmt
;
11015 gcc_assert (pre_p
);
11017 /* Consistency checks. */
11018 if (gimple_test_f
== is_gimple_reg
)
11019 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
11020 else if (gimple_test_f
== is_gimple_val
11021 || gimple_test_f
== is_gimple_call_addr
11022 || gimple_test_f
== is_gimple_condexpr
11023 || gimple_test_f
== is_gimple_mem_rhs
11024 || gimple_test_f
== is_gimple_mem_rhs_or_call
11025 || gimple_test_f
== is_gimple_reg_rhs
11026 || gimple_test_f
== is_gimple_reg_rhs_or_call
11027 || gimple_test_f
== is_gimple_asm_val
11028 || gimple_test_f
== is_gimple_mem_ref_addr
)
11029 gcc_assert (fallback
& fb_rvalue
);
11030 else if (gimple_test_f
== is_gimple_min_lval
11031 || gimple_test_f
== is_gimple_lvalue
)
11032 gcc_assert (fallback
& fb_lvalue
);
11033 else if (gimple_test_f
== is_gimple_addressable
)
11034 gcc_assert (fallback
& fb_either
);
11035 else if (gimple_test_f
== is_gimple_stmt
)
11036 gcc_assert (fallback
== fb_none
);
11039 /* We should have recognized the GIMPLE_TEST_F predicate to
11040 know what kind of fallback to use in case a temporary is
11041 needed to hold the value or address of *EXPR_P. */
11042 gcc_unreachable ();
11045 /* We used to check the predicate here and return immediately if it
11046 succeeds. This is wrong; the design is for gimplification to be
11047 idempotent, and for the predicates to only test for valid forms, not
11048 whether they are fully simplified. */
11050 pre_p
= &internal_pre
;
11052 if (post_p
== NULL
)
11053 post_p
= &internal_post
;
11055 /* Remember the last statements added to PRE_P and POST_P. Every
11056 new statement added by the gimplification helpers needs to be
11057 annotated with location information. To centralize the
11058 responsibility, we remember the last statement that had been
11059 added to both queues before gimplifying *EXPR_P. If
11060 gimplification produces new statements in PRE_P and POST_P, those
11061 statements will be annotated with the same location information
11063 pre_last_gsi
= gsi_last (*pre_p
);
11064 post_last_gsi
= gsi_last (*post_p
);
11066 saved_location
= input_location
;
11067 if (save_expr
!= error_mark_node
11068 && EXPR_HAS_LOCATION (*expr_p
))
11069 input_location
= EXPR_LOCATION (*expr_p
);
11071 /* Loop over the specific gimplifiers until the toplevel node
11072 remains the same. */
11075 /* Strip away as many useless type conversions as possible
11076 at the toplevel. */
11077 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
11079 /* Remember the expr. */
11080 save_expr
= *expr_p
;
11082 /* Die, die, die, my darling. */
11083 if (save_expr
== error_mark_node
11084 || (TREE_TYPE (save_expr
)
11085 && TREE_TYPE (save_expr
) == error_mark_node
))
11091 /* Do any language-specific gimplification. */
11092 ret
= ((enum gimplify_status
)
11093 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
11096 if (*expr_p
== NULL_TREE
)
11098 if (*expr_p
!= save_expr
)
11101 else if (ret
!= GS_UNHANDLED
)
11104 /* Make sure that all the cases set 'ret' appropriately. */
11105 ret
= GS_UNHANDLED
;
11106 switch (TREE_CODE (*expr_p
))
11108 /* First deal with the special cases. */
11110 case POSTINCREMENT_EXPR
:
11111 case POSTDECREMENT_EXPR
:
11112 case PREINCREMENT_EXPR
:
11113 case PREDECREMENT_EXPR
:
11114 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
11115 fallback
!= fb_none
,
11116 TREE_TYPE (*expr_p
));
11119 case VIEW_CONVERT_EXPR
:
11120 if (is_gimple_reg_type (TREE_TYPE (*expr_p
))
11121 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
11123 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11124 post_p
, is_gimple_val
, fb_rvalue
);
11125 recalculate_side_effects (*expr_p
);
11131 case ARRAY_RANGE_REF
:
11132 case REALPART_EXPR
:
11133 case IMAGPART_EXPR
:
11134 case COMPONENT_REF
:
11135 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
11136 fallback
? fallback
: fb_rvalue
);
11140 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
11142 /* C99 code may assign to an array in a structure value of a
11143 conditional expression, and this has undefined behavior
11144 only on execution, so create a temporary if an lvalue is
11146 if (fallback
== fb_lvalue
)
11148 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
11149 mark_addressable (*expr_p
);
11155 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
11157 /* C99 code may assign to an array in a structure returned
11158 from a function, and this has undefined behavior only on
11159 execution, so create a temporary if an lvalue is
11161 if (fallback
== fb_lvalue
)
11163 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
11164 mark_addressable (*expr_p
);
11170 gcc_unreachable ();
11172 case COMPOUND_EXPR
:
11173 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
11176 case COMPOUND_LITERAL_EXPR
:
11177 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
11178 gimple_test_f
, fallback
);
11183 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
11184 fallback
!= fb_none
);
11187 case TRUTH_ANDIF_EXPR
:
11188 case TRUTH_ORIF_EXPR
:
11190 /* Preserve the original type of the expression and the
11191 source location of the outer expression. */
11192 tree org_type
= TREE_TYPE (*expr_p
);
11193 *expr_p
= gimple_boolify (*expr_p
);
11194 *expr_p
= build3_loc (input_location
, COND_EXPR
,
11198 org_type
, boolean_true_node
),
11201 org_type
, boolean_false_node
));
11206 case TRUTH_NOT_EXPR
:
11208 tree type
= TREE_TYPE (*expr_p
);
11209 /* The parsers are careful to generate TRUTH_NOT_EXPR
11210 only with operands that are always zero or one.
11211 We do not fold here but handle the only interesting case
11212 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11213 *expr_p
= gimple_boolify (*expr_p
);
11214 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
11215 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
11216 TREE_TYPE (*expr_p
),
11217 TREE_OPERAND (*expr_p
, 0));
11219 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
11220 TREE_TYPE (*expr_p
),
11221 TREE_OPERAND (*expr_p
, 0),
11222 build_int_cst (TREE_TYPE (*expr_p
), 1));
11223 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
11224 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
11230 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
11233 case ANNOTATE_EXPR
:
11235 tree cond
= TREE_OPERAND (*expr_p
, 0);
11236 tree kind
= TREE_OPERAND (*expr_p
, 1);
11237 tree type
= TREE_TYPE (cond
);
11238 if (!INTEGRAL_TYPE_P (type
))
11244 tree tmp
= create_tmp_var (type
);
11245 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
11247 = gimple_build_call_internal (IFN_ANNOTATE
, 2, cond
, kind
);
11248 gimple_call_set_lhs (call
, tmp
);
11249 gimplify_seq_add_stmt (pre_p
, call
);
11256 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
11260 if (IS_EMPTY_STMT (*expr_p
))
11266 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
11267 || fallback
== fb_none
)
11269 /* Just strip a conversion to void (or in void context) and
11271 *expr_p
= TREE_OPERAND (*expr_p
, 0);
11276 ret
= gimplify_conversion (expr_p
);
11277 if (ret
== GS_ERROR
)
11279 if (*expr_p
!= save_expr
)
11283 case FIX_TRUNC_EXPR
:
11284 /* unary_expr: ... | '(' cast ')' val | ... */
11285 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
11286 is_gimple_val
, fb_rvalue
);
11287 recalculate_side_effects (*expr_p
);
11292 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
11293 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
11294 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
11296 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
11297 if (*expr_p
!= save_expr
)
11303 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
11304 is_gimple_reg
, fb_rvalue
);
11305 if (ret
== GS_ERROR
)
11308 recalculate_side_effects (*expr_p
);
11309 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
11310 TREE_TYPE (*expr_p
),
11311 TREE_OPERAND (*expr_p
, 0),
11312 build_int_cst (saved_ptr_type
, 0));
11313 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
11314 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
11319 /* We arrive here through the various re-gimplifcation paths. */
11321 /* First try re-folding the whole thing. */
11322 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
11323 TREE_OPERAND (*expr_p
, 0),
11324 TREE_OPERAND (*expr_p
, 1));
11327 REF_REVERSE_STORAGE_ORDER (tmp
)
11328 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
11330 recalculate_side_effects (*expr_p
);
11334 /* Avoid re-gimplifying the address operand if it is already
11335 in suitable form. Re-gimplifying would mark the address
11336 operand addressable. Always gimplify when not in SSA form
11337 as we still may have to gimplify decls with value-exprs. */
11338 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
11339 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
11341 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
11342 is_gimple_mem_ref_addr
, fb_rvalue
);
11343 if (ret
== GS_ERROR
)
11346 recalculate_side_effects (*expr_p
);
11350 /* Constants need not be gimplified. */
11357 /* Drop the overflow flag on constants, we do not want
11358 that in the GIMPLE IL. */
11359 if (TREE_OVERFLOW_P (*expr_p
))
11360 *expr_p
= drop_tree_overflow (*expr_p
);
11365 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11366 CONST_DECL node. Otherwise the decl is replaceable by its
11368 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11369 if (fallback
& fb_lvalue
)
11373 *expr_p
= DECL_INITIAL (*expr_p
);
11379 ret
= gimplify_decl_expr (expr_p
, pre_p
);
11383 ret
= gimplify_bind_expr (expr_p
, pre_p
);
11387 ret
= gimplify_loop_expr (expr_p
, pre_p
);
11391 ret
= gimplify_switch_expr (expr_p
, pre_p
);
11395 ret
= gimplify_exit_expr (expr_p
);
11399 /* If the target is not LABEL, then it is a computed jump
11400 and the target needs to be gimplified. */
11401 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
11403 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
11404 NULL
, is_gimple_val
, fb_rvalue
);
11405 if (ret
== GS_ERROR
)
11408 gimplify_seq_add_stmt (pre_p
,
11409 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
11414 gimplify_seq_add_stmt (pre_p
,
11415 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
11416 PREDICT_EXPR_OUTCOME (*expr_p
)));
11421 ret
= gimplify_label_expr (expr_p
, pre_p
);
11422 label
= LABEL_EXPR_LABEL (*expr_p
);
11423 gcc_assert (decl_function_context (label
) == current_function_decl
);
11425 /* If the label is used in a goto statement, or address of the label
11426 is taken, we need to unpoison all variables that were seen so far.
11427 Doing so would prevent us from reporting a false positives. */
11428 if (asan_sanitize_use_after_scope ()
11429 && asan_used_labels
!= NULL
11430 && asan_used_labels
->contains (label
))
11431 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
11434 case CASE_LABEL_EXPR
:
11435 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
11437 if (gimplify_ctxp
->live_switch_vars
)
11438 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
11443 ret
= gimplify_return_expr (*expr_p
, pre_p
);
11447 /* Don't reduce this in place; let gimplify_init_constructor work its
11448 magic. Buf if we're just elaborating this for side effects, just
11449 gimplify any element that has side-effects. */
11450 if (fallback
== fb_none
)
11452 unsigned HOST_WIDE_INT ix
;
11454 tree temp
= NULL_TREE
;
11455 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
11456 if (TREE_SIDE_EFFECTS (val
))
11457 append_to_statement_list (val
, &temp
);
11460 ret
= temp
? GS_OK
: GS_ALL_DONE
;
11462 /* C99 code may assign to an array in a constructed
11463 structure or union, and this has undefined behavior only
11464 on execution, so create a temporary if an lvalue is
11466 else if (fallback
== fb_lvalue
)
11468 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
11469 mark_addressable (*expr_p
);
11476 /* The following are special cases that are not handled by the
11477 original GIMPLE grammar. */
11479 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11482 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
11485 case BIT_FIELD_REF
:
11486 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11487 post_p
, is_gimple_lvalue
, fb_either
);
11488 recalculate_side_effects (*expr_p
);
11491 case TARGET_MEM_REF
:
11493 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
11495 if (TMR_BASE (*expr_p
))
11496 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
11497 post_p
, is_gimple_mem_ref_addr
, fb_either
);
11498 if (TMR_INDEX (*expr_p
))
11499 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
11500 post_p
, is_gimple_val
, fb_rvalue
);
11501 if (TMR_INDEX2 (*expr_p
))
11502 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
11503 post_p
, is_gimple_val
, fb_rvalue
);
11504 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11505 ret
= MIN (r0
, r1
);
11509 case NON_LVALUE_EXPR
:
11510 /* This should have been stripped above. */
11511 gcc_unreachable ();
11514 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
11517 case TRY_FINALLY_EXPR
:
11518 case TRY_CATCH_EXPR
:
11520 gimple_seq eval
, cleanup
;
11523 /* Calls to destructors are generated automatically in FINALLY/CATCH
11524 block. They should have location as UNKNOWN_LOCATION. However,
11525 gimplify_call_expr will reset these call stmts to input_location
11526 if it finds stmt's location is unknown. To prevent resetting for
11527 destructors, we set the input_location to unknown.
11528 Note that this only affects the destructor calls in FINALLY/CATCH
11529 block, and will automatically reset to its original value by the
11530 end of gimplify_expr. */
11531 input_location
= UNKNOWN_LOCATION
;
11532 eval
= cleanup
= NULL
;
11533 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
11534 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
11535 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11536 if (gimple_seq_empty_p (cleanup
))
11538 gimple_seq_add_seq (pre_p
, eval
);
11542 try_
= gimple_build_try (eval
, cleanup
,
11543 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
11544 ? GIMPLE_TRY_FINALLY
11545 : GIMPLE_TRY_CATCH
);
11546 if (EXPR_HAS_LOCATION (save_expr
))
11547 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
11548 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
11549 gimple_set_location (try_
, saved_location
);
11550 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
11551 gimple_try_set_catch_is_cleanup (try_
,
11552 TRY_CATCH_IS_CLEANUP (*expr_p
));
11553 gimplify_seq_add_stmt (pre_p
, try_
);
11558 case CLEANUP_POINT_EXPR
:
11559 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
11563 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
11569 gimple_seq handler
= NULL
;
11570 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
11571 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
11572 gimplify_seq_add_stmt (pre_p
, c
);
11577 case EH_FILTER_EXPR
:
11580 gimple_seq failure
= NULL
;
11582 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
11583 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
11584 gimple_set_no_warning (ehf
, TREE_NO_WARNING (*expr_p
));
11585 gimplify_seq_add_stmt (pre_p
, ehf
);
11592 enum gimplify_status r0
, r1
;
11593 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
11594 post_p
, is_gimple_val
, fb_rvalue
);
11595 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
11596 post_p
, is_gimple_val
, fb_rvalue
);
11597 TREE_SIDE_EFFECTS (*expr_p
) = 0;
11598 ret
= MIN (r0
, r1
);
11603 /* We get here when taking the address of a label. We mark
11604 the label as "forced"; meaning it can never be removed and
11605 it is a potential target for any computed goto. */
11606 FORCED_LABEL (*expr_p
) = 1;
11610 case STATEMENT_LIST
:
11611 ret
= gimplify_statement_list (expr_p
, pre_p
);
11614 case WITH_SIZE_EXPR
:
11616 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11617 post_p
== &internal_post
? NULL
: post_p
,
11618 gimple_test_f
, fallback
);
11619 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
11620 is_gimple_val
, fb_rvalue
);
11627 ret
= gimplify_var_or_parm_decl (expr_p
);
11631 /* When within an OMP context, notice uses of variables. */
11632 if (gimplify_omp_ctxp
)
11633 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
11638 /* Allow callbacks into the gimplifier during optimization. */
11643 gimplify_omp_parallel (expr_p
, pre_p
);
11648 gimplify_omp_task (expr_p
, pre_p
);
11656 case OMP_DISTRIBUTE
:
11659 ret
= gimplify_omp_for (expr_p
, pre_p
);
11663 gimplify_oacc_cache (expr_p
, pre_p
);
11668 gimplify_oacc_declare (expr_p
, pre_p
);
11672 case OACC_HOST_DATA
:
11675 case OACC_PARALLEL
:
11679 case OMP_TARGET_DATA
:
11681 gimplify_omp_workshare (expr_p
, pre_p
);
11685 case OACC_ENTER_DATA
:
11686 case OACC_EXIT_DATA
:
11688 case OMP_TARGET_UPDATE
:
11689 case OMP_TARGET_ENTER_DATA
:
11690 case OMP_TARGET_EXIT_DATA
:
11691 gimplify_omp_target_update (expr_p
, pre_p
);
11697 case OMP_TASKGROUP
:
11701 gimple_seq body
= NULL
;
11704 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
11705 switch (TREE_CODE (*expr_p
))
11708 g
= gimple_build_omp_section (body
);
11711 g
= gimple_build_omp_master (body
);
11713 case OMP_TASKGROUP
:
11715 gimple_seq cleanup
= NULL
;
11717 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
11718 g
= gimple_build_call (fn
, 0);
11719 gimple_seq_add_stmt (&cleanup
, g
);
11720 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
11722 gimple_seq_add_stmt (&body
, g
);
11723 g
= gimple_build_omp_taskgroup (body
);
11727 g
= gimplify_omp_ordered (*expr_p
, body
);
11730 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
11731 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
11732 gimplify_adjust_omp_clauses (pre_p
, body
,
11733 &OMP_CRITICAL_CLAUSES (*expr_p
),
11735 g
= gimple_build_omp_critical (body
,
11736 OMP_CRITICAL_NAME (*expr_p
),
11737 OMP_CRITICAL_CLAUSES (*expr_p
));
11740 gcc_unreachable ();
11742 gimplify_seq_add_stmt (pre_p
, g
);
11748 case OMP_ATOMIC_READ
:
11749 case OMP_ATOMIC_CAPTURE_OLD
:
11750 case OMP_ATOMIC_CAPTURE_NEW
:
11751 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
11754 case TRANSACTION_EXPR
:
11755 ret
= gimplify_transaction (expr_p
, pre_p
);
11758 case TRUTH_AND_EXPR
:
11759 case TRUTH_OR_EXPR
:
11760 case TRUTH_XOR_EXPR
:
11762 tree orig_type
= TREE_TYPE (*expr_p
);
11763 tree new_type
, xop0
, xop1
;
11764 *expr_p
= gimple_boolify (*expr_p
);
11765 new_type
= TREE_TYPE (*expr_p
);
11766 if (!useless_type_conversion_p (orig_type
, new_type
))
11768 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
11773 /* Boolified binary truth expressions are semantically equivalent
11774 to bitwise binary expressions. Canonicalize them to the
11775 bitwise variant. */
11776 switch (TREE_CODE (*expr_p
))
11778 case TRUTH_AND_EXPR
:
11779 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
11781 case TRUTH_OR_EXPR
:
11782 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
11784 case TRUTH_XOR_EXPR
:
11785 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
11790 /* Now make sure that operands have compatible type to
11791 expression's new_type. */
11792 xop0
= TREE_OPERAND (*expr_p
, 0);
11793 xop1
= TREE_OPERAND (*expr_p
, 1);
11794 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
11795 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
11798 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
11799 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
11802 /* Continue classified as tcc_binary. */
11806 case VEC_COND_EXPR
:
11808 enum gimplify_status r0
, r1
, r2
;
11810 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11811 post_p
, is_gimple_condexpr
, fb_rvalue
);
11812 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
11813 post_p
, is_gimple_val
, fb_rvalue
);
11814 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
11815 post_p
, is_gimple_val
, fb_rvalue
);
11817 ret
= MIN (MIN (r0
, r1
), r2
);
11818 recalculate_side_effects (*expr_p
);
11823 case VEC_PERM_EXPR
:
11824 /* Classified as tcc_expression. */
11827 case BIT_INSERT_EXPR
:
11828 /* Argument 3 is a constant. */
11831 case POINTER_PLUS_EXPR
:
11833 enum gimplify_status r0
, r1
;
11834 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11835 post_p
, is_gimple_val
, fb_rvalue
);
11836 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
11837 post_p
, is_gimple_val
, fb_rvalue
);
11838 recalculate_side_effects (*expr_p
);
11839 ret
= MIN (r0
, r1
);
11843 case CILK_SYNC_STMT
:
11845 if (!fn_contains_cilk_spawn_p (cfun
))
11847 error_at (EXPR_LOCATION (*expr_p
),
11848 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11853 gimplify_cilk_sync (expr_p
, pre_p
);
11860 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
11862 case tcc_comparison
:
11863 /* Handle comparison of objects of non scalar mode aggregates
11864 with a call to memcmp. It would be nice to only have to do
11865 this for variable-sized objects, but then we'd have to allow
11866 the same nest of reference nodes we allow for MODIFY_EXPR and
11867 that's too complex.
11869 Compare scalar mode aggregates as scalar mode values. Using
11870 memcmp for them would be very inefficient at best, and is
11871 plain wrong if bitfields are involved. */
11873 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
11875 /* Vector comparisons need no boolification. */
11876 if (TREE_CODE (type
) == VECTOR_TYPE
)
11878 else if (!AGGREGATE_TYPE_P (type
))
11880 tree org_type
= TREE_TYPE (*expr_p
);
11881 *expr_p
= gimple_boolify (*expr_p
);
11882 if (!useless_type_conversion_p (org_type
,
11883 TREE_TYPE (*expr_p
)))
11885 *expr_p
= fold_convert_loc (input_location
,
11886 org_type
, *expr_p
);
11892 else if (TYPE_MODE (type
) != BLKmode
)
11893 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
11895 ret
= gimplify_variable_sized_compare (expr_p
);
11900 /* If *EXPR_P does not need to be special-cased, handle it
11901 according to its class. */
11903 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11904 post_p
, is_gimple_val
, fb_rvalue
);
11910 enum gimplify_status r0
, r1
;
11912 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11913 post_p
, is_gimple_val
, fb_rvalue
);
11914 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
11915 post_p
, is_gimple_val
, fb_rvalue
);
11917 ret
= MIN (r0
, r1
);
11923 enum gimplify_status r0
, r1
, r2
;
11925 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11926 post_p
, is_gimple_val
, fb_rvalue
);
11927 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
11928 post_p
, is_gimple_val
, fb_rvalue
);
11929 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
11930 post_p
, is_gimple_val
, fb_rvalue
);
11932 ret
= MIN (MIN (r0
, r1
), r2
);
11936 case tcc_declaration
:
11939 goto dont_recalculate
;
11942 gcc_unreachable ();
11945 recalculate_side_effects (*expr_p
);
11951 gcc_assert (*expr_p
|| ret
!= GS_OK
);
11953 while (ret
== GS_OK
);
11955 /* If we encountered an error_mark somewhere nested inside, either
11956 stub out the statement or propagate the error back out. */
11957 if (ret
== GS_ERROR
)
11964 /* This was only valid as a return value from the langhook, which
11965 we handled. Make sure it doesn't escape from any other context. */
11966 gcc_assert (ret
!= GS_UNHANDLED
);
11968 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
11970 /* We aren't looking for a value, and we don't have a valid
11971 statement. If it doesn't have side-effects, throw it away. */
11972 if (!TREE_SIDE_EFFECTS (*expr_p
))
11974 else if (!TREE_THIS_VOLATILE (*expr_p
))
11976 /* This is probably a _REF that contains something nested that
11977 has side effects. Recurse through the operands to find it. */
11978 enum tree_code code
= TREE_CODE (*expr_p
);
11982 case COMPONENT_REF
:
11983 case REALPART_EXPR
:
11984 case IMAGPART_EXPR
:
11985 case VIEW_CONVERT_EXPR
:
11986 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
11987 gimple_test_f
, fallback
);
11991 case ARRAY_RANGE_REF
:
11992 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
11993 gimple_test_f
, fallback
);
11994 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
11995 gimple_test_f
, fallback
);
11999 /* Anything else with side-effects must be converted to
12000 a valid statement before we get here. */
12001 gcc_unreachable ();
12006 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
12007 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
)
12009 /* Historically, the compiler has treated a bare reference
12010 to a non-BLKmode volatile lvalue as forcing a load. */
12011 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
12013 /* Normally, we do not want to create a temporary for a
12014 TREE_ADDRESSABLE type because such a type should not be
12015 copied by bitwise-assignment. However, we make an
12016 exception here, as all we are doing here is ensuring that
12017 we read the bytes that make up the type. We use
12018 create_tmp_var_raw because create_tmp_var will abort when
12019 given a TREE_ADDRESSABLE type. */
12020 tree tmp
= create_tmp_var_raw (type
, "vol");
12021 gimple_add_tmp_var (tmp
);
12022 gimplify_assign (tmp
, *expr_p
, pre_p
);
12026 /* We can't do anything useful with a volatile reference to
12027 an incomplete type, so just throw it away. Likewise for
12028 a BLKmode type, since any implicit inner load should
12029 already have been turned into an explicit one by the
12030 gimplification process. */
12034 /* If we are gimplifying at the statement level, we're done. Tack
12035 everything together and return. */
12036 if (fallback
== fb_none
|| is_statement
)
12038 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12039 it out for GC to reclaim it. */
12040 *expr_p
= NULL_TREE
;
12042 if (!gimple_seq_empty_p (internal_pre
)
12043 || !gimple_seq_empty_p (internal_post
))
12045 gimplify_seq_add_seq (&internal_pre
, internal_post
);
12046 gimplify_seq_add_seq (pre_p
, internal_pre
);
12049 /* The result of gimplifying *EXPR_P is going to be the last few
12050 statements in *PRE_P and *POST_P. Add location information
12051 to all the statements that were added by the gimplification
12053 if (!gimple_seq_empty_p (*pre_p
))
12054 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
12056 if (!gimple_seq_empty_p (*post_p
))
12057 annotate_all_with_location_after (*post_p
, post_last_gsi
,
12063 #ifdef ENABLE_GIMPLE_CHECKING
12066 enum tree_code code
= TREE_CODE (*expr_p
);
12067 /* These expressions should already be in gimple IR form. */
12068 gcc_assert (code
!= MODIFY_EXPR
12069 && code
!= ASM_EXPR
12070 && code
!= BIND_EXPR
12071 && code
!= CATCH_EXPR
12072 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
12073 && code
!= EH_FILTER_EXPR
12074 && code
!= GOTO_EXPR
12075 && code
!= LABEL_EXPR
12076 && code
!= LOOP_EXPR
12077 && code
!= SWITCH_EXPR
12078 && code
!= TRY_FINALLY_EXPR
12079 && code
!= OACC_PARALLEL
12080 && code
!= OACC_KERNELS
12081 && code
!= OACC_DATA
12082 && code
!= OACC_HOST_DATA
12083 && code
!= OACC_DECLARE
12084 && code
!= OACC_UPDATE
12085 && code
!= OACC_ENTER_DATA
12086 && code
!= OACC_EXIT_DATA
12087 && code
!= OACC_CACHE
12088 && code
!= OMP_CRITICAL
12090 && code
!= OACC_LOOP
12091 && code
!= OMP_MASTER
12092 && code
!= OMP_TASKGROUP
12093 && code
!= OMP_ORDERED
12094 && code
!= OMP_PARALLEL
12095 && code
!= OMP_SECTIONS
12096 && code
!= OMP_SECTION
12097 && code
!= OMP_SINGLE
);
12101 /* Otherwise we're gimplifying a subexpression, so the resulting
12102 value is interesting. If it's a valid operand that matches
12103 GIMPLE_TEST_F, we're done. Unless we are handling some
12104 post-effects internally; if that's the case, we need to copy into
12105 a temporary before adding the post-effects to POST_P. */
12106 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
12109 /* Otherwise, we need to create a new temporary for the gimplified
12112 /* We can't return an lvalue if we have an internal postqueue. The
12113 object the lvalue refers to would (probably) be modified by the
12114 postqueue; we need to copy the value out first, which means an
12116 if ((fallback
& fb_lvalue
)
12117 && gimple_seq_empty_p (internal_post
)
12118 && is_gimple_addressable (*expr_p
))
12120 /* An lvalue will do. Take the address of the expression, store it
12121 in a temporary, and replace the expression with an INDIRECT_REF of
12123 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
12124 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
12125 *expr_p
= build_simple_mem_ref (tmp
);
12127 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
12129 /* An rvalue will do. Assign the gimplified expression into a
12130 new temporary TMP and replace the original expression with
12131 TMP. First, make sure that the expression has a type so that
12132 it can be assigned into a temporary. */
12133 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
12134 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
12138 #ifdef ENABLE_GIMPLE_CHECKING
12139 if (!(fallback
& fb_mayfail
))
12141 fprintf (stderr
, "gimplification failed:\n");
12142 print_generic_expr (stderr
, *expr_p
, 0);
12143 debug_tree (*expr_p
);
12144 internal_error ("gimplification failed");
12147 gcc_assert (fallback
& fb_mayfail
);
12149 /* If this is an asm statement, and the user asked for the
12150 impossible, don't die. Fail and let gimplify_asm_expr
12156 /* Make sure the temporary matches our predicate. */
12157 gcc_assert ((*gimple_test_f
) (*expr_p
));
12159 if (!gimple_seq_empty_p (internal_post
))
12161 annotate_all_with_location (internal_post
, input_location
);
12162 gimplify_seq_add_seq (pre_p
, internal_post
);
12166 input_location
= saved_location
;
12170 /* Like gimplify_expr but make sure the gimplified result is not itself
12171 a SSA name (but a decl if it were). Temporaries required by
12172 evaluating *EXPR_P may be still SSA names. */
/* NOTE(review): this region is a fragmented extraction -- original GCC line
   numbers are fused into the text and several source lines (e.g. the trailing
   parameter, braces, the final return) were dropped.  Code kept byte-identical;
   confirm against upstream gimplify.c before editing.  */
12174 static enum gimplify_status
12175 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
12176 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
12179 bool was_ssa_name_p
= TREE_CODE (*expr_p
) == SSA_NAME
;
/* Delegate to the main gimplifier, then post-process the result below.  */
12180 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
12181 gimple_test_f
, fallback
);
/* If gimplification left an SSA name in *EXPR_P, replace it with a decl
   (see function comment above).  */
12183 && TREE_CODE (*expr_p
) == SSA_NAME
)
12185 tree name
= *expr_p
;
12186 if (was_ssa_name_p
)
12187 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
12190 /* Avoid the extra copy if possible. */
/* The SSA name was created during gimplification: retarget its defining
   statement to a fresh temporary register and release the name.  */
12191 *expr_p
= create_tmp_reg (TREE_TYPE (name
));
12192 gimple_set_lhs (SSA_NAME_DEF_STMT (name
), *expr_p
);
12193 release_ssa_name (name
);
12199 /* Look through TYPE for variable-sized objects and gimplify each such
12200 size that we find. Add to LIST_P any statements generated. */
/* NOTE(review): fragmented extraction -- original line numbers are embedded
   and several lines (braces, breaks, fall-through markers) are missing.
   Code kept byte-identical; verify against upstream gimplify.c.  */
12203 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
12207 if (type
== NULL
|| type
== error_mark_node
)
12210 /* We first do the main variant, then copy into any other variants. */
12211 type
= TYPE_MAIN_VARIANT (type
);
12213 /* Avoid infinite recursion. */
12214 if (TYPE_SIZES_GIMPLIFIED (type
))
12217 TYPE_SIZES_GIMPLIFIED (type
) = 1;
/* Dispatch on the type class; scalar-like types gimplify their bounds,
   arrays recurse on element/domain, aggregates walk their fields.  */
12219 switch (TREE_CODE (type
))
12222 case ENUMERAL_TYPE
:
12225 case FIXED_POINT_TYPE
:
12226 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
12227 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
/* Propagate the (possibly gimplified) bounds to every variant.  */
12229 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
12231 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
12232 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
12237 /* These types may not have declarations, so handle them here. */
12238 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
12239 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
12240 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12241 with assigned stack slots, for -O1+ -g they should be tracked
12243 if (!(TYPE_NAME (type
)
12244 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
12245 && DECL_IGNORED_P (TYPE_NAME (type
)))
12246 && TYPE_DOMAIN (type
)
12247 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
12249 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
12250 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
12251 DECL_IGNORED_P (t
) = 0;
12252 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
12253 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
12254 DECL_IGNORED_P (t
) = 0;
/* Record/union-like aggregates: gimplify each FIELD_DECL's offset, size
   and size-unit, then recurse into the field's type.  */
12260 case QUAL_UNION_TYPE
:
12261 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
12262 if (TREE_CODE (field
) == FIELD_DECL
)
12264 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
12265 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
12266 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
12267 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
12272 case REFERENCE_TYPE
:
12273 /* We used to recurse on the pointed-to type here, which turned out to
12274 be incorrect because its definition might refer to variables not
12275 yet initialized at this point if a forward declaration is involved.
12277 It was actually useful for anonymous pointed-to types to ensure
12278 that the sizes evaluation dominates every possible later use of the
12279 values. Restricting to such types here would be safe since there
12280 is no possible forward declaration around, but would introduce an
12281 undesirable middle-end semantic to anonymity. We then defer to
12282 front-ends the responsibility of ensuring that the sizes are
12283 evaluated both early and late enough, e.g. by attaching artificial
12284 type declarations to the tree. */
/* Common tail: gimplify the type's own size, then stamp all variants.  */
12291 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
12292 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
12294 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
12296 TYPE_SIZE (t
) = TYPE_SIZE (type
);
12297 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
12298 TYPE_SIZES_GIMPLIFIED (t
) = 1;
12302 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12303 a size or position, has had all of its SAVE_EXPRs evaluated.
12304 We add any required statements to *STMT_P. */
/* NOTE(review): fragmented extraction -- the early return after the
   is_gimple_sizepos check and surrounding braces were dropped.  Code kept
   byte-identical; confirm against upstream gimplify.c.  */
12307 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
12309 tree expr
= *expr_p
;
12311 /* We don't do anything if the value isn't there, is constant, or contains
12312 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12313 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12314 will want to replace it with a new variable, but that will cause problems
12315 if this type is from outside the function. It's OK to have that here. */
12316 if (is_gimple_sizepos (expr
))
/* Unshare before gimplifying so the size expression is not shared with
   other trees.  */
12319 *expr_p
= unshare_expr (expr
);
12321 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12322 if the def vanishes. */
12323 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
12326 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12327 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12328 is true, also gimplify the parameters. */
/* NOTE(review): fragmented extraction -- original line numbers are embedded
   in the text and many lines (declarations of outer_bind/parm, braces,
   else branches, the final return) are missing.  Code kept byte-identical;
   verify against upstream gimplify.c.  */
12331 gimplify_body (tree fndecl
, bool do_parms
)
12333 location_t saved_location
= input_location
;
12334 gimple_seq parm_stmts
, seq
;
12335 gimple
*outer_stmt
;
12337 struct cgraph_node
*cgn
;
/* Account this work to the TV_TREE_GIMPLIFY timer.  */
12339 timevar_push (TV_TREE_GIMPLIFY
);
12341 init_tree_ssa (cfun
);
12343 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12345 default_rtl_profile ();
12347 gcc_assert (gimplify_ctxp
== NULL
);
12348 push_gimplify_context (true);
12350 if (flag_openacc
|| flag_openmp
)
12352 gcc_assert (gimplify_omp_ctxp
== NULL
);
12353 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
12354 gimplify_omp_ctxp
= new_omp_context (ORT_TARGET
);
12357 /* Unshare most shared trees in the body and in that of any nested functions.
12358 It would seem we don't have to do this for nested functions because
12359 they are supposed to be output and then the outer function gimplified
12360 first, but the g++ front end doesn't always do it that way. */
12361 unshare_body (fndecl
);
12362 unvisit_body (fndecl
);
/* Nested functions (cgn->origin) need tracking of nonlocal VLAs.  */
12364 cgn
= cgraph_node::get (fndecl
);
12365 if (cgn
&& cgn
->origin
)
12366 nonlocal_vlas
= new hash_set
<tree
>;
12368 /* Make sure input_location isn't set to something weird. */
12369 input_location
= DECL_SOURCE_LOCATION (fndecl
);
12371 /* Resolve callee-copies. This has to be done before processing
12372 the body so that DECL_VALUE_EXPR gets processed correctly. */
12373 parm_stmts
= do_parms
? gimplify_parameters () : NULL
;
12375 /* Gimplify the function's body. */
12377 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
12378 outer_stmt
= gimple_seq_first_stmt (seq
);
12381 outer_stmt
= gimple_build_nop ();
12382 gimplify_seq_add_stmt (&seq
, outer_stmt
);
12385 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12386 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12387 if (gimple_code (outer_stmt
) == GIMPLE_BIND
12388 && gimple_seq_first (seq
) == gimple_seq_last (seq
))
12389 outer_bind
= as_a
<gbind
*> (outer_stmt
);
12391 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
/* The GENERIC body has been consumed; drop it.  */
12393 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
12395 /* If we had callee-copies statements, insert them at the beginning
12396 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12397 if (!gimple_seq_empty_p (parm_stmts
))
12401 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
12402 gimple_bind_set_body (outer_bind
, parm_stmts
);
12404 for (parm
= DECL_ARGUMENTS (current_function_decl
);
12405 parm
; parm
= DECL_CHAIN (parm
))
12406 if (DECL_HAS_VALUE_EXPR_P (parm
))
12408 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
12409 DECL_IGNORED_P (parm
) = 0;
12415 if (nonlocal_vla_vars
)
12417 /* tree-nested.c may later on call declare_vars (..., true);
12418 which relies on BLOCK_VARS chain to be the tail of the
12419 gimple_bind_vars chain. Ensure we don't violate that
12421 if (gimple_bind_block (outer_bind
)
12422 == DECL_INITIAL (current_function_decl
))
12423 declare_vars (nonlocal_vla_vars
, outer_bind
, true);
12425 BLOCK_VARS (DECL_INITIAL (current_function_decl
))
12426 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl
)),
12427 nonlocal_vla_vars
);
12428 nonlocal_vla_vars
= NULL_TREE
;
12430 delete nonlocal_vlas
;
12431 nonlocal_vlas
= NULL
;
/* Tear down any OpenMP/OpenACC gimplification context we created.  */
12434 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
12435 && gimplify_omp_ctxp
)
12437 delete_omp_context (gimplify_omp_ctxp
);
12438 gimplify_omp_ctxp
= NULL
;
12441 pop_gimplify_context (outer_bind
);
12442 gcc_assert (gimplify_ctxp
== NULL
);
/* With checking enabled, validate the generated GIMPLE.  */
12444 if (flag_checking
&& !seen_error ())
12445 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
12447 timevar_pop (TV_TREE_GIMPLIFY
);
12448 input_location
= saved_location
;
12453 typedef char *char_p
; /* For DEF_VEC_P. */
12455 /* Return whether we should exclude FNDECL from instrumentation. */
/* NOTE(review): fragmented extraction -- the return type, local declarations
   (v, name, i, s), braces and return statements were dropped.  Code kept
   byte-identical; verify against upstream gimplify.c.  The logic visible here
   matches FNDECL's printable name, then its source file, against the two
   user-supplied exclusion lists; a substring match excludes the function.  */
12458 flag_instrument_functions_exclude_p (tree fndecl
)
/* First list: -finstrument-functions-exclude-function-list.  */
12462 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
12463 if (v
&& v
->length () > 0)
12469 name
= lang_hooks
.decl_printable_name (fndecl
, 0);
12470 FOR_EACH_VEC_ELT (*v
, i
, s
)
12471 if (strstr (name
, s
) != NULL
)
/* Second list: -finstrument-functions-exclude-file-list.  */
12475 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
12476 if (v
&& v
->length () > 0)
12482 name
= DECL_SOURCE_FILE (fndecl
);
12483 FOR_EACH_VEC_ELT (*v
, i
, s
)
12484 if (strstr (name
, s
) != NULL
)
12491 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12492 node for the function we want to gimplify.
12494 Return the sequence of GIMPLE statements corresponding to the body
/* NOTE(review): fragmented extraction -- original line numbers are embedded
   and many lines (local declarations of parm/ret/seq/bind/x/call/tmp_var/tf/
   new_bind, braces, pop_cfun) are missing.  Code kept byte-identical; verify
   against upstream gimplify.c.  */
12498 gimplify_function_tree (tree fndecl
)
12504 gcc_assert (!gimple_body (fndecl
));
12506 if (DECL_STRUCT_FUNCTION (fndecl
))
12507 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
12509 push_struct_function (fndecl
);
12511 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12513 cfun
->curr_properties
|= PROP_gimple_lva
;
12515 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
12517 /* Preliminarily mark non-addressed complex variables as eligible
12518 for promotion to gimple registers. We'll transform their uses
12519 as we find them. */
12520 if ((TREE_CODE (TREE_TYPE (parm
)) == COMPLEX_TYPE
12521 || TREE_CODE (TREE_TYPE (parm
)) == VECTOR_TYPE
)
12522 && !TREE_THIS_VOLATILE (parm
)
12523 && !needs_to_live_in_memory (parm
))
12524 DECL_GIMPLE_REG_P (parm
) = 1;
/* Same treatment for the DECL_RESULT.  */
12527 ret
= DECL_RESULT (fndecl
);
12528 if ((TREE_CODE (TREE_TYPE (ret
)) == COMPLEX_TYPE
12529 || TREE_CODE (TREE_TYPE (ret
)) == VECTOR_TYPE
)
12530 && !needs_to_live_in_memory (ret
))
12531 DECL_GIMPLE_REG_P (ret
) = 1;
/* Track ASan-poisoned variables only while gimplifying this body.  */
12533 asan_poisoned_variables
= new hash_set
<tree
> ();
12534 bind
= gimplify_body (fndecl
, true);
12535 delete asan_poisoned_variables
;
12536 asan_poisoned_variables
= NULL
;
12538 /* The tree body of the function is no longer needed, replace it
12539 with the new GIMPLE body. */
12541 gimple_seq_add_stmt (&seq
, bind
);
12542 gimple_set_body (fndecl
, seq
);
12544 /* If we're instrumenting function entry/exit, then prepend the call to
12545 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12546 catch the exit hook. */
12547 /* ??? Add some way to ignore exceptions for this TFE. */
12548 if (flag_instrument_function_entry_exit
12549 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
12550 /* Do not instrument extern inline functions. */
12551 && !(DECL_DECLARED_INLINE_P (fndecl
)
12552 && DECL_EXTERNAL (fndecl
)
12553 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
12554 && !flag_instrument_functions_exclude_p (fndecl
))
12559 gimple_seq cleanup
= NULL
, body
= NULL
;
/* Cleanup sequence: __builtin_return_address (0) followed by
   __cyg_profile_func_exit, run on every exit via GIMPLE_TRY_FINALLY.  */
12563 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
12564 call
= gimple_build_call (x
, 1, integer_zero_node
);
12565 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
12566 gimple_call_set_lhs (call
, tmp_var
);
12567 gimplify_seq_add_stmt (&cleanup
, call
);
12568 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT
);
12569 call
= gimple_build_call (x
, 2,
12570 build_fold_addr_expr (current_function_decl
),
12572 gimplify_seq_add_stmt (&cleanup
, call
);
12573 tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
/* Entry sequence: __builtin_return_address (0) then
   __cyg_profile_func_enter, prepended before the try/finally.  */
12575 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
12576 call
= gimple_build_call (x
, 1, integer_zero_node
);
12577 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
12578 gimple_call_set_lhs (call
, tmp_var
);
12579 gimplify_seq_add_stmt (&body
, call
);
12580 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER
);
12581 call
= gimple_build_call (x
, 2,
12582 build_fold_addr_expr (current_function_decl
),
12584 gimplify_seq_add_stmt (&body
, call
);
12585 gimplify_seq_add_stmt (&body
, tf
);
12586 new_bind
= gimple_build_bind (NULL
, body
, NULL
);
12588 /* Replace the current function body with the body
12589 wrapped in the try/finally TF. */
12591 gimple_seq_add_stmt (&seq
, new_bind
);
12592 gimple_set_body (fndecl
, seq
);
/* For -fsanitize=thread, wrap the body in a try/finally that invokes the
   internal IFN_TSAN_FUNC_EXIT on every exit path.  */
12596 if ((flag_sanitize
& SANITIZE_THREAD
) != 0
12597 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl
)))
12599 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
12600 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
12601 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
12602 /* Replace the current function body with the body
12603 wrapped in the try/finally TF. */
12605 gimple_seq_add_stmt (&seq
, new_bind
);
12606 gimple_set_body (fndecl
, seq
);
/* The function is now fully in GIMPLE form.  */
12609 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
12610 cfun
->curr_properties
|= PROP_gimple_any
;
12614 dump_function (TDI_generic
, fndecl
);
12617 /* Return a dummy expression of type TYPE in order to keep going after an
/* NOTE(review): the end of the comment above and the return type line were
   dropped by extraction.  Builds a null pointer constant of TYPE* and
   dereferences it via MEM_REF to produce a placeholder value of TYPE.  */
12621 dummy_object (tree type
)
12623 tree t
= build_int_cst (build_pointer_type (type
), 0);
12624 return build2 (MEM_REF
, type
, t
, t
);
12627 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12628 builtin function, but a very special sort of operator. */
/* NOTE(review): fragmented extraction -- original line numbers are embedded
   and several lines (braces, the promotion condition's second operand, the
   final return) are missing.  Code kept byte-identical; verify against
   upstream gimplify.c.  */
12630 enum gimplify_status
12631 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
,
12632 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
12634 tree promoted_type
, have_va_type
;
12635 tree valist
= TREE_OPERAND (*expr_p
, 0);
12636 tree type
= TREE_TYPE (*expr_p
);
12637 tree t
, tag
, aptag
;
12638 location_t loc
= EXPR_LOCATION (*expr_p
);
12640 /* Verify that valist is of the proper type. */
12641 have_va_type
= TREE_TYPE (valist
);
12642 if (have_va_type
== error_mark_node
)
12644 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
12645 if (have_va_type
== NULL_TREE
12646 && POINTER_TYPE_P (TREE_TYPE (valist
)))
12647 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12649 = targetm
.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist
)));
12650 gcc_assert (have_va_type
!= NULL_TREE
);
12652 /* Generate a diagnostic for requesting data of a type that cannot
12653 be passed through `...' due to type promotion at the call site. */
12654 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
12657 static bool gave_help
;
12659 /* Use the expansion point to handle cases such as passing bool (defined
12660 in a system header) through `...'. */
12661 source_location xloc
12662 = expansion_point_location_if_in_system_header (loc
);
12664 /* Unfortunately, this is merely undefined, rather than a constraint
12665 violation, so we cannot make this an error. If this call is never
12666 executed, the program is still strictly conforming. */
12667 warned
= warning_at (xloc
, 0,
12668 "%qT is promoted to %qT when passed through %<...%>",
12669 type
, promoted_type
);
12670 if (!gave_help
&& warned
)
12673 inform (xloc
, "(so you should pass %qT not %qT to %<va_arg%>)",
12674 promoted_type
, type
);
12677 /* We can, however, treat "undefined" any way we please.
12678 Call abort to encourage the user to fix the program. */
12680 inform (xloc
, "if this code is reached, the program will abort");
12681 /* Before the abort, allow the evaluation of the va_list
12682 expression to exit or longjmp. */
12683 gimplify_and_add (valist
, pre_p
);
12684 t
= build_call_expr_loc (loc
,
12685 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
12686 gimplify_and_add (t
, pre_p
);
12688 /* This is dead code, but go ahead and finish so that the
12689 mode of the result comes out right. */
12690 *expr_p
= dummy_object (type
);
12691 return GS_ALL_DONE
;
/* Normal path: lower to the internal function IFN_VA_ARG, carrying the
   requested type and the va_list type as pointer-constant "tags".  */
12694 tag
= build_int_cst (build_pointer_type (type
), 0);
12695 aptag
= build_int_cst (TREE_TYPE (valist
), 0);
12697 *expr_p
= build_call_expr_internal_loc (loc
, IFN_VA_ARG
, type
, 3,
12698 valist
, tag
, aptag
);
12700 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12701 needs to be expanded. */
12702 cfun
->curr_properties
&= ~PROP_gimple_lva
;
12707 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12709 DST/SRC are the destination and source respectively. You can pass
12710 ungimplified trees in DST or SRC, in which case they will be
12711 converted to a gimple operand if necessary.
12713 This function returns the newly created GIMPLE_ASSIGN tuple. */
/* NOTE(review): fragmented extraction -- the return type line and braces
   were dropped.  Code kept byte-identical.  */
12716 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
/* Build DST = SRC as a GENERIC MODIFY_EXPR, gimplify it onto *SEQ_P, and
   return the last statement added (the resulting assignment).  */
12718 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12719 gimplify_and_add (t
, seq_p
);
12721 return gimple_seq_last_stmt (*seq_p
);
/* Hash a cached-temporary table entry by its expression.
   NOTE(review): fragment -- the return type line and the declaration of T
   (presumably taken from P's stored expression; confirm against upstream
   gimplify.c) were dropped by extraction.  Code kept byte-identical.  */
12725 gimplify_hasher::hash (const elt_t
*p
)
12728 return iterative_hash_expr (t
, 0);
12732 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
12736 enum tree_code code
= TREE_CODE (t1
);
12738 if (TREE_CODE (t2
) != code
12739 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
12742 if (!operand_equal_p (t1
, t2
, 0))
12745 /* Only allow them to compare equal if they also hash equal; otherwise
12746 results are nondeterminate, and we fail bootstrap comparison. */
12747 gcc_checking_assert (hash (p1
) == hash (p2
));