1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
29 #include "double-int.h"
37 #include "fold-const.h"
40 #include "hard-reg-set.h"
44 #include "statistics.h"
46 #include "fixed-value.h"
47 #include "insn-config.h"
57 #include "basic-block.h"
58 #include "tree-ssa-alias.h"
59 #include "internal-fn.h"
60 #include "gimple-fold.h"
62 #include "gimple-expr.h"
66 #include "gimple-iterator.h"
67 #include "stringpool.h"
68 #include "stor-layout.h"
69 #include "print-tree.h"
70 #include "tree-iterator.h"
71 #include "tree-inline.h"
72 #include "tree-pretty-print.h"
73 #include "langhooks.h"
75 #include "gimple-ssa.h"
77 #include "plugin-api.h"
81 #include "tree-ssanames.h"
83 #include "diagnostic-core.h"
85 #include "splay-tree.h"
87 #include "gimple-low.h"
89 #include "gomp-constants.h"
91 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
92 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
// NOTE(review): fragmentary excerpt of GCC gimplify.c -- the leading numbers
// are the original source line numbers and they skip, so interior lines
// (enumerator names such as GOVD_SHARED, closing braces, etc.) are missing.
// Do not assume any definition below is complete.
// Flag bits describing how a variable is shared in an OpenMP region.
95 enum gimplify_omp_var_data
101 GOVD_FIRSTPRIVATE
= 16,
102 GOVD_LASTPRIVATE
= 32,
106 GOVD_DEBUG_PRIVATE
= 512,
107 GOVD_PRIVATE_OUTER_REF
= 1024,
111 /* Flag for GOVD_MAP: don't copy back. */
112 GOVD_MAP_TO_ONLY
= 8192,
114 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
115 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 16384,
117 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
118 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
// NOTE(review): the ORT_* values below presumably belong to a separate enum
// (omp_region_type in upstream gimplify.c); its opening line is missing here.
128 ORT_COMBINED_PARALLEL
= 3,
132 ORT_COMBINED_TEAMS
= 9,
134 ORT_TARGET_DATA
= 16,
135 /* Data region with offloading. */
// NOTE(review): fragmentary struct declarations -- braces and several member
// lines are missing from this excerpt; code preserved byte-for-byte.
139 /* Gimplify hashtable helper. */
141 struct gimplify_hasher
: typed_free_remove
<elt_t
>
143 typedef elt_t
*value_type
;
144 typedef elt_t
*compare_type
;
145 static inline hashval_t
hash (const elt_t
*);
146 static inline bool equal (const elt_t
*, const elt_t
*);
// Per-function gimplification state: context chain, bind-expr stack,
// pending conditional cleanups, switch case labels and the formal-temp table.
151 struct gimplify_ctx
*prev_context
;
153 vec
<gbind
*> bind_expr_stack
;
155 gimple_seq conditional_cleanups
;
159 vec
<tree
> case_labels
;
160 /* The formal temporary table. Should this be persistent? */
161 hash_table
<gimplify_hasher
> *temp_htab
;
166 bool allow_rhs_cond_expr
;
167 bool in_cleanup_point_expr
;
// Per-OpenMP-region state: variable-sharing splay tree and defaults.
170 struct gimplify_omp_ctx
172 struct gimplify_omp_ctx
*outer_context
;
173 splay_tree variables
;
174 hash_set
<tree
> *privatized_types
;
176 enum omp_clause_default_kind default_kind
;
177 enum omp_region_type region_type
;
// Current gimplification contexts (innermost of each chain).
182 static struct gimplify_ctx
*gimplify_ctxp
;
183 static struct gimplify_omp_ctx
*gimplify_omp_ctxp
;
// NOTE(review): the comment opened at original line 188 below has lost its
// closing line in this excerpt; text preserved byte-for-byte.
185 /* Forward declaration. */
186 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
188 /* Shorter alias name for the above function for use in gimplify.c
192 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple gs
)
194 gimple_seq_add_stmt_without_update (seq_p
, gs
);
197 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
198 NULL, a new sequence is allocated. This function is
199 similar to gimple_seq_add_seq, but does not scan the operands.
200 During gimplification, we need to manipulate statement sequences
201 before the def/use vectors have been constructed. */
204 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
206 gimple_stmt_iterator si
;
211 si
= gsi_last (*dst_p
)
;
212 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
// NOTE(review): free-list allocator for gimplify_ctx structs plus context
// push -- several interior lines (function names/returns/braces) are missing.
216 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
217 and popping gimplify contexts. */
219 static struct gimplify_ctx
*ctx_pool
= NULL
;
221 /* Return a gimplify context struct from the pool. */
223 static inline struct gimplify_ctx
*
226 struct gimplify_ctx
* c
= ctx_pool
;
229 ctx_pool
= c
->prev_context
;
231 c
= XNEW (struct gimplify_ctx
);
233 memset (c
, '\0', sizeof (*c
));
237 /* Put gimplify context C back into the pool. */
240 ctx_free (struct gimplify_ctx
*c
)
242 c
->prev_context
= ctx_pool
;
246 /* Free allocated ctx stack memory. */
249 free_gimplify_stack (void)
251 struct gimplify_ctx
*c
;
253 while ((c
= ctx_pool
))
255 ctx_pool
= c
->prev_context
;
// Allocate a fresh context, chain it onto gimplify_ctxp and record the
// caller's SSA / rhs-cond-expr preferences.
261 /* Set up a context for the gimplifier. */
264 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
266 struct gimplify_ctx
*c
= ctx_alloc ();
268 c
->prev_context
= gimplify_ctxp
;
270 gimplify_ctxp
->into_ssa
= in_ssa
;
271 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
// NOTE(review): pops the innermost gimplify context; temporaries are either
// declared into BODY or recorded into the function -- the guarding `if`
// around these two calls is missing from this excerpt.
274 /* Tear down a context for the gimplifier. If BODY is non-null, then
275 put the temporaries into the outer BIND_EXPR. Otherwise, put them
278 BODY is not a sequence, but the first tuple in a sequence. */
281 pop_gimplify_context (gimple body
)
283 struct gimplify_ctx
*c
= gimplify_ctxp
;
286 && (!c
->bind_expr_stack
.exists ()
287 || c
->bind_expr_stack
.is_empty ()));
288 c
->bind_expr_stack
.release ();
289 gimplify_ctxp
= c
->prev_context
;
292 declare_vars (c
->temps
, body
, false);
294 record_vars (c
->temps
);
// NOTE(review): accessors for the per-context GIMPLE_BIND stack and the
// COND_EXPR nesting counter; return types and braces are missing.
301 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
304 gimple_push_bind_expr (gbind
*bind_stmt
)
306 gimplify_ctxp
->bind_expr_stack
.reserve (8);
307 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
310 /* Pop the first element off the stack of bindings. */
313 gimple_pop_bind_expr (void)
315 gimplify_ctxp
->bind_expr_stack
.pop ();
318 /* Return the first element of the stack of bindings. */
321 gimple_current_bind_expr (void)
323 return gimplify_ctxp
->bind_expr_stack
.last ();
326 /* Return the stack of bindings created during gimplification. */
329 gimple_bind_expr_stack (void)
331 return gimplify_ctxp
->bind_expr_stack
;
334 /* Return true iff there is a COND_EXPR between us and the innermost
335 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
338 gimple_conditional_context (void)
340 return gimplify_ctxp
->conditions
> 0;
343 /* Note that we've entered a COND_EXPR. */
346 gimple_push_condition (void)
// Checking-only invariant: no conditional cleanups may be pending when the
// condition depth is zero.
348 #ifdef ENABLE_GIMPLE_CHECKING
349 if (gimplify_ctxp
->conditions
== 0)
350 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
352 ++(gimplify_ctxp
->conditions
);
355 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
356 now, add any conditional cleanups we've seen to the prequeue. */
359 gimple_pop_condition (gimple_seq
*pre_p
)
361 int conds
= --(gimplify_ctxp
->conditions
);
363 gcc_assert (conds
>= 0);
366 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
367 gimplify_ctxp
->conditional_cleanups
= NULL
;
// NOTE(review): the local declarations of `a' and `b' (casts of XA/XB to
// tree, original lines 375-378) are missing from this excerpt.
371 /* A stable comparison routine for use with splay trees and DECLs. */
374 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
379 return DECL_UID (a
) - DECL_UID (b
);
382 /* Create a new omp construct that deals with variable remapping. */
384 static struct gimplify_omp_ctx
*
385 new_omp_context (enum omp_region_type region_type
)
387 struct gimplify_omp_ctx
*c
;
389 c
= XCNEW (struct gimplify_omp_ctx
);
390 c
->outer_context
= gimplify_omp_ctxp
;
391 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
392 c
->privatized_types
= new hash_set
<tree
>;
393 c
->location
= input_location
;
394 c
->region_type
= region_type
;
// Task regions default to "unspecified" sharing; everything else to "shared".
395 if ((region_type
& ORT_TASK
) == 0)
396 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
398 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
403 /* Destroy an omp construct that deals with variable remapping. */
406 delete_omp_context (struct gimplify_omp_ctx
*c
)
408 splay_tree_delete (c
->variables
);
409 delete c
->privatized_types
;
413 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
414 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
// NOTE(review): the comment opened at original line 416 has lost its closing
// line in this excerpt; code preserved byte-for-byte.
416 /* Both gimplify the statement T and append it to *SEQ_P. This function
417 behaves exactly as gimplify_stmt, but you don't have to pass T as a
421 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
423 gimplify_stmt (&t
, seq_p
);
426 /* Gimplify statement T into sequence *SEQ_P, and return the first
427 tuple in the sequence of generated tuples for this statement.
428 Return NULL if gimplifying T produced no tuples. */
431 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
433 gimple_stmt_iterator last
= gsi_last (*seq_p
);
435 gimplify_and_add (t
, seq_p
);
437 if (!gsi_end_p (last
))
440 return gsi_stmt (last
);
443 return gimple_seq_first_stmt (*seq_p
);
446 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
447 LHS, or for a call argument. */
450 is_gimple_mem_rhs (tree t
)
452 /* If we're dealing with a renamable type, either source or dest must be
453 a renamed variable. */
454 if (is_gimple_reg_type (TREE_TYPE (t
)))
455 return is_gimple_val (t
);
457 return is_gimple_val (t
) || is_gimple_lvalue (t
);
460 /* Return true if T is a CALL_EXPR or an expression that can be
461 assigned to a temporary. Note that this predicate should only be
462 used during gimplification. See the rationale for this in
463 gimplify_modify_expr. */
466 is_gimple_reg_rhs_or_call (tree t
)
468 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
469 || TREE_CODE (t
) == CALL_EXPR
);
472 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
473 this predicate should only be used during gimplification. See the
474 rationale for this in gimplify_modify_expr. */
477 is_gimple_mem_rhs_or_call (tree t
)
479 /* If we're dealing with a renamable type, either source or dest must be
480 a renamed variable. */
481 if (is_gimple_reg_type (TREE_TYPE (t
)))
482 return is_gimple_val (t
);
484 return (is_gimple_val (t
) || is_gimple_lvalue (t
)
485 || TREE_CODE (t
) == CALL_EXPR
);
// NOTE(review): temporary-variable machinery.  Declarations of `ret', `elt',
// `slot', `elt_p', `mod' and several control-flow lines are missing from this
// excerpt -- treat every function below as incomplete.
488 /* Create a temporary with a name derived from VAL. Subroutine of
489 lookup_tmp_var; nobody else should call this function. */
492 create_tmp_from_val (tree val
)
494 /* Drop all qualifiers and address-space information from the value type. */
495 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
496 tree var
= create_tmp_var (type
, get_name (val
));
// Complex/vector temporaries may be promoted to gimple registers.
497 if (TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
498 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
)
499 DECL_GIMPLE_REG_P (var
) = 1;
503 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
504 an existing expression temporary. */
507 lookup_tmp_var (tree val
, bool is_formal
)
511 /* If not optimizing, never really reuse a temporary. local-alloc
512 won't allocate any variable that is used in more than one basic
513 block, which means it will go into memory, causing much extra
514 work in reload and final and poorer code generation, outweighing
515 the extra memory allocation here. */
516 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
517 ret
= create_tmp_from_val (val
);
// Lazily create the formal-temp hash table; 1000 is the initial size hint.
524 if (!gimplify_ctxp
->temp_htab
)
525 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
526 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
529 elt_p
= XNEW (elt_t
);
531 elt_p
->temp
= ret
= create_tmp_from_val (val
);
544 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
547 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
552 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
553 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
554 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
557 if (gimplify_ctxp
->into_ssa
558 && is_gimple_reg_type (TREE_TYPE (val
)))
559 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
561 t
= lookup_tmp_var (val
, is_formal
);
563 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
565 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
567 /* gimplify_modify_expr might want to reduce this further. */
568 gimplify_and_add (mod
, pre_p
);
574 /* Return a formal temporary variable initialized with VAL. PRE_P is as
575 in gimplify_expr. Only use this function if:
577 1) The value of the unfactored expression represented by VAL will not
578 change between the initialization and use of the temporary, and
579 2) The temporary will not be otherwise modified.
581 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
582 and #2 means it is inappropriate for && temps.
584 For other cases, use get_initialized_tmp_var instead. */
587 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
589 return internal_get_tmp_var (val
, pre_p
, NULL
, true);
592 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
593 are as in gimplify_expr. */
596 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
598 return internal_get_tmp_var (val
, pre_p
, post_p
, false);
// NOTE(review): declarations of `last', `temps', `block' and the early-return
// guard are missing from this excerpt; code preserved byte-for-byte.
601 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
602 generate debug info for them; otherwise don't. */
605 declare_vars (tree vars
, gimple gs
, bool debug_info
)
612 gbind
*scope
= as_a
<gbind
*> (gs
);
614 temps
= nreverse (last
);
616 block
= gimple_bind_block (scope
);
617 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
618 if (!block
|| !debug_info
)
// No BLOCK (or no debug info wanted): just chain the temps onto the
// GIMPLE_BIND's variable list.
620 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
621 gimple_bind_set_vars (scope
, temps
);
625 /* We need to attach the nodes both to the BIND_EXPR and to its
626 associated BLOCK for debugging purposes. The key point here
627 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
628 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
629 if (BLOCK_VARS (block
))
630 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
633 gimple_bind_set_vars (scope
,
634 chainon (gimple_bind_vars (scope
), temps
));
635 BLOCK_VARS (block
) = temps
;
641 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
642 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
643 no such upper bound can be obtained. */
646 force_constant_size (tree var
)
648 /* The only attempt we make is by querying the maximum size of objects
649 of the variable's type. */
651 HOST_WIDE_INT max_size
;
653 gcc_assert (TREE_CODE (var
) == VAR_DECL
);
655 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
657 gcc_assert (max_size
>= 0);
// The LHS of these two assignments (DECL_SIZE_UNIT/DECL_SIZE of VAR in
// upstream gimplify.c) is missing from this excerpt.
660 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
662 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
// NOTE(review): two variants of pushing a temporary into the current binding;
// the comments at original lines 672 and 691 have lost their closing lines,
// and surrounding control flow is missing.  Code preserved byte-for-byte.
665 /* Push the temporary variable TMP into the current binding. */
668 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
670 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
672 /* Later processing assumes that the object size is constant, which might
673 not be true at this point. Force the use of a constant upper bound in
675 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp
)))
676 force_constant_size (tmp
);
678 DECL_CONTEXT (tmp
) = fn
->decl
;
679 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
681 record_vars_into (tmp
, fn
->decl
);
684 /* Push the temporary variable TMP into the current binding. */
687 gimple_add_tmp_var (tree tmp
)
689 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
691 /* Later processing assumes that the object size is constant, which might
692 not be true at this point. Force the use of a constant upper bound in
694 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp
)))
695 force_constant_size (tmp
);
697 DECL_CONTEXT (tmp
) = current_function_decl
;
698 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
702 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
703 gimplify_ctxp
->temps
= tmp
;
705 /* Mark temporaries local within the nearest enclosing parallel. */
706 if (gimplify_omp_ctxp
)
708 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
710 && (ctx
->region_type
== ORT_WORKSHARE
711 || ctx
->region_type
== ORT_SIMD
))
712 ctx
= ctx
->outer_context
;
714 omp_add_variable (ctx
, tmp
, GOVD_LOCAL
| GOVD_SEEN
);
723 /* This case is for nested functions. We need to expose the locals
725 body_seq
= gimple_body (current_function_decl
);
726 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
732 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
733 nodes that are referenced more than once in GENERIC functions. This is
734 necessary because gimplification (translation into GIMPLE) is performed
735 by modifying tree nodes in-place, so gimplication of a shared node in a
736 first context could generate an invalid GIMPLE form in a second context.
738 This is achieved with a simple mark/copy/unmark algorithm that walks the
739 GENERIC representation top-down, marks nodes with TREE_VISITED the first
740 time it encounters them, duplicates them if they already have TREE_VISITED
741 set, and finally removes the TREE_VISITED marks it has set.
743 The algorithm works only at the function level, i.e. it generates a GENERIC
744 representation of a function with no nodes shared within the function when
745 passed a GENERIC function (except for nodes that are allowed to be shared).
747 At the global level, it is also necessary to unshare tree nodes that are
748 referenced in more than one function, for the same aforementioned reason.
749 This requires some cooperation from the front-end. There are 2 strategies:
751 1. Manual unsharing. The front-end needs to call unshare_expr on every
752 expression that might end up being shared across functions.
754 2. Deep unsharing. This is an extension of regular unsharing. Instead
755 of calling unshare_expr on expressions that might be shared across
756 functions, the front-end pre-marks them with TREE_VISITED. This will
757 ensure that they are unshared on the first reference within functions
758 when the regular unsharing algorithm runs. The counterpart is that
759 this algorithm must look deeper than for manual unsharing, which is
760 specified by LANG_HOOKS_DEEP_UNSHARING.
762 If there are only few specific cases of node sharing across functions, it is
763 probably easier for a front-end to unshare the expressions manually. On the
764 contrary, if the expressions generated at the global level are as widespread
765 as expressions generated within functions, deep unsharing is very likely the
// NOTE(review): walk_tree callbacks for the mark/copy/unmark unsharing
// algorithm described in the comment block above; declarations of `t' and
// several return/brace lines are missing from this excerpt.
768 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
769 These nodes model computations that must be done once. If we were to
770 unshare something like SAVE_EXPR(i++), the gimplification process would
771 create wrong code. However, if DATA is non-null, it must hold a pointer
772 set that is used to unshare the subtrees of these nodes. */
775 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
778 enum tree_code code
= TREE_CODE (t
);
780 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
781 copy their subtrees if we can make sure to do it only once. */
782 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
784 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
790 /* Stop at types, decls, constants like copy_tree_r. */
791 else if (TREE_CODE_CLASS (code
) == tcc_type
792 || TREE_CODE_CLASS (code
) == tcc_declaration
793 || TREE_CODE_CLASS (code
) == tcc_constant
794 /* We can't do anything sensible with a BLOCK used as an
795 expression, but we also can't just die when we see it
796 because of non-expression uses. So we avert our eyes
797 and cross our fingers. Silly Java. */
801 /* Cope with the statement expression extension. */
802 else if (code
== STATEMENT_LIST
)
805 /* Leave the bulk of the work to copy_tree_r itself. */
807 copy_tree_r (tp
, walk_subtrees
, NULL
);
812 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
813 If *TP has been visited already, then *TP is deeply copied by calling
814 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
817 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
820 enum tree_code code
= TREE_CODE (t
);
822 /* Skip types, decls, and constants. But we do want to look at their
823 types and the bounds of types. Mark them as visited so we properly
824 unmark their subtrees on the unmark pass. If we've already seen them,
825 don't look down further. */
826 if (TREE_CODE_CLASS (code
) == tcc_type
827 || TREE_CODE_CLASS (code
) == tcc_declaration
828 || TREE_CODE_CLASS (code
) == tcc_constant
)
830 if (TREE_VISITED (t
))
833 TREE_VISITED (t
) = 1;
836 /* If this node has been visited already, unshare it and don't look
838 else if (TREE_VISITED (t
))
840 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
844 /* Otherwise, mark the node as visited and keep looking. */
846 TREE_VISITED (t
) = 1;
851 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
852 copy_if_shared_r callback unmodified. */
855 copy_if_shared (tree
*tp
, void *data
)
857 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
// NOTE(review): whole-body unshare/unvisit drivers; guards around the nested
// cgraph loops and the `return expr' lines are missing from this excerpt.
860 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
861 any nested functions. */
864 unshare_body (tree fndecl
)
866 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
867 /* If the language requires deep unsharing, we need a pointer set to make
868 sure we don't repeatedly unshare subtrees of unshareable nodes. */
869 hash_set
<tree
> *visited
870 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
872 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
873 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
874 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
// Recurse into nested functions recorded on the cgraph node.
879 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
880 unshare_body (cgn
->decl
);
883 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
884 Subtrees are walked until the first unvisited node is encountered. */
887 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
891 /* If this node has been visited, unmark it and keep looking. */
892 if (TREE_VISITED (t
))
893 TREE_VISITED (t
) = 0;
895 /* Otherwise, don't look any deeper. */
902 /* Unmark the visited trees rooted at *TP. */
905 unmark_visited (tree
*tp
)
907 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
910 /* Likewise, but mark all trees as not visited. */
913 unvisit_body (tree fndecl
)
915 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
917 unmark_visited (&DECL_SAVED_TREE (fndecl
));
918 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
919 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
922 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
923 unvisit_body (cgn
->decl
);
926 /* Unconditionally make an unshared copy of EXPR. This is used when using
927 stored expressions which span multiple functions, such as BINFO_VTABLE,
928 as the normal unsharing process can't tell that they're shared. */
931 unshare_expr (tree expr
)
933 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
937 /* Worker for unshare_expr_without_location. */
940 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
943 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
949 /* Similar to unshare_expr but also prune all expression locations
953 unshare_expr_without_location (tree expr
)
955 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
957 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
// NOTE(review): walks through nested wrapper expressions, voiding each level
// and descending to the innermost value-producing statement.  Case labels,
// braces and the comment closing at original line 1006 are missing from this
// excerpt -- the switch structure cannot be reconstructed from what is here.
961 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
962 contain statements and have a value. Assign its value to a temporary
963 and give it void_type_node. Return the temporary, or NULL_TREE if
964 WRAPPER was already void. */
967 voidify_wrapper_expr (tree wrapper
, tree temp
)
969 tree type
= TREE_TYPE (wrapper
);
970 if (type
&& !VOID_TYPE_P (type
))
974 /* Set p to point to the body of the wrapper. Loop until we find
975 something that isn't a wrapper. */
976 for (p
= &wrapper
; p
&& *p
; )
978 switch (TREE_CODE (*p
))
981 TREE_SIDE_EFFECTS (*p
) = 1;
982 TREE_TYPE (*p
) = void_type_node
;
983 /* For a BIND_EXPR, the body is operand 1. */
984 p
= &BIND_EXPR_BODY (*p
);
987 case CLEANUP_POINT_EXPR
:
988 case TRY_FINALLY_EXPR
:
990 TREE_SIDE_EFFECTS (*p
) = 1;
991 TREE_TYPE (*p
) = void_type_node
;
992 p
= &TREE_OPERAND (*p
, 0);
997 tree_stmt_iterator i
= tsi_last (*p
);
998 TREE_SIDE_EFFECTS (*p
) = 1;
999 TREE_TYPE (*p
) = void_type_node
;
1000 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1005 /* Advance to the last statement. Set all container types to
1007 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1009 TREE_SIDE_EFFECTS (*p
) = 1;
1010 TREE_TYPE (*p
) = void_type_node
;
1014 case TRANSACTION_EXPR
:
1015 TREE_SIDE_EFFECTS (*p
) = 1;
1016 TREE_TYPE (*p
) = void_type_node
;
1017 p
= &TRANSACTION_EXPR_BODY (*p
);
1021 /* Assume that any tree upon which voidify_wrapper_expr is
1022 directly called is a wrapper, and that its body is op0. */
1025 TREE_SIDE_EFFECTS (*p
) = 1;
1026 TREE_TYPE (*p
) = void_type_node
;
1027 p
= &TREE_OPERAND (*p
, 0);
1035 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1039 /* The wrapper is on the RHS of an assignment that we're pushing
1041 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1042 || TREE_CODE (temp
) == MODIFY_EXPR
);
1043 TREE_OPERAND (temp
, 1) = *p
;
1048 temp
= create_tmp_var (type
, "retval");
1049 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
// NOTE(review): builds paired __builtin_stack_save/__builtin_stack_restore
// calls; declaration of `tmp_var' and the tail of the restore-call build are
// missing from this excerpt.
1058 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1059 a temporary through which they communicate. */
1062 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1066 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1067 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1068 gimple_call_set_lhs (*save
, tmp_var
);
1071 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
// NOTE(review): converts a GENERIC BIND_EXPR into a GIMPLE_BIND, adding
// stack save/restore and end-of-scope clobbers when needed.  Many interior
// lines (declarations of `t', `bind_stmt', `stack_save', braces, `else'
// branches) are missing from this excerpt; code preserved byte-for-byte.
1075 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1077 static enum gimplify_status
1078 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1080 tree bind_expr
= *expr_p
;
1081 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1084 gimple_seq body
, cleanup
;
1086 location_t start_locus
= 0, end_locus
= 0;
1088 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1090 /* Mark variables seen in this bind expr. */
1091 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1093 if (TREE_CODE (t
) == VAR_DECL
)
1095 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1097 /* Mark variable as local. */
1098 if (ctx
&& !DECL_EXTERNAL (t
)
1099 && (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1100 || splay_tree_lookup (ctx
->variables
,
1101 (splay_tree_key
) t
) == NULL
))
1103 if (ctx
->region_type
== ORT_SIMD
1104 && TREE_ADDRESSABLE (t
)
1105 && !TREE_STATIC (t
))
1106 omp_add_variable (ctx
, t
, GOVD_PRIVATE
| GOVD_SEEN
);
1108 omp_add_variable (ctx
, t
, GOVD_LOCAL
| GOVD_SEEN
);
1111 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1113 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1114 cfun
->has_local_explicit_reg_vars
= true;
1117 /* Preliminarily mark non-addressed complex variables as eligible
1118 for promotion to gimple registers. We'll transform their uses
1120 if ((TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
1121 || TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
1122 && !TREE_THIS_VOLATILE (t
)
1123 && (TREE_CODE (t
) == VAR_DECL
&& !DECL_HARD_REGISTER (t
))
1124 && !needs_to_live_in_memory (t
))
1125 DECL_GIMPLE_REG_P (t
) = 1;
1128 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1129 BIND_EXPR_BLOCK (bind_expr
));
1130 gimple_push_bind_expr (bind_stmt
);
1132 gimplify_ctxp
->save_stack
= false;
1134 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1136 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1137 gimple_bind_set_body (bind_stmt
, body
);
1139 /* Source location wise, the cleanup code (stack_restore and clobbers)
1140 belongs to the end of the block, so propagate what we have. The
1141 stack_save operation belongs to the beginning of block, which we can
1142 infer from the bind_expr directly if the block has no explicit
1144 if (BIND_EXPR_BLOCK (bind_expr
))
1146 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1147 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1149 if (start_locus
== 0)
1150 start_locus
= EXPR_LOCATION (bind_expr
);
1154 if (gimplify_ctxp
->save_stack
)
1156 gcall
*stack_restore
;
1158 /* Save stack on entry and restore it on exit. Add a try_finally
1159 block to achieve this. */
1160 build_stack_save_restore (&stack_save
, &stack_restore
);
1162 gimple_set_location (stack_save
, start_locus
);
1163 gimple_set_location (stack_restore
, end_locus
);
1165 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1168 /* Add clobbers for all variables that go out of scope. */
1169 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1171 if (TREE_CODE (t
) == VAR_DECL
1172 && !is_global_var (t
)
1173 && DECL_CONTEXT (t
) == current_function_decl
1174 && !DECL_HARD_REGISTER (t
)
1175 && !TREE_THIS_VOLATILE (t
)
1176 && !DECL_HAS_VALUE_EXPR_P (t
)
1177 /* Only care for variables that have to be in memory. Others
1178 will be rewritten into SSA names, hence moved to the top-level. */
1179 && !is_gimple_reg (t
)
1180 && flag_stack_reuse
!= SR_NONE
)
1182 tree clobber
= build_constructor (TREE_TYPE (t
), NULL
);
1183 gimple clobber_stmt
;
1184 TREE_THIS_VOLATILE (clobber
) = 1;
1185 clobber_stmt
= gimple_build_assign (t
, clobber
);
1186 gimple_set_location (clobber_stmt
, end_locus
);
1187 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1194 gimple_seq new_body
;
1197 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1198 GIMPLE_TRY_FINALLY
);
1201 gimplify_seq_add_stmt (&new_body
, stack_save
);
1202 gimplify_seq_add_stmt (&new_body
, gs
);
1203 gimple_bind_set_body (bind_stmt
, new_body
);
1206 gimplify_ctxp
->save_stack
= old_save_stack
;
1207 gimple_pop_bind_expr ();
1209 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1217 *expr_p
= NULL_TREE
;
// NOTE(review): lowers a RETURN_EXPR to a GIMPLE_RETURN, routing non-GIMPLE
// return values through a cached return temporary.  Several guards, `else'
// keywords, braces and the declaration of the late `ret' variable are missing
// from this excerpt; code preserved byte-for-byte.
1221 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1222 GIMPLE value, it is assigned to a new temporary and the statement is
1223 re-written to return the temporary.
1225 PRE_P points to the sequence where side effects that must happen before
1226 STMT should be stored. */
1228 static enum gimplify_status
1229 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1232 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1233 tree result_decl
, result
;
1235 if (ret_expr
== error_mark_node
)
1238 /* Implicit _Cilk_sync must be inserted right before any return statement
1239 if there is a _Cilk_spawn in the function. If the user has provided a
1240 _Cilk_sync, the optimizer should remove this duplicate one. */
1241 if (fn_contains_cilk_spawn_p (cfun
))
1243 tree impl_sync
= build0 (CILK_SYNC_STMT
, void_type_node
);
1244 gimplify_and_add (impl_sync
, pre_p
);
1248 || TREE_CODE (ret_expr
) == RESULT_DECL
1249 || ret_expr
== error_mark_node
)
1251 greturn
*ret
= gimple_build_return (ret_expr
);
1252 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1253 gimplify_seq_add_stmt (pre_p
, ret
);
1257 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1258 result_decl
= NULL_TREE
;
1261 result_decl
= TREE_OPERAND (ret_expr
, 0);
1263 /* See through a return by reference. */
1264 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1265 result_decl
= TREE_OPERAND (result_decl
, 0);
1267 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1268 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1269 && TREE_CODE (result_decl
) == RESULT_DECL
);
1272 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1273 Recall that aggregate_value_p is FALSE for any aggregate type that is
1274 returned in registers. If we're returning values in registers, then
1275 we don't want to extend the lifetime of the RESULT_DECL, particularly
1276 across another call. In addition, for those aggregates for which
1277 hard_function_value generates a PARALLEL, we'll die during normal
1278 expansion of structure assignments; there's special code in expand_return
1279 to handle this case that does not exist in expand_expr. */
1282 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1284 if (TREE_CODE (DECL_SIZE (result_decl
)) != INTEGER_CST
)
1286 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1287 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1288 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1289 should be effectively allocated by the caller, i.e. all calls to
1290 this function must be subject to the Return Slot Optimization. */
1291 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1292 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1294 result
= result_decl
;
1296 else if (gimplify_ctxp
->return_temp
)
1297 result
= gimplify_ctxp
->return_temp
;
1300 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1302 /* ??? With complex control flow (usually involving abnormal edges),
1303 we can wind up warning about an uninitialized value for this. Due
1304 to how this variable is constructed and initialized, this is never
1305 true. Give up and never warn. */
1306 TREE_NO_WARNING (result
) = 1;
1308 gimplify_ctxp
->return_temp
= result
;
1311 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1312 Then gimplify the whole thing. */
1313 if (result
!= result_decl
)
1314 TREE_OPERAND (ret_expr
, 0) = result
;
1316 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1318 ret
= gimple_build_return (result
);
1319 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1320 gimplify_seq_add_stmt (pre_p
, ret
);
1325 /* Gimplify a variable-length array DECL. */
1328 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1330 /* This is a variable-sized decl. Simplify its size and mark it
1331 for deferred expansion. */
1332 tree t
, addr
, ptr_type
;
1334 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1335 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1337 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1338 if (DECL_HAS_VALUE_EXPR_P (decl
))
1341 /* All occurrences of this decl in final gimplified code will be
1342 replaced by indirection. Setting DECL_VALUE_EXPR does two
1343 things: First, it lets the rest of the gimplifier know what
1344 replacement to use. Second, it lets the debug info know
1345 where to find the value. */
1346 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1347 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1348 DECL_IGNORED_P (addr
) = 0;
1349 t
= build_fold_indirect_ref (addr
);
1350 TREE_THIS_NOTRAP (t
) = 1;
1351 SET_DECL_VALUE_EXPR (decl
, t
);
1352 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1354 t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
1355 t
= build_call_expr (t
, 2, DECL_SIZE_UNIT (decl
),
1356 size_int (DECL_ALIGN (decl
)));
1357 /* The call has been built for a variable-sized object. */
1358 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1359 t
= fold_convert (ptr_type
, t
);
1360 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1362 gimplify_and_add (t
, seq_p
);
1364 /* Indicate that we need to restore the stack level when the
1365 enclosing BIND_EXPR is exited. */
1366 gimplify_ctxp
->save_stack
= true;
1369 /* A helper function to be called via walk_tree. Mark all labels under *TP
1370 as being forced. To be called for DECL_INITIAL of static variables. */
1373 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1377 if (TREE_CODE (*tp
) == LABEL_DECL
)
1378 FORCED_LABEL (*tp
) = 1;
1383 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1384 and initialization explicit. */
1386 static enum gimplify_status
1387 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1389 tree stmt
= *stmt_p
;
1390 tree decl
= DECL_EXPR_DECL (stmt
);
1392 *stmt_p
= NULL_TREE
;
1394 if (TREE_TYPE (decl
) == error_mark_node
)
1397 if ((TREE_CODE (decl
) == TYPE_DECL
1398 || TREE_CODE (decl
) == VAR_DECL
)
1399 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1400 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1402 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1403 in case its size expressions contain problematic nodes like CALL_EXPR. */
1404 if (TREE_CODE (decl
) == TYPE_DECL
1405 && DECL_ORIGINAL_TYPE (decl
)
1406 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1407 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1409 if (TREE_CODE (decl
) == VAR_DECL
&& !DECL_EXTERNAL (decl
))
1411 tree init
= DECL_INITIAL (decl
);
1413 if (TREE_CODE (DECL_SIZE_UNIT (decl
)) != INTEGER_CST
1414 || (!TREE_STATIC (decl
)
1415 && flag_stack_check
== GENERIC_STACK_CHECK
1416 && compare_tree_int (DECL_SIZE_UNIT (decl
),
1417 STACK_CHECK_MAX_VAR_SIZE
) > 0))
1418 gimplify_vla_decl (decl
, seq_p
);
1420 /* Some front ends do not explicitly declare all anonymous
1421 artificial variables. We compensate here by declaring the
1422 variables, though it would be better if the front ends would
1423 explicitly declare them. */
1424 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1425 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1426 gimple_add_tmp_var (decl
);
1428 if (init
&& init
!= error_mark_node
)
1430 if (!TREE_STATIC (decl
))
1432 DECL_INITIAL (decl
) = NULL_TREE
;
1433 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1434 gimplify_and_add (init
, seq_p
);
1438 /* We must still examine initializers for static variables
1439 as they may contain a label address. */
1440 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1447 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1448 and replacing the LOOP_EXPR with goto, but if the loop contains an
1449 EXIT_EXPR, we need to append a label for it to jump to. */
1451 static enum gimplify_status
1452 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1454 tree saved_label
= gimplify_ctxp
->exit_label
;
1455 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
1457 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
1459 gimplify_ctxp
->exit_label
= NULL_TREE
;
1461 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
1463 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
1465 if (gimplify_ctxp
->exit_label
)
1466 gimplify_seq_add_stmt (pre_p
,
1467 gimple_build_label (gimplify_ctxp
->exit_label
));
1469 gimplify_ctxp
->exit_label
= saved_label
;
1475 /* Gimplify a statement list onto a sequence. These may be created either
1476 by an enlightened front-end, or by shortcut_cond_expr. */
1478 static enum gimplify_status
1479 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
1481 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
1483 tree_stmt_iterator i
= tsi_start (*expr_p
);
1485 while (!tsi_end_p (i
))
1487 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
1501 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1504 static enum gimplify_status
1505 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1507 tree switch_expr
= *expr_p
;
1508 gimple_seq switch_body_seq
= NULL
;
1509 enum gimplify_status ret
;
1510 tree index_type
= TREE_TYPE (switch_expr
);
1511 if (index_type
== NULL_TREE
)
1512 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
1514 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
1516 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
1519 if (SWITCH_BODY (switch_expr
))
1522 vec
<tree
> saved_labels
;
1523 tree default_case
= NULL_TREE
;
1524 gswitch
*switch_stmt
;
1526 /* If someone can be bothered to fill in the labels, they can
1527 be bothered to null out the body too. */
1528 gcc_assert (!SWITCH_LABELS (switch_expr
));
1530 /* Save old labels, get new ones from body, then restore the old
1531 labels. Save all the things from the switch body to append after. */
1532 saved_labels
= gimplify_ctxp
->case_labels
;
1533 gimplify_ctxp
->case_labels
.create (8);
1535 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
1536 labels
= gimplify_ctxp
->case_labels
;
1537 gimplify_ctxp
->case_labels
= saved_labels
;
1539 preprocess_case_label_vec_for_gimple (labels
, index_type
,
1544 glabel
*new_default
;
1547 = build_case_label (NULL_TREE
, NULL_TREE
,
1548 create_artificial_label (UNKNOWN_LOCATION
));
1549 new_default
= gimple_build_label (CASE_LABEL (default_case
));
1550 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
1553 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
1554 default_case
, labels
);
1555 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
1556 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
1560 gcc_assert (SWITCH_LABELS (switch_expr
));
1565 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1567 static enum gimplify_status
1568 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1570 struct gimplify_ctx
*ctxp
;
1573 /* Invalid programs can play Duff's Device type games with, for example,
1574 #pragma omp parallel. At least in the C front end, we don't
1575 detect such invalid branches until after gimplification, in the
1576 diagnose_omp_blocks pass. */
1577 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
1578 if (ctxp
->case_labels
.exists ())
1581 label_stmt
= gimple_build_label (CASE_LABEL (*expr_p
));
1582 ctxp
->case_labels
.safe_push (*expr_p
);
1583 gimplify_seq_add_stmt (pre_p
, label_stmt
);
1588 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1592 build_and_jump (tree
*label_p
)
1594 if (label_p
== NULL
)
1595 /* If there's nowhere to jump, just fall through. */
1598 if (*label_p
== NULL_TREE
)
1600 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
1604 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
1607 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1608 This also involves building a label to jump to and communicating it to
1609 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1611 static enum gimplify_status
1612 gimplify_exit_expr (tree
*expr_p
)
1614 tree cond
= TREE_OPERAND (*expr_p
, 0);
1617 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
1618 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
1624 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1625 different from its canonical type, wrap the whole thing inside a
1626 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1629 The canonical type of a COMPONENT_REF is the type of the field being
1630 referenced--unless the field is a bit-field which can be read directly
1631 in a smaller mode, in which case the canonical type is the
1632 sign-appropriate type corresponding to that mode. */
1635 canonicalize_component_ref (tree
*expr_p
)
1637 tree expr
= *expr_p
;
1640 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
1642 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
1643 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
1645 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
1647 /* One could argue that all the stuff below is not necessary for
1648 the non-bitfield case and declare it a FE error if type
1649 adjustment would be needed. */
1650 if (TREE_TYPE (expr
) != type
)
1652 #ifdef ENABLE_TYPES_CHECKING
1653 tree old_type
= TREE_TYPE (expr
);
1657 /* We need to preserve qualifiers and propagate them from
1659 type_quals
= TYPE_QUALS (type
)
1660 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
1661 if (TYPE_QUALS (type
) != type_quals
)
1662 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
1664 /* Set the type of the COMPONENT_REF to the underlying type. */
1665 TREE_TYPE (expr
) = type
;
1667 #ifdef ENABLE_TYPES_CHECKING
1668 /* It is now a FE error, if the conversion from the canonical
1669 type to the original expression type is not useless. */
1670 gcc_assert (useless_type_conversion_p (old_type
, type
));
1675 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1676 to foo, embed that change in the ADDR_EXPR by converting
1681 where L is the lower bound. For simplicity, only do this for constant
1683 The constraint is that the type of &array[L] is trivially convertible
1687 canonicalize_addr_expr (tree
*expr_p
)
1689 tree expr
= *expr_p
;
1690 tree addr_expr
= TREE_OPERAND (expr
, 0);
1691 tree datype
, ddatype
, pddatype
;
1693 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1694 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
1695 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
1698 /* The addr_expr type should be a pointer to an array. */
1699 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
1700 if (TREE_CODE (datype
) != ARRAY_TYPE
)
1703 /* The pointer to element type shall be trivially convertible to
1704 the expression pointer type. */
1705 ddatype
= TREE_TYPE (datype
);
1706 pddatype
= build_pointer_type (ddatype
);
1707 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
1711 /* The lower bound and element sizes must be constant. */
1712 if (!TYPE_SIZE_UNIT (ddatype
)
1713 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
1714 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
1715 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
1718 /* All checks succeeded. Build a new node to merge the cast. */
1719 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
1720 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
1721 NULL_TREE
, NULL_TREE
);
1722 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
1724 /* We can have stripped a required restrict qualifier above. */
1725 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
1726 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
1729 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1730 underneath as appropriate. */
1732 static enum gimplify_status
1733 gimplify_conversion (tree
*expr_p
)
1735 location_t loc
= EXPR_LOCATION (*expr_p
);
1736 gcc_assert (CONVERT_EXPR_P (*expr_p
));
1738 /* Then strip away all but the outermost conversion. */
1739 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
1741 /* And remove the outermost conversion if it's useless. */
1742 if (tree_ssa_useless_type_conversion (*expr_p
))
1743 *expr_p
= TREE_OPERAND (*expr_p
, 0);
1745 /* If we still have a conversion at the toplevel,
1746 then canonicalize some constructs. */
1747 if (CONVERT_EXPR_P (*expr_p
))
1749 tree sub
= TREE_OPERAND (*expr_p
, 0);
1751 /* If a NOP conversion is changing the type of a COMPONENT_REF
1752 expression, then canonicalize its type now in order to expose more
1753 redundant conversions. */
1754 if (TREE_CODE (sub
) == COMPONENT_REF
)
1755 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
1757 /* If a NOP conversion is changing a pointer to array of foo
1758 to a pointer to foo, embed that change in the ADDR_EXPR. */
1759 else if (TREE_CODE (sub
) == ADDR_EXPR
)
1760 canonicalize_addr_expr (expr_p
);
1763 /* If we have a conversion to a non-register type force the
1764 use of a VIEW_CONVERT_EXPR instead. */
1765 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
1766 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
1767 TREE_OPERAND (*expr_p
, 0));
1769 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
1770 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
1771 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
1776 /* Nonlocal VLAs seen in the current function. */
1777 static hash_set
<tree
> *nonlocal_vlas
;
1779 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1780 static tree nonlocal_vla_vars
;
1782 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1783 DECL_VALUE_EXPR, and it's worth re-examining things. */
1785 static enum gimplify_status
1786 gimplify_var_or_parm_decl (tree
*expr_p
)
1788 tree decl
= *expr_p
;
1790 /* ??? If this is a local variable, and it has not been seen in any
1791 outer BIND_EXPR, then it's probably the result of a duplicate
1792 declaration, for which we've already issued an error. It would
1793 be really nice if the front end wouldn't leak these at all.
1794 Currently the only known culprit is C++ destructors, as seen
1795 in g++.old-deja/g++.jason/binding.C. */
1796 if (TREE_CODE (decl
) == VAR_DECL
1797 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
1798 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
1799 && decl_function_context (decl
) == current_function_decl
)
1801 gcc_assert (seen_error ());
1805 /* When within an OMP context, notice uses of variables. */
1806 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
1809 /* If the decl is an alias for another expression, substitute it now. */
1810 if (DECL_HAS_VALUE_EXPR_P (decl
))
1812 tree value_expr
= DECL_VALUE_EXPR (decl
);
1814 /* For referenced nonlocal VLAs add a decl for debugging purposes
1815 to the current function. */
1816 if (TREE_CODE (decl
) == VAR_DECL
1817 && TREE_CODE (DECL_SIZE_UNIT (decl
)) != INTEGER_CST
1818 && nonlocal_vlas
!= NULL
1819 && TREE_CODE (value_expr
) == INDIRECT_REF
1820 && TREE_CODE (TREE_OPERAND (value_expr
, 0)) == VAR_DECL
1821 && decl_function_context (decl
) != current_function_decl
)
1823 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1825 && (ctx
->region_type
== ORT_WORKSHARE
1826 || ctx
->region_type
== ORT_SIMD
))
1827 ctx
= ctx
->outer_context
;
1828 if (!ctx
&& !nonlocal_vlas
->add (decl
))
1830 tree copy
= copy_node (decl
);
1832 lang_hooks
.dup_lang_specific_decl (copy
);
1833 SET_DECL_RTL (copy
, 0);
1834 TREE_USED (copy
) = 1;
1835 DECL_CHAIN (copy
) = nonlocal_vla_vars
;
1836 nonlocal_vla_vars
= copy
;
1837 SET_DECL_VALUE_EXPR (copy
, unshare_expr (value_expr
));
1838 DECL_HAS_VALUE_EXPR_P (copy
) = 1;
1842 *expr_p
= unshare_expr (value_expr
);
1849 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1852 recalculate_side_effects (tree t
)
1854 enum tree_code code
= TREE_CODE (t
);
1855 int len
= TREE_OPERAND_LENGTH (t
);
1858 switch (TREE_CODE_CLASS (code
))
1860 case tcc_expression
:
1866 case PREDECREMENT_EXPR
:
1867 case PREINCREMENT_EXPR
:
1868 case POSTDECREMENT_EXPR
:
1869 case POSTINCREMENT_EXPR
:
1870 /* All of these have side-effects, no matter what their
1879 case tcc_comparison
: /* a comparison expression */
1880 case tcc_unary
: /* a unary arithmetic expression */
1881 case tcc_binary
: /* a binary arithmetic expression */
1882 case tcc_reference
: /* a reference */
1883 case tcc_vl_exp
: /* a function call */
1884 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
1885 for (i
= 0; i
< len
; ++i
)
1887 tree op
= TREE_OPERAND (t
, i
);
1888 if (op
&& TREE_SIDE_EFFECTS (op
))
1889 TREE_SIDE_EFFECTS (t
) = 1;
1894 /* No side-effects. */
1902 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1906 : min_lval '[' val ']'
1908 | compound_lval '[' val ']'
1909 | compound_lval '.' ID
1911 This is not part of the original SIMPLE definition, which separates
1912 array and member references, but it seems reasonable to handle them
1913 together. Also, this way we don't run into problems with union
1914 aliasing; gcc requires that for accesses through a union to alias, the
1915 union reference must be explicit, which was not always the case when we
1916 were splitting up array and member refs.
1918 PRE_P points to the sequence where side effects that must happen before
1919 *EXPR_P should be stored.
1921 POST_P points to the sequence where side effects that must happen after
1922 *EXPR_P should be stored. */
1924 static enum gimplify_status
1925 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
1926 fallback_t fallback
)
1929 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
1931 location_t loc
= EXPR_LOCATION (*expr_p
);
1932 tree expr
= *expr_p
;
1934 /* Create a stack of the subexpressions so later we can walk them in
1935 order from inner to outer. */
1936 auto_vec
<tree
, 10> expr_stack
;
1938 /* We can handle anything that get_inner_reference can deal with. */
1939 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
1942 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1943 if (TREE_CODE (*p
) == INDIRECT_REF
)
1944 *p
= fold_indirect_ref_loc (loc
, *p
);
1946 if (handled_component_p (*p
))
1948 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1949 additional COMPONENT_REFs. */
1950 else if ((TREE_CODE (*p
) == VAR_DECL
|| TREE_CODE (*p
) == PARM_DECL
)
1951 && gimplify_var_or_parm_decl (p
) == GS_OK
)
1956 expr_stack
.safe_push (*p
);
1959 gcc_assert (expr_stack
.length ());
1961 /* Now EXPR_STACK is a stack of pointers to all the refs we've
1962 walked through and P points to the innermost expression.
1964 Java requires that we elaborated nodes in source order. That
1965 means we must gimplify the inner expression followed by each of
1966 the indices, in order. But we can't gimplify the inner
1967 expression until we deal with any variable bounds, sizes, or
1968 positions in order to deal with PLACEHOLDER_EXPRs.
1970 So we do this in three steps. First we deal with the annotations
1971 for any variables in the components, then we gimplify the base,
1972 then we gimplify any indices, from left to right. */
1973 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
1975 tree t
= expr_stack
[i
];
1977 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
1979 /* Gimplify the low bound and element type size and put them into
1980 the ARRAY_REF. If these values are set, they have already been
1982 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
1984 tree low
= unshare_expr (array_ref_low_bound (t
));
1985 if (!is_gimple_min_invariant (low
))
1987 TREE_OPERAND (t
, 2) = low
;
1988 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
1989 post_p
, is_gimple_reg
,
1991 ret
= MIN (ret
, tret
);
1996 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
1997 is_gimple_reg
, fb_rvalue
);
1998 ret
= MIN (ret
, tret
);
2001 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
2003 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
2004 tree elmt_size
= unshare_expr (array_ref_element_size (t
));
2005 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
2007 /* Divide the element size by the alignment of the element
2010 = size_binop_loc (loc
, EXACT_DIV_EXPR
, elmt_size
, factor
);
2012 if (!is_gimple_min_invariant (elmt_size
))
2014 TREE_OPERAND (t
, 3) = elmt_size
;
2015 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
2016 post_p
, is_gimple_reg
,
2018 ret
= MIN (ret
, tret
);
2023 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
2024 is_gimple_reg
, fb_rvalue
);
2025 ret
= MIN (ret
, tret
);
2028 else if (TREE_CODE (t
) == COMPONENT_REF
)
2030 /* Set the field offset into T and gimplify it. */
2031 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2033 tree offset
= unshare_expr (component_ref_field_offset (t
));
2034 tree field
= TREE_OPERAND (t
, 1);
2036 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
2038 /* Divide the offset by its alignment. */
2039 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
, offset
, factor
);
2041 if (!is_gimple_min_invariant (offset
))
2043 TREE_OPERAND (t
, 2) = offset
;
2044 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2045 post_p
, is_gimple_reg
,
2047 ret
= MIN (ret
, tret
);
2052 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
2053 is_gimple_reg
, fb_rvalue
);
2054 ret
= MIN (ret
, tret
);
2059 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2060 so as to match the min_lval predicate. Failure to do so may result
2061 in the creation of large aggregate temporaries. */
2062 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
2063 fallback
| fb_lvalue
);
2064 ret
= MIN (ret
, tret
);
2066 /* And finally, the indices and operands of ARRAY_REF. During this
2067 loop we also remove any useless conversions. */
2068 for (; expr_stack
.length () > 0; )
2070 tree t
= expr_stack
.pop ();
2072 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2074 /* Gimplify the dimension. */
2075 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
2077 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
2078 is_gimple_val
, fb_rvalue
);
2079 ret
= MIN (ret
, tret
);
2083 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
2085 /* The innermost expression P may have originally had
2086 TREE_SIDE_EFFECTS set which would have caused all the outer
2087 expressions in *EXPR_P leading to P to also have had
2088 TREE_SIDE_EFFECTS set. */
2089 recalculate_side_effects (t
);
2092 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2093 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
2095 canonicalize_component_ref (expr_p
);
2098 expr_stack
.release ();
2100 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
2105 /* Gimplify the self modifying expression pointed to by EXPR_P
2108 PRE_P points to the list where side effects that must happen before
2109 *EXPR_P should be stored.
2111 POST_P points to the list where side effects that must happen after
2112 *EXPR_P should be stored.
2114 WANT_VALUE is nonzero iff we want to use the value of this expression
2115 in another expression.
2117 ARITH_TYPE is the type the computation should be performed in. */
2119 enum gimplify_status
2120 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2121 bool want_value
, tree arith_type
)
2123 enum tree_code code
;
2124 tree lhs
, lvalue
, rhs
, t1
;
2125 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
2127 enum tree_code arith_code
;
2128 enum gimplify_status ret
;
2129 location_t loc
= EXPR_LOCATION (*expr_p
);
2131 code
= TREE_CODE (*expr_p
);
2133 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
2134 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
2136 /* Prefix or postfix? */
2137 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
2138 /* Faster to treat as prefix if result is not used. */
2139 postfix
= want_value
;
2143 /* For postfix, make sure the inner expression's post side effects
2144 are executed after side effects from this expression. */
2148 /* Add or subtract? */
2149 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
2150 arith_code
= PLUS_EXPR
;
2152 arith_code
= MINUS_EXPR
;
2154 /* Gimplify the LHS into a GIMPLE lvalue. */
2155 lvalue
= TREE_OPERAND (*expr_p
, 0);
2156 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
2157 if (ret
== GS_ERROR
)
2160 /* Extract the operands to the arithmetic operation. */
2162 rhs
= TREE_OPERAND (*expr_p
, 1);
2164 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2165 that as the result value and in the postqueue operation. */
2168 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
2169 if (ret
== GS_ERROR
)
2172 lhs
= get_initialized_tmp_var (lhs
, pre_p
, NULL
);
2175 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2176 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
2178 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
2179 if (arith_code
== MINUS_EXPR
)
2180 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
2181 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
2184 t1
= fold_convert (TREE_TYPE (*expr_p
),
2185 fold_build2 (arith_code
, arith_type
,
2186 fold_convert (arith_type
, lhs
),
2187 fold_convert (arith_type
, rhs
)));
2191 gimplify_assign (lvalue
, t1
, pre_p
);
2192 gimplify_seq_add_seq (orig_post_p
, post
);
2198 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
2203 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2206 maybe_with_size_expr (tree
*expr_p
)
2208 tree expr
= *expr_p
;
2209 tree type
= TREE_TYPE (expr
);
2212 /* If we've already wrapped this or the type is error_mark_node, we can't do
2214 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
2215 || type
== error_mark_node
)
2218 /* If the size isn't known or is a constant, we have nothing to do. */
2219 size
= TYPE_SIZE_UNIT (type
);
2220 if (!size
|| TREE_CODE (size
) == INTEGER_CST
)
2223 /* Otherwise, make a WITH_SIZE_EXPR. */
2224 size
= unshare_expr (size
);
2225 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
2226 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
2229 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2230 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2233 enum gimplify_status
2234 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
)
2236 bool (*test
) (tree
);
2239 /* In general, we allow lvalues for function arguments to avoid
2240 extra overhead of copying large aggregates out of even larger
2241 aggregates into temporaries only to copy the temporaries to
2242 the argument list. Make optimizers happy by pulling out to
2243 temporaries those types that fit in registers. */
2244 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
2245 test
= is_gimple_val
, fb
= fb_rvalue
;
2248 test
= is_gimple_lvalue
, fb
= fb_either
;
2249 /* Also strip a TARGET_EXPR that would force an extra copy. */
2250 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
2252 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
2254 && !VOID_TYPE_P (TREE_TYPE (init
)))
2259 /* If this is a variable sized type, we must remember the size. */
2260 maybe_with_size_expr (arg_p
);
2262 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2263 /* Make sure arguments have the same location as the function call
2265 protected_set_expr_location (*arg_p
, call_location
);
2267 /* There is a sequence point before a function call. Side effects in
2268 the argument list must occur before the actual call. So, when
2269 gimplifying arguments, force gimplify_expr to use an internal
2270 post queue which is then appended to the end of PRE_P. */
2271 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
);
2274 /* Don't fold inside offloading regions: it can break code by adding decl
2275 references that weren't in the source. We'll do it during omplower pass
2279 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
2281 struct gimplify_omp_ctx
*ctx
;
2282 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
2283 if (ctx
->region_type
== ORT_TARGET
)
2285 return fold_stmt (gsi
);
2288 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2289 WANT_VALUE is true if the result of the call is desired. */
2291 static enum gimplify_status
2292 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
2294 tree fndecl
, parms
, p
, fnptrtype
;
2295 enum gimplify_status ret
;
2298 bool builtin_va_start_p
= false;
2299 location_t loc
= EXPR_LOCATION (*expr_p
);
2301 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
2303 /* For reliable diagnostics during inlining, it is necessary that
2304 every call_expr be annotated with file and line. */
2305 if (! EXPR_HAS_LOCATION (*expr_p
))
2306 SET_EXPR_LOCATION (*expr_p
, input_location
);
2308 /* Gimplify internal functions created in the FEs. */
2309 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
2314 nargs
= call_expr_nargs (*expr_p
);
2315 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
2316 auto_vec
<tree
> vargs (nargs
);
2318 for (i
= 0; i
< nargs
; i
++)
2320 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
2321 EXPR_LOCATION (*expr_p
));
2322 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
2324 gimple call
= gimple_build_call_internal_vec (ifn
, vargs
);
2325 gimplify_seq_add_stmt (pre_p
, call
);
2329 /* This may be a call to a builtin function.
2331 Builtin function calls may be transformed into different
2332 (and more efficient) builtin function calls under certain
2333 circumstances. Unfortunately, gimplification can muck things
2334 up enough that the builtin expanders are not aware that certain
2335 transformations are still valid.
2337 So we attempt transformation/gimplification of the call before
2338 we gimplify the CALL_EXPR. At this time we do not manage to
2339 transform all calls in the same manner as the expanders do, but
2340 we do transform most of them. */
2341 fndecl
= get_callee_fndecl (*expr_p
);
2343 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
2344 switch (DECL_FUNCTION_CODE (fndecl
))
2346 case BUILT_IN_VA_START
:
2348 builtin_va_start_p
= TRUE
;
2349 if (call_expr_nargs (*expr_p
) < 2)
2351 error ("too few arguments to function %<va_start%>");
2352 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
2356 if (fold_builtin_next_arg (*expr_p
, true))
2358 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
2365 *expr_p
= build_int_cst (TREE_TYPE (*expr_p
),
2366 LOCATION_LINE (EXPR_LOCATION (*expr_p
)));
2371 const char *locfile
= LOCATION_FILE (EXPR_LOCATION (*expr_p
));
2372 *expr_p
= build_string_literal (strlen (locfile
) + 1, locfile
);
2375 case BUILT_IN_FUNCTION
:
2377 const char *function
;
2378 function
= IDENTIFIER_POINTER (DECL_NAME (current_function_decl
));
2379 *expr_p
= build_string_literal (strlen (function
) + 1, function
);
2385 if (fndecl
&& DECL_BUILT_IN (fndecl
))
2387 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
2388 if (new_tree
&& new_tree
!= *expr_p
)
2390 /* There was a transformation of this call which computes the
2391 same value, but in a more efficient way. Return and try
2398 /* Remember the original function pointer type. */
2399 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
2401 /* There is a sequence point before the call, so any side effects in
2402 the calling expression must occur before the actual call. Force
2403 gimplify_expr to use an internal post queue. */
2404 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
2405 is_gimple_call_addr
, fb_rvalue
);
2407 nargs
= call_expr_nargs (*expr_p
);
2409 /* Get argument types for verification. */
2410 fndecl
= get_callee_fndecl (*expr_p
);
2413 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
2415 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
2417 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
2418 p
= DECL_ARGUMENTS (fndecl
);
2423 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
2426 /* If the last argument is __builtin_va_arg_pack () and it is not
2427 passed as a named argument, decrease the number of CALL_EXPR
2428 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2431 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
2433 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
2434 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
2437 && TREE_CODE (last_arg_fndecl
) == FUNCTION_DECL
2438 && DECL_BUILT_IN_CLASS (last_arg_fndecl
) == BUILT_IN_NORMAL
2439 && DECL_FUNCTION_CODE (last_arg_fndecl
) == BUILT_IN_VA_ARG_PACK
)
2441 tree call
= *expr_p
;
2444 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
2445 CALL_EXPR_FN (call
),
2446 nargs
, CALL_EXPR_ARGP (call
));
2448 /* Copy all CALL_EXPR flags, location and block, except
2449 CALL_EXPR_VA_ARG_PACK flag. */
2450 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
2451 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
2452 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
2453 = CALL_EXPR_RETURN_SLOT_OPT (call
);
2454 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
2455 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
2457 /* Set CALL_EXPR_VA_ARG_PACK. */
2458 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
2462 /* Gimplify the function arguments. */
2465 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
2466 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
2467 PUSH_ARGS_REVERSED
? i
-- : i
++)
2469 enum gimplify_status t
;
2471 /* Avoid gimplifying the second argument to va_start, which needs to
2472 be the plain PARM_DECL. */
2473 if ((i
!= 1) || !builtin_va_start_p
)
2475 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
2476 EXPR_LOCATION (*expr_p
));
2484 /* Gimplify the static chain. */
2485 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
2487 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
2488 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
2491 enum gimplify_status t
;
2492 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
2493 EXPR_LOCATION (*expr_p
));
2499 /* Verify the function result. */
2500 if (want_value
&& fndecl
2501 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
2503 error_at (loc
, "using result of function returning %<void%>");
2507 /* Try this again in case gimplification exposed something. */
2508 if (ret
!= GS_ERROR
)
2510 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
2512 if (new_tree
&& new_tree
!= *expr_p
)
2514 /* There was a transformation of this call which computes the
2515 same value, but in a more efficient way. Return and try
2523 *expr_p
= error_mark_node
;
2527 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2528 decl. This allows us to eliminate redundant or useless
2529 calls to "const" functions. */
2530 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
2532 int flags
= call_expr_flags (*expr_p
);
2533 if (flags
& (ECF_CONST
| ECF_PURE
)
2534 /* An infinite loop is considered a side effect. */
2535 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
2536 TREE_SIDE_EFFECTS (*expr_p
) = 0;
2539 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2540 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2541 form and delegate the creation of a GIMPLE_CALL to
2542 gimplify_modify_expr. This is always possible because when
2543 WANT_VALUE is true, the caller wants the result of this call into
2544 a temporary, which means that we will emit an INIT_EXPR in
2545 internal_get_tmp_var which will then be handled by
2546 gimplify_modify_expr. */
2549 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2550 have to do is replicate it as a GIMPLE_CALL tuple. */
2551 gimple_stmt_iterator gsi
;
2552 call
= gimple_build_call_from_tree (*expr_p
);
2553 gimple_call_set_fntype (call
, TREE_TYPE (fnptrtype
));
2554 notice_special_calls (call
);
2555 gimplify_seq_add_stmt (pre_p
, call
);
2556 gsi
= gsi_last (*pre_p
);
2557 maybe_fold_stmt (&gsi
);
2558 *expr_p
= NULL_TREE
;
2561 /* Remember the original function type. */
2562 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
2563 CALL_EXPR_FN (*expr_p
));
2568 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2569 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2571 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2572 condition is true or false, respectively. If null, we should generate
2573 our own to skip over the evaluation of this specific expression.
2575 LOCUS is the source location of the COND_EXPR.
2577 This function is the tree equivalent of do_jump.
2579 shortcut_cond_r should only be called by shortcut_cond_expr. */
2582 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
2585 tree local_label
= NULL_TREE
;
2586 tree t
, expr
= NULL
;
2588 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2589 retain the shortcut semantics. Just insert the gotos here;
2590 shortcut_cond_expr will append the real blocks later. */
2591 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
2593 location_t new_locus
;
2595 /* Turn if (a && b) into
2597 if (a); else goto no;
2598 if (b) goto yes; else goto no;
2601 if (false_label_p
== NULL
)
2602 false_label_p
= &local_label
;
2604 /* Keep the original source location on the first 'if'. */
2605 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
2606 append_to_statement_list (t
, &expr
);
2608 /* Set the source location of the && on the second 'if'. */
2609 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
2610 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
2612 append_to_statement_list (t
, &expr
);
2614 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
2616 location_t new_locus
;
2618 /* Turn if (a || b) into
2621 if (b) goto yes; else goto no;
2624 if (true_label_p
== NULL
)
2625 true_label_p
= &local_label
;
2627 /* Keep the original source location on the first 'if'. */
2628 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
2629 append_to_statement_list (t
, &expr
);
2631 /* Set the source location of the || on the second 'if'. */
2632 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
2633 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
2635 append_to_statement_list (t
, &expr
);
2637 else if (TREE_CODE (pred
) == COND_EXPR
2638 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
2639 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
2641 location_t new_locus
;
2643 /* As long as we're messing with gotos, turn if (a ? b : c) into
2645 if (b) goto yes; else goto no;
2647 if (c) goto yes; else goto no;
2649 Don't do this if one of the arms has void type, which can happen
2650 in C++ when the arm is throw. */
2652 /* Keep the original source location on the first 'if'. Set the source
2653 location of the ? on the second 'if'. */
2654 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
2655 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
2656 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
2657 false_label_p
, locus
),
2658 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
2659 false_label_p
, new_locus
));
2663 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
2664 build_and_jump (true_label_p
),
2665 build_and_jump (false_label_p
));
2666 SET_EXPR_LOCATION (expr
, locus
);
2671 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
2672 append_to_statement_list (t
, &expr
);
2678 /* Given a conditional expression EXPR with short-circuit boolean
2679 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2680 predicate apart into the equivalent sequence of conditionals. */
2683 shortcut_cond_expr (tree expr
)
2685 tree pred
= TREE_OPERAND (expr
, 0);
2686 tree then_
= TREE_OPERAND (expr
, 1);
2687 tree else_
= TREE_OPERAND (expr
, 2);
2688 tree true_label
, false_label
, end_label
, t
;
2690 tree
*false_label_p
;
2691 bool emit_end
, emit_false
, jump_over_else
;
2692 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
2693 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
2695 /* First do simple transformations. */
2698 /* If there is no 'else', turn
2701 if (a) if (b) then c. */
2702 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
2704 /* Keep the original source location on the first 'if'. */
2705 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
2706 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
2707 /* Set the source location of the && on the second 'if'. */
2708 if (EXPR_HAS_LOCATION (pred
))
2709 SET_EXPR_LOCATION (expr
, EXPR_LOCATION (pred
));
2710 then_
= shortcut_cond_expr (expr
);
2711 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
2712 pred
= TREE_OPERAND (pred
, 0);
2713 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
2714 SET_EXPR_LOCATION (expr
, locus
);
2720 /* If there is no 'then', turn
2723 if (a); else if (b); else d. */
2724 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
2726 /* Keep the original source location on the first 'if'. */
2727 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
2728 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
2729 /* Set the source location of the || on the second 'if'. */
2730 if (EXPR_HAS_LOCATION (pred
))
2731 SET_EXPR_LOCATION (expr
, EXPR_LOCATION (pred
));
2732 else_
= shortcut_cond_expr (expr
);
2733 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
2734 pred
= TREE_OPERAND (pred
, 0);
2735 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
2736 SET_EXPR_LOCATION (expr
, locus
);
2740 /* If we're done, great. */
2741 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
2742 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
2745 /* Otherwise we need to mess with gotos. Change
2748 if (a); else goto no;
2751 and recursively gimplify the condition. */
2753 true_label
= false_label
= end_label
= NULL_TREE
;
2755 /* If our arms just jump somewhere, hijack those labels so we don't
2756 generate jumps to jumps. */
2759 && TREE_CODE (then_
) == GOTO_EXPR
2760 && TREE_CODE (GOTO_DESTINATION (then_
)) == LABEL_DECL
)
2762 true_label
= GOTO_DESTINATION (then_
);
2768 && TREE_CODE (else_
) == GOTO_EXPR
2769 && TREE_CODE (GOTO_DESTINATION (else_
)) == LABEL_DECL
)
2771 false_label
= GOTO_DESTINATION (else_
);
2776 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2778 true_label_p
= &true_label
;
2780 true_label_p
= NULL
;
2782 /* The 'else' branch also needs a label if it contains interesting code. */
2783 if (false_label
|| else_se
)
2784 false_label_p
= &false_label
;
2786 false_label_p
= NULL
;
2788 /* If there was nothing else in our arms, just forward the label(s). */
2789 if (!then_se
&& !else_se
)
2790 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
2791 EXPR_LOC_OR_LOC (expr
, input_location
));
2793 /* If our last subexpression already has a terminal label, reuse it. */
2795 t
= expr_last (else_
);
2797 t
= expr_last (then_
);
2800 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
2801 end_label
= LABEL_EXPR_LABEL (t
);
2803 /* If we don't care about jumping to the 'else' branch, jump to the end
2804 if the condition is false. */
2806 false_label_p
= &end_label
;
2808 /* We only want to emit these labels if we aren't hijacking them. */
2809 emit_end
= (end_label
== NULL_TREE
);
2810 emit_false
= (false_label
== NULL_TREE
);
2812 /* We only emit the jump over the else clause if we have to--if the
2813 then clause may fall through. Otherwise we can wind up with a
2814 useless jump and a useless label at the end of gimplified code,
2815 which will cause us to think that this conditional as a whole
2816 falls through even if it doesn't. If we then inline a function
2817 which ends with such a condition, that can cause us to issue an
2818 inappropriate warning about control reaching the end of a
2819 non-void function. */
2820 jump_over_else
= block_may_fallthru (then_
);
2822 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
2823 EXPR_LOC_OR_LOC (expr
, input_location
));
2826 append_to_statement_list (pred
, &expr
);
2828 append_to_statement_list (then_
, &expr
);
2833 tree last
= expr_last (expr
);
2834 t
= build_and_jump (&end_label
);
2835 if (EXPR_HAS_LOCATION (last
))
2836 SET_EXPR_LOCATION (t
, EXPR_LOCATION (last
));
2837 append_to_statement_list (t
, &expr
);
2841 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
2842 append_to_statement_list (t
, &expr
);
2844 append_to_statement_list (else_
, &expr
);
2846 if (emit_end
&& end_label
)
2848 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
2849 append_to_statement_list (t
, &expr
);
2855 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2858 gimple_boolify (tree expr
)
2860 tree type
= TREE_TYPE (expr
);
2861 location_t loc
= EXPR_LOCATION (expr
);
2863 if (TREE_CODE (expr
) == NE_EXPR
2864 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
2865 && integer_zerop (TREE_OPERAND (expr
, 1)))
2867 tree call
= TREE_OPERAND (expr
, 0);
2868 tree fn
= get_callee_fndecl (call
);
2870 /* For __builtin_expect ((long) (x), y) recurse into x as well
2871 if x is truth_value_p. */
2873 && DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
2874 && DECL_FUNCTION_CODE (fn
) == BUILT_IN_EXPECT
2875 && call_expr_nargs (call
) == 2)
2877 tree arg
= CALL_EXPR_ARG (call
, 0);
2880 if (TREE_CODE (arg
) == NOP_EXPR
2881 && TREE_TYPE (arg
) == TREE_TYPE (call
))
2882 arg
= TREE_OPERAND (arg
, 0);
2883 if (truth_value_p (TREE_CODE (arg
)))
2885 arg
= gimple_boolify (arg
);
2886 CALL_EXPR_ARG (call
, 0)
2887 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
2893 switch (TREE_CODE (expr
))
2895 case TRUTH_AND_EXPR
:
2897 case TRUTH_XOR_EXPR
:
2898 case TRUTH_ANDIF_EXPR
:
2899 case TRUTH_ORIF_EXPR
:
2900 /* Also boolify the arguments of truth exprs. */
2901 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
2904 case TRUTH_NOT_EXPR
:
2905 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
2907 /* These expressions always produce boolean results. */
2908 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
2909 TREE_TYPE (expr
) = boolean_type_node
;
2913 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
2915 case annot_expr_ivdep_kind
:
2916 case annot_expr_no_vector_kind
:
2917 case annot_expr_vector_kind
:
2918 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
2919 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
2920 TREE_TYPE (expr
) = boolean_type_node
;
2927 if (COMPARISON_CLASS_P (expr
))
2929 /* There expressions always prduce boolean results. */
2930 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
2931 TREE_TYPE (expr
) = boolean_type_node
;
2934 /* Other expressions that get here must have boolean values, but
2935 might need to be converted to the appropriate mode. */
2936 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
2938 return fold_convert_loc (loc
, boolean_type_node
, expr
);
2942 /* Given a conditional expression *EXPR_P without side effects, gimplify
2943 its operands. New statements are inserted to PRE_P. */
2945 static enum gimplify_status
2946 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2948 tree expr
= *expr_p
, cond
;
2949 enum gimplify_status ret
, tret
;
2950 enum tree_code code
;
2952 cond
= gimple_boolify (COND_EXPR_COND (expr
));
2954 /* We need to handle && and || specially, as their gimplification
2955 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2956 code
= TREE_CODE (cond
);
2957 if (code
== TRUTH_ANDIF_EXPR
)
2958 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
2959 else if (code
== TRUTH_ORIF_EXPR
)
2960 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
2961 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_condexpr
, fb_rvalue
);
2962 COND_EXPR_COND (*expr_p
) = cond
;
2964 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
2965 is_gimple_val
, fb_rvalue
);
2966 ret
= MIN (ret
, tret
);
2967 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
2968 is_gimple_val
, fb_rvalue
);
2970 return MIN (ret
, tret
);
2973 /* Return true if evaluating EXPR could trap.
2974 EXPR is GENERIC, while tree_could_trap_p can be called
2978 generic_expr_could_trap_p (tree expr
)
2982 if (!expr
|| is_gimple_val (expr
))
2985 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
2988 n
= TREE_OPERAND_LENGTH (expr
);
2989 for (i
= 0; i
< n
; i
++)
2990 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
2996 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3005 The second form is used when *EXPR_P is of type void.
3007 PRE_P points to the list where side effects that must happen before
3008 *EXPR_P should be stored. */
3010 static enum gimplify_status
3011 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
3013 tree expr
= *expr_p
;
3014 tree type
= TREE_TYPE (expr
);
3015 location_t loc
= EXPR_LOCATION (expr
);
3016 tree tmp
, arm1
, arm2
;
3017 enum gimplify_status ret
;
3018 tree label_true
, label_false
, label_cont
;
3019 bool have_then_clause_p
, have_else_clause_p
;
3021 enum tree_code pred_code
;
3022 gimple_seq seq
= NULL
;
3024 /* If this COND_EXPR has a value, copy the values into a temporary within
3026 if (!VOID_TYPE_P (type
))
3028 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
3031 /* If either an rvalue is ok or we do not require an lvalue, create the
3032 temporary. But we cannot do that if the type is addressable. */
3033 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
3034 && !TREE_ADDRESSABLE (type
))
3036 if (gimplify_ctxp
->allow_rhs_cond_expr
3037 /* If either branch has side effects or could trap, it can't be
3038 evaluated unconditionally. */
3039 && !TREE_SIDE_EFFECTS (then_
)
3040 && !generic_expr_could_trap_p (then_
)
3041 && !TREE_SIDE_EFFECTS (else_
)
3042 && !generic_expr_could_trap_p (else_
))
3043 return gimplify_pure_cond_expr (expr_p
, pre_p
);
3045 tmp
= create_tmp_var (type
, "iftmp");
3049 /* Otherwise, only create and copy references to the values. */
3052 type
= build_pointer_type (type
);
3054 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
3055 then_
= build_fold_addr_expr_loc (loc
, then_
);
3057 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
3058 else_
= build_fold_addr_expr_loc (loc
, else_
);
3061 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
3063 tmp
= create_tmp_var (type
, "iftmp");
3064 result
= build_simple_mem_ref_loc (loc
, tmp
);
3067 /* Build the new then clause, `tmp = then_;'. But don't build the
3068 assignment if the value is void; in C++ it can be if it's a throw. */
3069 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
3070 TREE_OPERAND (expr
, 1) = build2 (MODIFY_EXPR
, type
, tmp
, then_
);
3072 /* Similarly, build the new else clause, `tmp = else_;'. */
3073 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
3074 TREE_OPERAND (expr
, 2) = build2 (MODIFY_EXPR
, type
, tmp
, else_
);
3076 TREE_TYPE (expr
) = void_type_node
;
3077 recalculate_side_effects (expr
);
3079 /* Move the COND_EXPR to the prequeue. */
3080 gimplify_stmt (&expr
, pre_p
);
3086 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3087 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
3088 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
3089 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
3091 /* Make sure the condition has BOOLEAN_TYPE. */
3092 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3094 /* Break apart && and || conditions. */
3095 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
3096 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
3098 expr
= shortcut_cond_expr (expr
);
3100 if (expr
!= *expr_p
)
3104 /* We can't rely on gimplify_expr to re-gimplify the expanded
3105 form properly, as cleanups might cause the target labels to be
3106 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3107 set up a conditional context. */
3108 gimple_push_condition ();
3109 gimplify_stmt (expr_p
, &seq
);
3110 gimple_pop_condition (pre_p
);
3111 gimple_seq_add_seq (pre_p
, seq
);
3117 /* Now do the normal gimplification. */
3119 /* Gimplify condition. */
3120 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
, is_gimple_condexpr
,
3122 if (ret
== GS_ERROR
)
3124 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
3126 gimple_push_condition ();
3128 have_then_clause_p
= have_else_clause_p
= false;
3129 if (TREE_OPERAND (expr
, 1) != NULL
3130 && TREE_CODE (TREE_OPERAND (expr
, 1)) == GOTO_EXPR
3131 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr
, 1))) == LABEL_DECL
3132 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr
, 1)))
3133 == current_function_decl
)
3134 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3135 have different locations, otherwise we end up with incorrect
3136 location information on the branches. */
3138 || !EXPR_HAS_LOCATION (expr
)
3139 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr
, 1))
3140 || EXPR_LOCATION (expr
) == EXPR_LOCATION (TREE_OPERAND (expr
, 1))))
3142 label_true
= GOTO_DESTINATION (TREE_OPERAND (expr
, 1));
3143 have_then_clause_p
= true;
3146 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
3147 if (TREE_OPERAND (expr
, 2) != NULL
3148 && TREE_CODE (TREE_OPERAND (expr
, 2)) == GOTO_EXPR
3149 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr
, 2))) == LABEL_DECL
3150 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr
, 2)))
3151 == current_function_decl
)
3152 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3153 have different locations, otherwise we end up with incorrect
3154 location information on the branches. */
3156 || !EXPR_HAS_LOCATION (expr
)
3157 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr
, 2))
3158 || EXPR_LOCATION (expr
) == EXPR_LOCATION (TREE_OPERAND (expr
, 2))))
3160 label_false
= GOTO_DESTINATION (TREE_OPERAND (expr
, 2));
3161 have_else_clause_p
= true;
3164 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
3166 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
3169 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
3172 gimplify_seq_add_stmt (&seq
, cond_stmt
);
3173 label_cont
= NULL_TREE
;
3174 if (!have_then_clause_p
)
3176 /* For if (...) {} else { code; } put label_true after
3178 if (TREE_OPERAND (expr
, 1) == NULL_TREE
3179 && !have_else_clause_p
3180 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
3181 label_cont
= label_true
;
3184 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
3185 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
3186 /* For if (...) { code; } else {} or
3187 if (...) { code; } else goto label; or
3188 if (...) { code; return; } else { ... }
3189 label_cont isn't needed. */
3190 if (!have_else_clause_p
3191 && TREE_OPERAND (expr
, 2) != NULL_TREE
3192 && gimple_seq_may_fallthru (seq
))
3195 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
3197 g
= gimple_build_goto (label_cont
);
3199 /* GIMPLE_COND's are very low level; they have embedded
3200 gotos. This particular embedded goto should not be marked
3201 with the location of the original COND_EXPR, as it would
3202 correspond to the COND_EXPR's condition, not the ELSE or the
3203 THEN arms. To avoid marking it with the wrong location, flag
3204 it as "no location". */
3205 gimple_set_do_not_emit_location (g
);
3207 gimplify_seq_add_stmt (&seq
, g
);
3211 if (!have_else_clause_p
)
3213 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
3214 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
3217 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
3219 gimple_pop_condition (pre_p
);
3220 gimple_seq_add_seq (pre_p
, seq
);
3222 if (ret
== GS_ERROR
)
3224 else if (have_then_clause_p
|| have_else_clause_p
)
3228 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3229 expr
= TREE_OPERAND (expr
, 0);
3230 gimplify_stmt (&expr
, pre_p
);
3237 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3238 to be marked addressable.
3240 We cannot rely on such an expression being directly markable if a temporary
3241 has been created by the gimplification. In this case, we create another
3242 temporary and initialize it with a copy, which will become a store after we
3243 mark it addressable. This can happen if the front-end passed us something
3244 that it could not mark addressable yet, like a Fortran pass-by-reference
3245 parameter (int) floatvar. */
3248 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
3250 while (handled_component_p (*expr_p
))
3251 expr_p
= &TREE_OPERAND (*expr_p
, 0);
3252 if (is_gimple_reg (*expr_p
))
3254 tree var
= get_initialized_tmp_var (*expr_p
, seq_p
, NULL
);
3255 DECL_GIMPLE_REG_P (var
) = 0;
3260 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3261 a call to __builtin_memcpy. */
3263 static enum gimplify_status
3264 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
3267 tree t
, to
, to_ptr
, from
, from_ptr
;
3269 location_t loc
= EXPR_LOCATION (*expr_p
);
3271 to
= TREE_OPERAND (*expr_p
, 0);
3272 from
= TREE_OPERAND (*expr_p
, 1);
3274 /* Mark the RHS addressable. Beware that it may not be possible to do so
3275 directly if a temporary has been created by the gimplification. */
3276 prepare_gimple_addressable (&from
, seq_p
);
3278 mark_addressable (from
);
3279 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
3280 gimplify_arg (&from_ptr
, seq_p
, loc
);
3282 mark_addressable (to
);
3283 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
3284 gimplify_arg (&to_ptr
, seq_p
, loc
);
3286 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3288 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
3292 /* tmp = memcpy() */
3293 t
= create_tmp_var (TREE_TYPE (to_ptr
));
3294 gimple_call_set_lhs (gs
, t
);
3295 gimplify_seq_add_stmt (seq_p
, gs
);
3297 *expr_p
= build_simple_mem_ref (t
);
3301 gimplify_seq_add_stmt (seq_p
, gs
);
3306 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3307 a call to __builtin_memset. In this case we know that the RHS is
3308 a CONSTRUCTOR with an empty element list. */
3310 static enum gimplify_status
3311 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
3314 tree t
, from
, to
, to_ptr
;
3316 location_t loc
= EXPR_LOCATION (*expr_p
);
3318 /* Assert our assumptions, to abort instead of producing wrong code
3319 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3320 not be immediately exposed. */
3321 from
= TREE_OPERAND (*expr_p
, 1);
3322 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
3323 from
= TREE_OPERAND (from
, 0);
3325 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
3326 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
3329 to
= TREE_OPERAND (*expr_p
, 0);
3331 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
3332 gimplify_arg (&to_ptr
, seq_p
, loc
);
3333 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
3335 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
3339 /* tmp = memset() */
3340 t
= create_tmp_var (TREE_TYPE (to_ptr
));
3341 gimple_call_set_lhs (gs
, t
);
3342 gimplify_seq_add_stmt (seq_p
, gs
);
3344 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
3348 gimplify_seq_add_stmt (seq_p
, gs
);
3353 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3354 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3355 assignment. Return non-null if we detect a potential overlap. */
3357 struct gimplify_init_ctor_preeval_data
3359 /* The base decl of the lhs object. May be NULL, in which case we
3360 have to assume the lhs is indirect. */
3363 /* The alias set of the lhs object. */
3364 alias_set_type lhs_alias_set
;
3368 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
3370 struct gimplify_init_ctor_preeval_data
*data
3371 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
3374 /* If we find the base object, obviously we have overlap. */
3375 if (data
->lhs_base_decl
== t
)
3378 /* If the constructor component is indirect, determine if we have a
3379 potential overlap with the lhs. The only bits of information we
3380 have to go on at this point are addressability and alias sets. */
3381 if ((INDIRECT_REF_P (t
)
3382 || TREE_CODE (t
) == MEM_REF
)
3383 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
3384 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
3387 /* If the constructor component is a call, determine if it can hide a
3388 potential overlap with the lhs through an INDIRECT_REF like above.
3389 ??? Ugh - this is completely broken. In fact this whole analysis
3390 doesn't look conservative. */
3391 if (TREE_CODE (t
) == CALL_EXPR
)
3393 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
3395 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
3396 if (POINTER_TYPE_P (TREE_VALUE (type
))
3397 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
3398 && alias_sets_conflict_p (data
->lhs_alias_set
,
3400 (TREE_TYPE (TREE_VALUE (type
)))))
3404 if (IS_TYPE_OR_DECL_P (t
))
3409 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3410 force values that overlap with the lhs (as described by *DATA)
3411 into temporaries. */
3414 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3415 struct gimplify_init_ctor_preeval_data
*data
)
3417 enum gimplify_status one
;
3419 /* If the value is constant, then there's nothing to pre-evaluate. */
3420 if (TREE_CONSTANT (*expr_p
))
3422 /* Ensure it does not have side effects, it might contain a reference to
3423 the object we're initializing. */
3424 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
3428 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3429 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
3432 /* Recurse for nested constructors. */
3433 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
3435 unsigned HOST_WIDE_INT ix
;
3436 constructor_elt
*ce
;
3437 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
3439 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
3440 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
3445 /* If this is a variable sized type, we must remember the size. */
3446 maybe_with_size_expr (expr_p
);
3448 /* Gimplify the constructor element to something appropriate for the rhs
3449 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3450 the gimplifier will consider this a store to memory. Doing this
3451 gimplification now means that we won't have to deal with complicated
3452 language-specific trees, nor trees like SAVE_EXPR that can induce
3453 exponential search behavior. */
3454 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
3455 if (one
== GS_ERROR
)
3461 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3462 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3463 always be true for all scalars, since is_gimple_mem_rhs insists on a
3464 temporary variable for them. */
3465 if (DECL_P (*expr_p
))
3468 /* If this is of variable size, we have no choice but to assume it doesn't
3469 overlap since we can't make a temporary for it. */
3470 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
3473 /* Otherwise, we must search for overlap ... */
3474 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
3477 /* ... and if found, force the value into a temporary. */
3478 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
3481 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3482 a RANGE_EXPR in a CONSTRUCTOR for an array.
3486 object[var] = value;
3493 We increment var _after_ the loop exit check because we might otherwise
3494 fail if upper == TYPE_MAX_VALUE (type for upper).
3496 Note that we never have to deal with SAVE_EXPRs here, because this has
3497 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3499 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
3500 gimple_seq
*, bool);
3503 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
3504 tree value
, tree array_elt_type
,
3505 gimple_seq
*pre_p
, bool cleared
)
3507 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
3508 tree var
, var_type
, cref
, tmp
;
3510 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
3511 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
3512 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
3514 /* Create and initialize the index variable. */
3515 var_type
= TREE_TYPE (upper
);
3516 var
= create_tmp_var (var_type
);
3517 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
3519 /* Add the loop entry label. */
3520 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
3522 /* Build the reference. */
3523 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
3524 var
, NULL_TREE
, NULL_TREE
);
3526 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3527 the store. Otherwise just assign value to the reference. */
3529 if (TREE_CODE (value
) == CONSTRUCTOR
)
3530 /* NB we might have to call ourself recursively through
3531 gimplify_init_ctor_eval if the value is a constructor. */
3532 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
3535 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
3537 /* We exit the loop when the index var is equal to the upper bound. */
3538 gimplify_seq_add_stmt (pre_p
,
3539 gimple_build_cond (EQ_EXPR
, var
, upper
,
3540 loop_exit_label
, fall_thru_label
));
3542 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
3544 /* Otherwise, increment the index var... */
3545 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
3546 fold_convert (var_type
, integer_one_node
));
3547 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
3549 /* ...and jump back to the loop entry. */
3550 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
3552 /* Add the loop exit label. */
3553 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
3556 /* Return true if FDECL is accessing a field that is zero sized. */
3559 zero_sized_field_decl (const_tree fdecl
)
3561 if (TREE_CODE (fdecl
) == FIELD_DECL
&& DECL_SIZE (fdecl
)
3562 && integer_zerop (DECL_SIZE (fdecl
)))
3567 /* Return true if TYPE is zero sized. */
3570 zero_sized_type (const_tree type
)
3572 if (AGGREGATE_TYPE_P (type
) && TYPE_SIZE (type
)
3573 && integer_zerop (TYPE_SIZE (type
)))
3578 /* A subroutine of gimplify_init_constructor. Generate individual
3579 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3580 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3581 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3585 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
3586 gimple_seq
*pre_p
, bool cleared
)
3588 tree array_elt_type
= NULL
;
3589 unsigned HOST_WIDE_INT ix
;
3590 tree purpose
, value
;
3592 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
3593 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
3595 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
3599 /* NULL values are created above for gimplification errors. */
3603 if (cleared
&& initializer_zerop (value
))
3606 /* ??? Here's to hoping the front end fills in all of the indices,
3607 so we don't have to figure out what's missing ourselves. */
3608 gcc_assert (purpose
);
3610 /* Skip zero-sized fields, unless value has side-effects. This can
3611 happen with calls to functions returning a zero-sized type, which
3612 we shouldn't discard. As a number of downstream passes don't
3613 expect sets of zero-sized fields, we rely on the gimplification of
3614 the MODIFY_EXPR we make below to drop the assignment statement. */
3615 if (! TREE_SIDE_EFFECTS (value
) && zero_sized_field_decl (purpose
))
3618 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3620 if (TREE_CODE (purpose
) == RANGE_EXPR
)
3622 tree lower
= TREE_OPERAND (purpose
, 0);
3623 tree upper
= TREE_OPERAND (purpose
, 1);
3625 /* If the lower bound is equal to upper, just treat it as if
3626 upper was the index. */
3627 if (simple_cst_equal (lower
, upper
))
3631 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
3632 array_elt_type
, pre_p
, cleared
);
3639 /* Do not use bitsizetype for ARRAY_REF indices. */
3640 if (TYPE_DOMAIN (TREE_TYPE (object
)))
3642 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
3644 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
3645 purpose
, NULL_TREE
, NULL_TREE
);
3649 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
3650 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
3651 unshare_expr (object
), purpose
, NULL_TREE
);
3654 if (TREE_CODE (value
) == CONSTRUCTOR
3655 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
3656 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
3660 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
3661 gimplify_and_add (init
, pre_p
);
3667 /* Return the appropriate RHS predicate for this LHS. */
3670 rhs_predicate_for (tree lhs
)
3672 if (is_gimple_reg (lhs
))
3673 return is_gimple_reg_rhs_or_call
;
3675 return is_gimple_mem_rhs_or_call
;
3678 /* Gimplify a C99 compound literal expression. This just means adding
3679 the DECL_EXPR before the current statement and using its anonymous
3682 static enum gimplify_status
3683 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
3684 bool (*gimple_test_f
) (tree
),
3685 fallback_t fallback
)
3687 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
3688 tree decl
= DECL_EXPR_DECL (decl_s
);
3689 tree init
= DECL_INITIAL (decl
);
3690 /* Mark the decl as addressable if the compound literal
3691 expression is addressable now, otherwise it is marked too late
3692 after we gimplify the initialization expression. */
3693 if (TREE_ADDRESSABLE (*expr_p
))
3694 TREE_ADDRESSABLE (decl
) = 1;
3695 /* Otherwise, if we don't need an lvalue and have a literal directly
3696 substitute it. Check if it matches the gimple predicate, as
3697 otherwise we'd generate a new temporary, and we can as well just
3698 use the decl we already have. */
3699 else if (!TREE_ADDRESSABLE (decl
)
3701 && (fallback
& fb_lvalue
) == 0
3702 && gimple_test_f (init
))
3708 /* Preliminarily mark non-addressed complex variables as eligible
3709 for promotion to gimple registers. We'll transform their uses
3711 if ((TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
3712 || TREE_CODE (TREE_TYPE (decl
)) == VECTOR_TYPE
)
3713 && !TREE_THIS_VOLATILE (decl
)
3714 && !needs_to_live_in_memory (decl
))
3715 DECL_GIMPLE_REG_P (decl
) = 1;
3717 /* If the decl is not addressable, then it is being used in some
3718 expression or on the right hand side of a statement, and it can
3719 be put into a readonly data section. */
3720 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
3721 TREE_READONLY (decl
) = 1;
3723 /* This decl isn't mentioned in the enclosing block, so add it to the
3724 list of temps. FIXME it seems a bit of a kludge to say that
3725 anonymous artificial vars aren't pushed, but everything else is. */
3726 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
3727 gimple_add_tmp_var (decl
);
3729 gimplify_and_add (decl_s
, pre_p
);
3734 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3735 return a new CONSTRUCTOR if something changed. */
3738 optimize_compound_literals_in_ctor (tree orig_ctor
)
3740 tree ctor
= orig_ctor
;
3741 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
3742 unsigned int idx
, num
= vec_safe_length (elts
);
3744 for (idx
= 0; idx
< num
; idx
++)
3746 tree value
= (*elts
)[idx
].value
;
3747 tree newval
= value
;
3748 if (TREE_CODE (value
) == CONSTRUCTOR
)
3749 newval
= optimize_compound_literals_in_ctor (value
);
3750 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
3752 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
3753 tree decl
= DECL_EXPR_DECL (decl_s
);
3754 tree init
= DECL_INITIAL (decl
);
3756 if (!TREE_ADDRESSABLE (value
)
3757 && !TREE_ADDRESSABLE (decl
)
3759 && TREE_CODE (init
) == CONSTRUCTOR
)
3760 newval
= optimize_compound_literals_in_ctor (init
);
3762 if (newval
== value
)
3765 if (ctor
== orig_ctor
)
3767 ctor
= copy_node (orig_ctor
);
3768 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
3769 elts
= CONSTRUCTOR_ELTS (ctor
);
3771 (*elts
)[idx
].value
= newval
;
3776 /* A subroutine of gimplify_modify_expr. Break out elements of a
3777 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3779 Note that we still need to clear any elements that don't have explicit
3780 initializers, so if not all elements are initialized we keep the
3781 original MODIFY_EXPR, we just remove all of the constructor elements.
3783 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3784 GS_ERROR if we would have to create a temporary when gimplifying
3785 this constructor. Otherwise, return GS_OK.
3787 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3789 static enum gimplify_status
3790 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3791 bool want_value
, bool notify_temp_creation
)
3793 tree object
, ctor
, type
;
3794 enum gimplify_status ret
;
3795 vec
<constructor_elt
, va_gc
> *elts
;
3797 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
3799 if (!notify_temp_creation
)
3801 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
3802 is_gimple_lvalue
, fb_lvalue
);
3803 if (ret
== GS_ERROR
)
3807 object
= TREE_OPERAND (*expr_p
, 0);
3808 ctor
= TREE_OPERAND (*expr_p
, 1) =
3809 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
3810 type
= TREE_TYPE (ctor
);
3811 elts
= CONSTRUCTOR_ELTS (ctor
);
3814 switch (TREE_CODE (type
))
3818 case QUAL_UNION_TYPE
:
3821 struct gimplify_init_ctor_preeval_data preeval_data
;
3822 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
3823 bool cleared
, complete_p
, valid_const_initializer
;
3825 /* Aggregate types must lower constructors to initialization of
3826 individual elements. The exception is that a CONSTRUCTOR node
3827 with no elements indicates zero-initialization of the whole. */
3828 if (vec_safe_is_empty (elts
))
3830 if (notify_temp_creation
)
3835 /* Fetch information about the constructor to direct later processing.
3836 We might want to make static versions of it in various cases, and
3837 can only do so if it known to be a valid constant initializer. */
3838 valid_const_initializer
3839 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
3840 &num_ctor_elements
, &complete_p
);
3842 /* If a const aggregate variable is being initialized, then it
3843 should never be a lose to promote the variable to be static. */
3844 if (valid_const_initializer
3845 && num_nonzero_elements
> 1
3846 && TREE_READONLY (object
)
3847 && TREE_CODE (object
) == VAR_DECL
3848 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
)))
3850 if (notify_temp_creation
)
3852 DECL_INITIAL (object
) = ctor
;
3853 TREE_STATIC (object
) = 1;
3854 if (!DECL_NAME (object
))
3855 DECL_NAME (object
) = create_tmp_var_name ("C");
3856 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
3858 /* ??? C++ doesn't automatically append a .<number> to the
3859 assembler name, and even when it does, it looks at FE private
3860 data structures to figure out what that number should be,
3861 which are not set for this variable. I suppose this is
3862 important for local statics for inline functions, which aren't
3863 "local" in the object file sense. So in order to get a unique
3864 TU-local symbol, we must invoke the lhd version now. */
3865 lhd_set_decl_assembler_name (object
);
3867 *expr_p
= NULL_TREE
;
3871 /* If there are "lots" of initialized elements, even discounting
3872 those that are not address constants (and thus *must* be
3873 computed at runtime), then partition the constructor into
3874 constant and non-constant parts. Block copy the constant
3875 parts in, then generate code for the non-constant parts. */
3876 /* TODO. There's code in cp/typeck.c to do this. */
3878 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
3879 /* store_constructor will ignore the clearing of variable-sized
3880 objects. Initializers for such objects must explicitly set
3881 every field that needs to be set. */
3883 else if (!complete_p
&& !CONSTRUCTOR_NO_CLEARING (ctor
))
3884 /* If the constructor isn't complete, clear the whole object
3885 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3887 ??? This ought not to be needed. For any element not present
3888 in the initializer, we should simply set them to zero. Except
3889 we'd need to *find* the elements that are not present, and that
3890 requires trickery to avoid quadratic compile-time behavior in
3891 large cases or excessive memory use in small cases. */
3893 else if (num_ctor_elements
- num_nonzero_elements
3894 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
3895 && num_nonzero_elements
< num_ctor_elements
/ 4)
3896 /* If there are "lots" of zeros, it's more efficient to clear
3897 the memory and then set the nonzero elements. */
3902 /* If there are "lots" of initialized elements, and all of them
3903 are valid address constants, then the entire initializer can
3904 be dropped to memory, and then memcpy'd out. Don't do this
3905 for sparse arrays, though, as it's more efficient to follow
3906 the standard CONSTRUCTOR behavior of memset followed by
3907 individual element initialization. Also don't do this for small
3908 all-zero initializers (which aren't big enough to merit
3909 clearing), and don't try to make bitwise copies of
3910 TREE_ADDRESSABLE types.
3912 We cannot apply such transformation when compiling chkp static
3913 initializer because creation of initializer image in the memory
3914 will require static initialization of bounds for it. It should
3915 result in another gimplification of similar initializer and we
3916 may fall into infinite loop. */
3917 if (valid_const_initializer
3918 && !(cleared
|| num_nonzero_elements
== 0)
3919 && !TREE_ADDRESSABLE (type
)
3920 && (!current_function_decl
3921 || !lookup_attribute ("chkp ctor",
3922 DECL_ATTRIBUTES (current_function_decl
))))
3924 HOST_WIDE_INT size
= int_size_in_bytes (type
);
3927 /* ??? We can still get unbounded array types, at least
3928 from the C++ front end. This seems wrong, but attempt
3929 to work around it for now. */
3932 size
= int_size_in_bytes (TREE_TYPE (object
));
3934 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
3937 /* Find the maximum alignment we can assume for the object. */
3938 /* ??? Make use of DECL_OFFSET_ALIGN. */
3939 if (DECL_P (object
))
3940 align
= DECL_ALIGN (object
);
3942 align
= TYPE_ALIGN (type
);
3944 /* Do a block move either if the size is so small as to make
3945 each individual move a sub-unit move on average, or if it
3946 is so large as to make individual moves inefficient. */
3948 && num_nonzero_elements
> 1
3949 && (size
< num_nonzero_elements
3950 || !can_move_by_pieces (size
, align
)))
3952 if (notify_temp_creation
)
3955 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
3956 ctor
= tree_output_constant_def (ctor
);
3957 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
3958 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
3959 TREE_OPERAND (*expr_p
, 1) = ctor
;
3961 /* This is no longer an assignment of a CONSTRUCTOR, but
3962 we still may have processing to do on the LHS. So
3963 pretend we didn't do anything here to let that happen. */
3964 return GS_UNHANDLED
;
3968 /* If the target is volatile, we have non-zero elements and more than
3969 one field to assign, initialize the target from a temporary. */
3970 if (TREE_THIS_VOLATILE (object
)
3971 && !TREE_ADDRESSABLE (type
)
3972 && num_nonzero_elements
> 0
3973 && vec_safe_length (elts
) > 1)
3975 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
3976 TREE_OPERAND (*expr_p
, 0) = temp
;
3977 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
3979 build2 (MODIFY_EXPR
, void_type_node
,
3984 if (notify_temp_creation
)
3987 /* If there are nonzero elements and if needed, pre-evaluate to capture
3988 elements overlapping with the lhs into temporaries. We must do this
3989 before clearing to fetch the values before they are zeroed-out. */
3990 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
3992 preeval_data
.lhs_base_decl
= get_base_address (object
);
3993 if (!DECL_P (preeval_data
.lhs_base_decl
))
3994 preeval_data
.lhs_base_decl
= NULL
;
3995 preeval_data
.lhs_alias_set
= get_alias_set (object
);
3997 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
3998 pre_p
, post_p
, &preeval_data
);
4001 bool ctor_has_side_effects_p
4002 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
4006 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4007 Note that we still have to gimplify, in order to handle the
4008 case of variable sized types. Avoid shared tree structures. */
4009 CONSTRUCTOR_ELTS (ctor
) = NULL
;
4010 TREE_SIDE_EFFECTS (ctor
) = 0;
4011 object
= unshare_expr (object
);
4012 gimplify_stmt (expr_p
, pre_p
);
4015 /* If we have not block cleared the object, or if there are nonzero
4016 elements in the constructor, or if the constructor has side effects,
4017 add assignments to the individual scalar fields of the object. */
4019 || num_nonzero_elements
> 0
4020 || ctor_has_side_effects_p
)
4021 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
4023 *expr_p
= NULL_TREE
;
4031 if (notify_temp_creation
)
4034 /* Extract the real and imaginary parts out of the ctor. */
4035 gcc_assert (elts
->length () == 2);
4036 r
= (*elts
)[0].value
;
4037 i
= (*elts
)[1].value
;
4038 if (r
== NULL
|| i
== NULL
)
4040 tree zero
= build_zero_cst (TREE_TYPE (type
));
4047 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4048 represent creation of a complex value. */
4049 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
4051 ctor
= build_complex (type
, r
, i
);
4052 TREE_OPERAND (*expr_p
, 1) = ctor
;
4056 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
4057 TREE_OPERAND (*expr_p
, 1) = ctor
;
4058 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
4061 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
4069 unsigned HOST_WIDE_INT ix
;
4070 constructor_elt
*ce
;
4072 if (notify_temp_creation
)
4075 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4076 if (TREE_CONSTANT (ctor
))
4078 bool constant_p
= true;
4081 /* Even when ctor is constant, it might contain non-*_CST
4082 elements, such as addresses or trapping values like
4083 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4084 in VECTOR_CST nodes. */
4085 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
4086 if (!CONSTANT_CLASS_P (value
))
4094 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
4098 TREE_CONSTANT (ctor
) = 0;
4101 /* Vector types use CONSTRUCTOR all the way through gimple
4102 compilation as a general initializer. */
4103 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
4105 enum gimplify_status tret
;
4106 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
4108 if (tret
== GS_ERROR
)
4111 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
4112 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
4117 /* So how did we get a CONSTRUCTOR for a scalar type? */
4121 if (ret
== GS_ERROR
)
4123 else if (want_value
)
4130 /* If we have gimplified both sides of the initializer but have
4131 not emitted an assignment, do so now. */
4134 tree lhs
= TREE_OPERAND (*expr_p
, 0);
4135 tree rhs
= TREE_OPERAND (*expr_p
, 1);
4136 gassign
*init
= gimple_build_assign (lhs
, rhs
);
4137 gimplify_seq_add_stmt (pre_p
, init
);
4145 /* Given a pointer value OP0, return a simplified version of an
4146 indirection through OP0, or NULL_TREE if no simplification is
4147 possible. This may only be applied to a rhs of an expression.
4148 Note that the resulting type may be different from the type pointed
4149 to in the sense that it is still compatible from the langhooks
4153 gimple_fold_indirect_ref_rhs (tree t
)
4155 return gimple_fold_indirect_ref (t
);
4158 /* Subroutine of gimplify_modify_expr to do simplifications of
4159 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4160 something changes. */
4162 static enum gimplify_status
4163 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
4164 gimple_seq
*pre_p
, gimple_seq
*post_p
,
4167 enum gimplify_status ret
= GS_UNHANDLED
;
4173 switch (TREE_CODE (*from_p
))
4176 /* If we're assigning from a read-only variable initialized with
4177 a constructor, do the direct assignment from the constructor,
4178 but only if neither source nor target are volatile since this
4179 latter assignment might end up being done on a per-field basis. */
4180 if (DECL_INITIAL (*from_p
)
4181 && TREE_READONLY (*from_p
)
4182 && !TREE_THIS_VOLATILE (*from_p
)
4183 && !TREE_THIS_VOLATILE (*to_p
)
4184 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
)
4186 tree old_from
= *from_p
;
4187 enum gimplify_status subret
;
4189 /* Move the constructor into the RHS. */
4190 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
4192 /* Let's see if gimplify_init_constructor will need to put
4194 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
4196 if (subret
== GS_ERROR
)
4198 /* If so, revert the change. */
4210 /* If we have code like
4214 where the type of "x" is a (possibly cv-qualified variant
4215 of "A"), treat the entire expression as identical to "x".
4216 This kind of code arises in C++ when an object is bound
4217 to a const reference, and if "x" is a TARGET_EXPR we want
4218 to take advantage of the optimization below. */
4219 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
4220 tree t
= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0));
4223 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
4225 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
4226 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
4227 build_fold_addr_expr (t
));
4228 if (REFERENCE_CLASS_P (t
))
4229 TREE_THIS_VOLATILE (t
) = volatile_p
;
4240 /* If we are initializing something from a TARGET_EXPR, strip the
4241 TARGET_EXPR and initialize it directly, if possible. This can't
4242 be done if the initializer is void, since that implies that the
4243 temporary is set in some non-trivial way.
4245 ??? What about code that pulls out the temp and uses it
4246 elsewhere? I think that such code never uses the TARGET_EXPR as
4247 an initializer. If I'm wrong, we'll die because the temp won't
4248 have any RTL. In that case, I guess we'll need to replace
4249 references somehow. */
4250 tree init
= TARGET_EXPR_INITIAL (*from_p
);
4253 && !VOID_TYPE_P (TREE_TYPE (init
)))
4263 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4265 gimplify_compound_expr (from_p
, pre_p
, true);
4271 /* If we already made some changes, let the front end have a
4272 crack at this before we break it down. */
4273 if (ret
!= GS_UNHANDLED
)
4275 /* If we're initializing from a CONSTRUCTOR, break this into
4276 individual MODIFY_EXPRs. */
4277 return gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
4281 /* If we're assigning to a non-register type, push the assignment
4282 down into the branches. This is mandatory for ADDRESSABLE types,
4283 since we cannot generate temporaries for such, but it saves a
4284 copy in other cases as well. */
4285 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
4287 /* This code should mirror the code in gimplify_cond_expr. */
4288 enum tree_code code
= TREE_CODE (*expr_p
);
4289 tree cond
= *from_p
;
4290 tree result
= *to_p
;
4292 ret
= gimplify_expr (&result
, pre_p
, post_p
,
4293 is_gimple_lvalue
, fb_lvalue
);
4294 if (ret
!= GS_ERROR
)
4297 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
4298 TREE_OPERAND (cond
, 1)
4299 = build2 (code
, void_type_node
, result
,
4300 TREE_OPERAND (cond
, 1));
4301 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
4302 TREE_OPERAND (cond
, 2)
4303 = build2 (code
, void_type_node
, unshare_expr (result
),
4304 TREE_OPERAND (cond
, 2));
4306 TREE_TYPE (cond
) = void_type_node
;
4307 recalculate_side_effects (cond
);
4311 gimplify_and_add (cond
, pre_p
);
4312 *expr_p
= unshare_expr (result
);
4321 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4322 return slot so that we don't generate a temporary. */
4323 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
4324 && aggregate_value_p (*from_p
, *from_p
))
4328 if (!(rhs_predicate_for (*to_p
))(*from_p
))
4329 /* If we need a temporary, *to_p isn't accurate. */
4331 /* It's OK to use the return slot directly unless it's an NRV. */
4332 else if (TREE_CODE (*to_p
) == RESULT_DECL
4333 && DECL_NAME (*to_p
) == NULL_TREE
4334 && needs_to_live_in_memory (*to_p
))
4336 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
4337 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
4338 /* Don't force regs into memory. */
4340 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
4341 /* It's OK to use the target directly if it's being
4344 else if (variably_modified_type_p (TREE_TYPE (*to_p
), NULL_TREE
))
4345 /* Always use the target and thus RSO for variable-sized types.
4346 GIMPLE cannot deal with a variable-sized assignment
4347 embedded in a call statement. */
4349 else if (TREE_CODE (*to_p
) != SSA_NAME
4350 && (!is_gimple_variable (*to_p
)
4351 || needs_to_live_in_memory (*to_p
)))
4352 /* Don't use the original target if it's already addressable;
4353 if its address escapes, and the called function uses the
4354 NRV optimization, a conforming program could see *to_p
4355 change before the called function returns; see c++/19317.
4356 When optimizing, the return_slot pass marks more functions
4357 as safe after we have escape info. */
4364 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
4365 mark_addressable (*to_p
);
4370 case WITH_SIZE_EXPR
:
4371 /* Likewise for calls that return an aggregate of non-constant size,
4372 since we would not be able to generate a temporary at all. */
4373 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
4375 *from_p
= TREE_OPERAND (*from_p
, 0);
4376 /* We don't change ret in this case because the
4377 WITH_SIZE_EXPR might have been added in
4378 gimplify_modify_expr, so returning GS_OK would lead to an
4384 /* If we're initializing from a container, push the initialization
4386 case CLEANUP_POINT_EXPR
:
4388 case STATEMENT_LIST
:
4390 tree wrap
= *from_p
;
4393 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
4395 if (ret
!= GS_ERROR
)
4398 t
= voidify_wrapper_expr (wrap
, *expr_p
);
4399 gcc_assert (t
== *expr_p
);
4403 gimplify_and_add (wrap
, pre_p
);
4404 *expr_p
= unshare_expr (*to_p
);
4411 case COMPOUND_LITERAL_EXPR
:
4413 tree complit
= TREE_OPERAND (*expr_p
, 1);
4414 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
4415 tree decl
= DECL_EXPR_DECL (decl_s
);
4416 tree init
= DECL_INITIAL (decl
);
4418 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4419 into struct T x = { 0, 1, 2 } if the address of the
4420 compound literal has never been taken. */
4421 if (!TREE_ADDRESSABLE (complit
)
4422 && !TREE_ADDRESSABLE (decl
)
4425 *expr_p
= copy_node (*expr_p
);
4426 TREE_OPERAND (*expr_p
, 1) = init
;
4441 /* Return true if T looks like a valid GIMPLE statement. */
4444 is_gimple_stmt (tree t
)
4446 const enum tree_code code
= TREE_CODE (t
);
4451 /* The only valid NOP_EXPR is the empty statement. */
4452 return IS_EMPTY_STMT (t
);
4456 /* These are only valid if they're void. */
4457 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
4463 case CASE_LABEL_EXPR
:
4464 case TRY_CATCH_EXPR
:
4465 case TRY_FINALLY_EXPR
:
4466 case EH_FILTER_EXPR
:
4469 case STATEMENT_LIST
:
4473 case OACC_HOST_DATA
:
4476 case OACC_ENTER_DATA
:
4477 case OACC_EXIT_DATA
:
4483 case OMP_DISTRIBUTE
:
4493 /* These are always void. */
4499 /* These are valid regardless of their type. */
4508 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4509 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4510 DECL_GIMPLE_REG_P set.
4512 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4513 other, unmodified part of the complex object just before the total store.
4514 As a consequence, if the object is still uninitialized, an undefined value
4515 will be loaded into a register, which may result in a spurious exception
4516 if the register is floating-point and the value happens to be a signaling
4517 NaN for example. Then the fully-fledged complex operations lowering pass
4518 followed by a DCE pass are necessary in order to fix things up. */
4520 static enum gimplify_status
4521 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
4524 enum tree_code code
, ocode
;
4525 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
4527 lhs
= TREE_OPERAND (*expr_p
, 0);
4528 rhs
= TREE_OPERAND (*expr_p
, 1);
4529 code
= TREE_CODE (lhs
);
4530 lhs
= TREE_OPERAND (lhs
, 0);
4532 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
4533 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
4534 TREE_NO_WARNING (other
) = 1;
4535 other
= get_formal_tmp_var (other
, pre_p
);
4537 realpart
= code
== REALPART_EXPR
? rhs
: other
;
4538 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
4540 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
4541 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
4543 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
4545 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
4546 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
4551 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4557 PRE_P points to the list where side effects that must happen before
4558 *EXPR_P should be stored.
4560 POST_P points to the list where side effects that must happen after
4561 *EXPR_P should be stored.
4563 WANT_VALUE is nonzero iff we want to use the value of this expression
4564 in another expression. */
4566 static enum gimplify_status
4567 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4570 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
4571 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
4572 enum gimplify_status ret
= GS_UNHANDLED
;
4574 location_t loc
= EXPR_LOCATION (*expr_p
);
4575 gimple_stmt_iterator gsi
;
4577 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
4578 || TREE_CODE (*expr_p
) == INIT_EXPR
);
4580 /* Trying to simplify a clobber using normal logic doesn't work,
4581 so handle it here. */
4582 if (TREE_CLOBBER_P (*from_p
))
4584 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
4585 if (ret
== GS_ERROR
)
4587 gcc_assert (!want_value
4588 && (TREE_CODE (*to_p
) == VAR_DECL
4589 || TREE_CODE (*to_p
) == MEM_REF
));
4590 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
4595 /* Insert pointer conversions required by the middle-end that are not
4596 required by the frontend. This fixes middle-end type checking for,
4597 for example, gcc.dg/redecl-6.c. */
4598 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
4600 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
4601 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
4602 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
4605 /* See if any simplifications can be done based on what the RHS is. */
4606 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
4608 if (ret
!= GS_UNHANDLED
)
4611 /* For zero sized types only gimplify the left hand side and right hand
4612 side as statements and throw away the assignment. Do this after
4613 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4615 if (zero_sized_type (TREE_TYPE (*from_p
)) && !want_value
)
4617 gimplify_stmt (from_p
, pre_p
);
4618 gimplify_stmt (to_p
, pre_p
);
4619 *expr_p
= NULL_TREE
;
4623 /* If the value being copied is of variable width, compute the length
4624 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4625 before gimplifying any of the operands so that we can resolve any
4626 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4627 the size of the expression to be copied, not of the destination, so
4628 that is what we must do here. */
4629 maybe_with_size_expr (from_p
);
4631 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
4632 if (ret
== GS_ERROR
)
4635 /* As a special case, we have to temporarily allow for assignments
4636 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4637 a toplevel statement, when gimplifying the GENERIC expression
4638 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4639 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4641 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4642 prevent gimplify_expr from trying to create a new temporary for
4643 foo's LHS, we tell it that it should only gimplify until it
4644 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4645 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4646 and all we need to do here is set 'a' to be its LHS. */
4647 ret
= gimplify_expr (from_p
, pre_p
, post_p
, rhs_predicate_for (*to_p
),
4649 if (ret
== GS_ERROR
)
4652 /* In case of va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
4653 size as argument to the call. */
4654 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
4656 tree call
= TREE_OPERAND (*from_p
, 0);
4657 tree vlasize
= TREE_OPERAND (*from_p
, 1);
4659 if (TREE_CODE (call
) == CALL_EXPR
4660 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
4662 int nargs
= call_expr_nargs (call
);
4663 tree type
= TREE_TYPE (call
);
4664 tree ap
= CALL_EXPR_ARG (call
, 0);
4665 tree tag
= CALL_EXPR_ARG (call
, 1);
4666 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
4670 tree
*call_p
= &(TREE_OPERAND (*from_p
, 0));
4675 /* Now see if the above changed *from_p to something we handle specially. */
4676 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
4678 if (ret
!= GS_UNHANDLED
)
4681 /* If we've got a variable sized assignment between two lvalues (i.e. does
4682 not involve a call), then we can make things a bit more straightforward
4683 by converting the assignment to memcpy or memset. */
4684 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
4686 tree from
= TREE_OPERAND (*from_p
, 0);
4687 tree size
= TREE_OPERAND (*from_p
, 1);
4689 if (TREE_CODE (from
) == CONSTRUCTOR
)
4690 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
4692 if (is_gimple_addressable (from
))
4695 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
4700 /* Transform partial stores to non-addressable complex variables into
4701 total stores. This allows us to use real instead of virtual operands
4702 for these variables, which improves optimization. */
4703 if ((TREE_CODE (*to_p
) == REALPART_EXPR
4704 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
4705 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
4706 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
4708 /* Try to alleviate the effects of the gimplification creating artificial
4709 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4710 if (!gimplify_ctxp
->into_ssa
4711 && TREE_CODE (*from_p
) == VAR_DECL
4712 && DECL_IGNORED_P (*from_p
)
4714 && !DECL_IGNORED_P (*to_p
))
4716 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
4718 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
4719 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
4720 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
4723 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
4724 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
4726 if (TREE_CODE (*from_p
) == CALL_EXPR
)
4728 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4729 instead of a GIMPLE_ASSIGN. */
4731 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
4733 /* Gimplify internal functions created in the FEs. */
4734 int nargs
= call_expr_nargs (*from_p
), i
;
4735 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
4736 auto_vec
<tree
> vargs (nargs
);
4738 for (i
= 0; i
< nargs
; i
++)
4740 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
4741 EXPR_LOCATION (*from_p
));
4742 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
4744 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
4745 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
4749 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
4750 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
4751 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
4752 tree fndecl
= get_callee_fndecl (*from_p
);
4754 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
4755 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
4756 && call_expr_nargs (*from_p
) == 3)
4757 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
4758 CALL_EXPR_ARG (*from_p
, 0),
4759 CALL_EXPR_ARG (*from_p
, 1),
4760 CALL_EXPR_ARG (*from_p
, 2));
4763 call_stmt
= gimple_build_call_from_tree (*from_p
);
4764 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fnptrtype
));
4767 notice_special_calls (call_stmt
);
4768 if (!gimple_call_noreturn_p (call_stmt
))
4769 gimple_call_set_lhs (call_stmt
, *to_p
);
4774 assign
= gimple_build_assign (*to_p
, *from_p
);
4775 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
4778 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
4780 /* We should have got an SSA name from the start. */
4781 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
);
4784 gimplify_seq_add_stmt (pre_p
, assign
);
4785 gsi
= gsi_last (*pre_p
);
4786 maybe_fold_stmt (&gsi
);
4790 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
4799 /* Gimplify a comparison between two variable-sized objects. Do this
4800 with a call to BUILT_IN_MEMCMP. */
4802 static enum gimplify_status
4803 gimplify_variable_sized_compare (tree
*expr_p
)
4805 location_t loc
= EXPR_LOCATION (*expr_p
);
4806 tree op0
= TREE_OPERAND (*expr_p
, 0);
4807 tree op1
= TREE_OPERAND (*expr_p
, 1);
4808 tree t
, arg
, dest
, src
, expr
;
4810 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
4811 arg
= unshare_expr (arg
);
4812 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
4813 src
= build_fold_addr_expr_loc (loc
, op1
);
4814 dest
= build_fold_addr_expr_loc (loc
, op0
);
4815 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
4816 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
4819 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
4820 SET_EXPR_LOCATION (expr
, loc
);
4826 /* Gimplify a comparison between two aggregate objects of integral scalar
4827 mode as a comparison between the bitwise equivalent scalar values. */
4829 static enum gimplify_status
4830 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
4832 location_t loc
= EXPR_LOCATION (*expr_p
);
4833 tree op0
= TREE_OPERAND (*expr_p
, 0);
4834 tree op1
= TREE_OPERAND (*expr_p
, 1);
4836 tree type
= TREE_TYPE (op0
);
4837 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
4839 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
4840 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
4843 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
4848 /* Gimplify an expression sequence. This function gimplifies each
4849 expression and rewrites the original expression with the last
4850 expression of the sequence in GIMPLE form.
4852 PRE_P points to the list where the side effects for all the
4853 expressions in the sequence will be emitted.
4855 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4857 static enum gimplify_status
4858 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
4864 tree
*sub_p
= &TREE_OPERAND (t
, 0);
4866 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
4867 gimplify_compound_expr (sub_p
, pre_p
, false);
4869 gimplify_stmt (sub_p
, pre_p
);
4871 t
= TREE_OPERAND (t
, 1);
4873 while (TREE_CODE (t
) == COMPOUND_EXPR
);
4880 gimplify_stmt (expr_p
, pre_p
);
4885 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4886 gimplify. After gimplification, EXPR_P will point to a new temporary
4887 that holds the original value of the SAVE_EXPR node.
4889 PRE_P points to the list where side effects that must happen before
4890 *EXPR_P should be stored. */
4892 static enum gimplify_status
4893 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
4895 enum gimplify_status ret
= GS_ALL_DONE
;
4898 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
4899 val
= TREE_OPERAND (*expr_p
, 0);
4901 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4902 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
4904 /* The operand may be a void-valued expression such as SAVE_EXPRs
4905 generated by the Java frontend for class initialization. It is
4906 being executed only for its side-effects. */
4907 if (TREE_TYPE (val
) == void_type_node
)
4909 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
4910 is_gimple_stmt
, fb_none
);
4914 val
= get_initialized_tmp_var (val
, pre_p
, post_p
);
4916 TREE_OPERAND (*expr_p
, 0) = val
;
4917 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
4925 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4932 PRE_P points to the list where side effects that must happen before
4933 *EXPR_P should be stored.
4935 POST_P points to the list where side effects that must happen after
4936 *EXPR_P should be stored. */
4938 static enum gimplify_status
4939 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
4941 tree expr
= *expr_p
;
4942 tree op0
= TREE_OPERAND (expr
, 0);
4943 enum gimplify_status ret
;
4944 location_t loc
= EXPR_LOCATION (*expr_p
);
4946 switch (TREE_CODE (op0
))
4950 /* Check if we are dealing with an expression of the form '&*ptr'.
4951 While the front end folds away '&*ptr' into 'ptr', these
4952 expressions may be generated internally by the compiler (e.g.,
4953 builtins like __builtin_va_end). */
4954 /* Caution: the silent array decomposition semantics we allow for
4955 ADDR_EXPR means we can't always discard the pair. */
4956 /* Gimplification of the ADDR_EXPR operand may drop
4957 cv-qualification conversions, so make sure we add them if
4960 tree op00
= TREE_OPERAND (op0
, 0);
4961 tree t_expr
= TREE_TYPE (expr
);
4962 tree t_op00
= TREE_TYPE (op00
);
4964 if (!useless_type_conversion_p (t_expr
, t_op00
))
4965 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
4971 case VIEW_CONVERT_EXPR
:
4972 /* Take the address of our operand and then convert it to the type of
4975 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4976 all clear. The impact of this transformation is even less clear. */
4978 /* If the operand is a useless conversion, look through it. Doing so
4979 guarantees that the ADDR_EXPR and its operand will remain of the
4981 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
4982 op0
= TREE_OPERAND (op0
, 0);
4984 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
4985 build_fold_addr_expr_loc (loc
,
4986 TREE_OPERAND (op0
, 0)));
4991 /* If we see a call to a declared builtin or see its address
4992 being taken (we can unify those cases here) then we can mark
4993 the builtin for implicit generation by GCC. */
4994 if (TREE_CODE (op0
) == FUNCTION_DECL
4995 && DECL_BUILT_IN_CLASS (op0
) == BUILT_IN_NORMAL
4996 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
4997 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
4999 /* We use fb_either here because the C frontend sometimes takes
5000 the address of a call that returns a struct; see
5001 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5002 the implied temporary explicit. */
5004 /* Make the operand addressable. */
5005 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
5006 is_gimple_addressable
, fb_either
);
5007 if (ret
== GS_ERROR
)
5010 /* Then mark it. Beware that it may not be possible to do so directly
5011 if a temporary has been created by the gimplification. */
5012 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
5014 op0
= TREE_OPERAND (expr
, 0);
5016 /* For various reasons, the gimplification of the expression
5017 may have made a new INDIRECT_REF. */
5018 if (TREE_CODE (op0
) == INDIRECT_REF
)
5019 goto do_indirect_ref
;
5021 mark_addressable (TREE_OPERAND (expr
, 0));
5023 /* The FEs may end up building ADDR_EXPRs early on a decl with
5024 an incomplete type. Re-build ADDR_EXPRs in canonical form
5026 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
5027 *expr_p
= build_fold_addr_expr (op0
);
5029 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5030 recompute_tree_invariant_for_addr_expr (*expr_p
);
5032 /* If we re-built the ADDR_EXPR add a conversion to the original type
5034 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
5035 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
5043 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5044 value; output operands should be a gimple lvalue. */
5046 static enum gimplify_status
5047 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5051 const char **oconstraints
;
5054 const char *constraint
;
5055 bool allows_mem
, allows_reg
, is_inout
;
5056 enum gimplify_status ret
, tret
;
5058 vec
<tree
, va_gc
> *inputs
;
5059 vec
<tree
, va_gc
> *outputs
;
5060 vec
<tree
, va_gc
> *clobbers
;
5061 vec
<tree
, va_gc
> *labels
;
5065 noutputs
= list_length (ASM_OUTPUTS (expr
));
5066 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
5074 link_next
= NULL_TREE
;
5075 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
5078 size_t constraint_len
;
5080 link_next
= TREE_CHAIN (link
);
5084 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
5085 constraint_len
= strlen (constraint
);
5086 if (constraint_len
== 0)
5089 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
5090 &allows_mem
, &allows_reg
, &is_inout
);
5097 if (!allows_reg
&& allows_mem
)
5098 mark_addressable (TREE_VALUE (link
));
5100 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
5101 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
5102 fb_lvalue
| fb_mayfail
);
5103 if (tret
== GS_ERROR
)
5105 error ("invalid lvalue in asm output %d", i
);
5109 vec_safe_push (outputs
, link
);
5110 TREE_CHAIN (link
) = NULL_TREE
;
5114 /* An input/output operand. To give the optimizers more
5115 flexibility, split it into separate input and output
5120 /* Turn the in/out constraint into an output constraint. */
5121 char *p
= xstrdup (constraint
);
5123 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
5125 /* And add a matching input constraint. */
5128 sprintf (buf
, "%d", i
);
5130 /* If there are multiple alternatives in the constraint,
5131 handle each of them individually. Those that allow register
5132 will be replaced with operand number, the others will stay
5134 if (strchr (p
, ',') != NULL
)
5136 size_t len
= 0, buflen
= strlen (buf
);
5137 char *beg
, *end
, *str
, *dst
;
5141 end
= strchr (beg
, ',');
5143 end
= strchr (beg
, '\0');
5144 if ((size_t) (end
- beg
) < buflen
)
5147 len
+= end
- beg
+ 1;
5154 str
= (char *) alloca (len
);
5155 for (beg
= p
+ 1, dst
= str
;;)
5158 bool mem_p
, reg_p
, inout_p
;
5160 end
= strchr (beg
, ',');
5165 parse_output_constraint (&tem
, i
, 0, 0,
5166 &mem_p
, ®_p
, &inout_p
);
5171 memcpy (dst
, buf
, buflen
);
5180 memcpy (dst
, beg
, len
);
5189 input
= build_string (dst
- str
, str
);
5192 input
= build_string (strlen (buf
), buf
);
5195 input
= build_string (constraint_len
- 1, constraint
+ 1);
5199 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
5200 unshare_expr (TREE_VALUE (link
)));
5201 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
5205 link_next
= NULL_TREE
;
5206 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
5208 link_next
= TREE_CHAIN (link
);
5209 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
5210 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
5211 oconstraints
, &allows_mem
, &allows_reg
);
5213 /* If we can't make copies, we can only accept memory. */
5214 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link
))))
5220 error ("impossible constraint in %<asm%>");
5221 error ("non-memory input %d must stay in memory", i
);
5226 /* If the operand is a memory input, it should be an lvalue. */
5227 if (!allows_reg
&& allows_mem
)
5229 tree inputv
= TREE_VALUE (link
);
5230 STRIP_NOPS (inputv
);
5231 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
5232 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
5233 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
5234 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
)
5235 TREE_VALUE (link
) = error_mark_node
;
5236 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
5237 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
5238 mark_addressable (TREE_VALUE (link
));
5239 if (tret
== GS_ERROR
)
5241 if (EXPR_HAS_LOCATION (TREE_VALUE (link
)))
5242 input_location
= EXPR_LOCATION (TREE_VALUE (link
));
5243 error ("memory input %d is not directly addressable", i
);
5249 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
5250 is_gimple_asm_val
, fb_rvalue
);
5251 if (tret
== GS_ERROR
)
5255 TREE_CHAIN (link
) = NULL_TREE
;
5256 vec_safe_push (inputs
, link
);
5259 link_next
= NULL_TREE
;
5260 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
5262 link_next
= TREE_CHAIN (link
);
5263 TREE_CHAIN (link
) = NULL_TREE
;
5264 vec_safe_push (clobbers
, link
);
5267 link_next
= NULL_TREE
;
5268 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
5270 link_next
= TREE_CHAIN (link
);
5271 TREE_CHAIN (link
) = NULL_TREE
;
5272 vec_safe_push (labels
, link
);
5275 /* Do not add ASMs with errors to the gimple IL stream. */
5276 if (ret
!= GS_ERROR
)
5278 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
5279 inputs
, outputs
, clobbers
, labels
);
5281 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
5282 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
5284 gimplify_seq_add_stmt (pre_p
, stmt
);
5290 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5291 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5292 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5293 return to this function.
5295 FIXME should we complexify the prequeue handling instead? Or use flags
5296 for all the cleanups and let the optimizer tighten them up? The current
5297 code seems pretty fragile; it will break on a cleanup within any
5298 non-conditional nesting. But any such nesting would be broken, anyway;
5299 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5300 and continues out of it. We can do that at the RTL level, though, so
5301 having an optimizer to tighten up try/finally regions would be a Good
5304 static enum gimplify_status
5305 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
5307 gimple_stmt_iterator iter
;
5308 gimple_seq body_sequence
= NULL
;
5310 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
5312 /* We only care about the number of conditions between the innermost
5313 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5314 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5315 int old_conds
= gimplify_ctxp
->conditions
;
5316 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
5317 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
5318 gimplify_ctxp
->conditions
= 0;
5319 gimplify_ctxp
->conditional_cleanups
= NULL
;
5320 gimplify_ctxp
->in_cleanup_point_expr
= true;
5322 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
5324 gimplify_ctxp
->conditions
= old_conds
;
5325 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
5326 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
5328 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
5330 gimple wce
= gsi_stmt (iter
);
5332 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
5334 if (gsi_one_before_end_p (iter
))
5336 /* Note that gsi_insert_seq_before and gsi_remove do not
5337 scan operands, unlike some other sequence mutators. */
5338 if (!gimple_wce_cleanup_eh_only (wce
))
5339 gsi_insert_seq_before_without_update (&iter
,
5340 gimple_wce_cleanup (wce
),
5342 gsi_remove (&iter
, true);
5349 enum gimple_try_flags kind
;
5351 if (gimple_wce_cleanup_eh_only (wce
))
5352 kind
= GIMPLE_TRY_CATCH
;
5354 kind
= GIMPLE_TRY_FINALLY
;
5355 seq
= gsi_split_seq_after (iter
);
5357 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
5358 /* Do not use gsi_replace here, as it may scan operands.
5359 We want to do a simple structural modification only. */
5360 gsi_set_stmt (&iter
, gtry
);
5361 iter
= gsi_start (gtry
->eval
);
5368 gimplify_seq_add_seq (pre_p
, body_sequence
);
5381 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5382 is the cleanup action required. EH_ONLY is true if the cleanup should
5383 only be executed if an exception is thrown, not on normal exit. */
5386 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
)
5389 gimple_seq cleanup_stmts
= NULL
;
5391 /* Errors can result in improperly nested cleanups. Which results in
5392 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5396 if (gimple_conditional_context ())
5398 /* If we're in a conditional context, this is more complex. We only
5399 want to run the cleanup if we actually ran the initialization that
5400 necessitates it, but we want to run it after the end of the
5401 conditional context. So we wrap the try/finally around the
5402 condition and use a flag to determine whether or not to actually
5403 run the destructor. Thus
5407 becomes (approximately)
5411 if (test) { A::A(temp); flag = 1; val = f(temp); }
5414 if (flag) A::~A(temp);
5418 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
5419 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
5420 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
5422 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
5423 gimplify_stmt (&cleanup
, &cleanup_stmts
);
5424 wce
= gimple_build_wce (cleanup_stmts
);
5426 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
5427 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
5428 gimplify_seq_add_stmt (pre_p
, ftrue
);
5430 /* Because of this manipulation, and the EH edges that jump
5431 threading cannot redirect, the temporary (VAR) will appear
5432 to be used uninitialized. Don't warn. */
5433 TREE_NO_WARNING (var
) = 1;
5437 gimplify_stmt (&cleanup
, &cleanup_stmts
);
5438 wce
= gimple_build_wce (cleanup_stmts
);
5439 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
5440 gimplify_seq_add_stmt (pre_p
, wce
);
5444 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5446 static enum gimplify_status
5447 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5449 tree targ
= *expr_p
;
5450 tree temp
= TARGET_EXPR_SLOT (targ
);
5451 tree init
= TARGET_EXPR_INITIAL (targ
);
5452 enum gimplify_status ret
;
5456 tree cleanup
= NULL_TREE
;
5458 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5459 to the temps list. Handle also variable length TARGET_EXPRs. */
5460 if (TREE_CODE (DECL_SIZE (temp
)) != INTEGER_CST
)
5462 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
5463 gimplify_type_sizes (TREE_TYPE (temp
), pre_p
);
5464 gimplify_vla_decl (temp
, pre_p
);
5467 gimple_add_tmp_var (temp
);
5469 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5470 expression is supposed to initialize the slot. */
5471 if (VOID_TYPE_P (TREE_TYPE (init
)))
5472 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
5475 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
5477 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
5479 ggc_free (init_expr
);
5481 if (ret
== GS_ERROR
)
5483 /* PR c++/28266 Make sure this is expanded only once. */
5484 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
5488 gimplify_and_add (init
, pre_p
);
5490 /* If needed, push the cleanup for the temp. */
5491 if (TARGET_EXPR_CLEANUP (targ
))
5493 if (CLEANUP_EH_ONLY (targ
))
5494 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
5495 CLEANUP_EH_ONLY (targ
), pre_p
);
5497 cleanup
= TARGET_EXPR_CLEANUP (targ
);
5500 /* Add a clobber for the temporary going out of scope, like
5501 gimplify_bind_expr. */
5502 if (gimplify_ctxp
->in_cleanup_point_expr
5503 && needs_to_live_in_memory (temp
)
5504 && flag_stack_reuse
== SR_ALL
)
5506 tree clobber
= build_constructor (TREE_TYPE (temp
),
5508 TREE_THIS_VOLATILE (clobber
) = true;
5509 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
5511 cleanup
= build2 (COMPOUND_EXPR
, void_type_node
, cleanup
,
5518 gimple_push_cleanup (temp
, cleanup
, false, pre_p
);
5520 /* Only expand this once. */
5521 TREE_OPERAND (targ
, 3) = init
;
5522 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
5525 /* We should have expanded this before. */
5526 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
5532 /* Gimplification of expression trees. */
5534 /* Gimplify an expression which appears at statement context. The
5535 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5536 NULL, a new sequence is allocated.
5538 Return true if we actually added a statement to the queue. */
5541 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
5543 gimple_seq_node last
;
5545 last
= gimple_seq_last (*seq_p
);
5546 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
5547 return last
!= gimple_seq_last (*seq_p
);
5550 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5551 to CTX. If entries already exist, force them to be some flavor of private.
5552 If there is no enclosing parallel, do nothing. */
5555 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
5559 if (decl
== NULL
|| !DECL_P (decl
))
5564 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
5567 if (n
->value
& GOVD_SHARED
)
5568 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
5569 else if (n
->value
& GOVD_MAP
)
5570 n
->value
|= GOVD_MAP_TO_ONLY
;
5574 else if (ctx
->region_type
== ORT_TARGET
)
5575 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
5576 else if (ctx
->region_type
!= ORT_WORKSHARE
5577 && ctx
->region_type
!= ORT_SIMD
5578 && ctx
->region_type
!= ORT_TARGET_DATA
)
5579 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
5581 ctx
= ctx
->outer_context
;
5586 /* Similarly for each of the type sizes of TYPE. */
5589 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
5591 if (type
== NULL
|| type
== error_mark_node
)
5593 type
= TYPE_MAIN_VARIANT (type
);
5595 if (ctx
->privatized_types
->add (type
))
5598 switch (TREE_CODE (type
))
5604 case FIXED_POINT_TYPE
:
5605 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
5606 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
5610 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
5611 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
5616 case QUAL_UNION_TYPE
:
5619 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
5620 if (TREE_CODE (field
) == FIELD_DECL
)
5622 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
5623 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
5629 case REFERENCE_TYPE
:
5630 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
5637 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
5638 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
5639 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
5642 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
5645 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
5648 unsigned int nflags
;
5651 if (error_operand_p (decl
))
5654 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5655 there are constructors involved somewhere. */
5656 if (TREE_ADDRESSABLE (TREE_TYPE (decl
))
5657 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
)))
5660 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
5661 if (n
!= NULL
&& n
->value
!= GOVD_ALIGNED
)
5663 /* We shouldn't be re-adding the decl with the same data
5665 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
5666 /* The only combination of data sharing classes we should see is
5667 FIRSTPRIVATE and LASTPRIVATE. */
5668 nflags
= n
->value
| flags
;
5669 gcc_assert ((nflags
& GOVD_DATA_SHARE_CLASS
)
5670 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
)
5671 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
5676 /* When adding a variable-sized variable, we have to handle all sorts
5677 of additional bits of data: the pointer replacement variable, and
5678 the parameters of the type. */
5679 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
5681 /* Add the pointer replacement variable as PRIVATE if the variable
5682 replacement is private, else FIRSTPRIVATE since we'll need the
5683 address of the original variable either for SHARED, or for the
5684 copy into or out of the context. */
5685 if (!(flags
& GOVD_LOCAL
))
5687 if (flags
& GOVD_MAP
)
5688 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
5689 else if (flags
& GOVD_PRIVATE
)
5690 nflags
= GOVD_PRIVATE
;
5692 nflags
= GOVD_FIRSTPRIVATE
;
5693 nflags
|= flags
& GOVD_SEEN
;
5694 t
= DECL_VALUE_EXPR (decl
);
5695 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
5696 t
= TREE_OPERAND (t
, 0);
5697 gcc_assert (DECL_P (t
));
5698 omp_add_variable (ctx
, t
, nflags
);
5701 /* Add all of the variable and type parameters (which should have
5702 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5703 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
5704 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
5705 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
5707 /* The variable-sized variable itself is never SHARED, only some form
5708 of PRIVATE. The sharing would take place via the pointer variable
5709 which we remapped above. */
5710 if (flags
& GOVD_SHARED
)
5711 flags
= GOVD_PRIVATE
| GOVD_DEBUG_PRIVATE
5712 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
5714 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5715 alloca statement we generate for the variable, so make sure it
5716 is available. This isn't automatically needed for the SHARED
5717 case, since we won't be allocating local storage then.
5718 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5719 in this case omp_notice_variable will be called later
5720 on when it is gimplified. */
5721 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
5722 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
5723 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
5725 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
5726 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
5728 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
5730 /* Similar to the direct variable sized case above, we'll need the
5731 size of references being privatized. */
5732 if ((flags
& GOVD_SHARED
) == 0)
5734 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
5735 if (TREE_CODE (t
) != INTEGER_CST
)
5736 omp_notice_variable (ctx
, t
, true);
5743 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
/* NOTE(review): lossy extraction -- source lines are missing between the
   numbered fragments below (the function's return type, braces and some
   condition lines are absent) and statements are split across lines.
   Code text kept byte-identical; comments only added.
   Visible behavior: walks outer contexts looking for an ORT_TARGET region
   and diagnoses threadprivate DECL used there; separately diagnoses use in
   an untied task; inserts DECL (and DECL2, presumably to suppress a repeat
   diagnostic -- TODO confirm) into the relevant context's variable map.  */
5746 /* Notice a threadprivate variable DECL used in OMP context CTX.
5747 This just prints out diagnostics about threadprivate variable uses
5748 in untied tasks. If DECL2 is non-NULL, prevent this warning
5749 on that variable. */
5752 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
5756 struct gimplify_omp_ctx
*octx
;
5758 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
5759 if (octx
->region_type
== ORT_TARGET
)
5761 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
5764 error ("threadprivate variable %qE used in target region",
5766 error_at (octx
->location
, "enclosing target region");
5767 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
5770 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
5773 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
5775 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
5778 error ("threadprivate variable %qE used in untied task",
5780 error_at (ctx
->location
, "enclosing task");
5781 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
5784 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
/* NOTE(review): lossy extraction -- many interior source lines (braces,
   declarations of n/n2, returns, else arms, case fallthrough/break lines)
   are missing between the numbered fragments below, and statements are
   split across lines.  Code text kept byte-identical; comments only added.
   Visible behavior: records a use of DECL in CTX, resolving its implicit
   data-sharing via default_kind / lang-hook predetermined sharing when the
   decl was not yet in CTX->variables, emitting default(none) diagnostics,
   and returning the lang hook's omp_disregard_value_expr verdict.  */
5788 /* Record the fact that DECL was used within the OMP context CTX.
5789 IN_CODE is true when real code uses DECL, and false when we should
5790 merely emit default(none) errors. Return true if DECL is going to
5791 be remapped and thus DECL shouldn't be gimplified into its
5792 DECL_VALUE_EXPR (if any). */
5795 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
5798 unsigned flags
= in_code
? GOVD_SEEN
: 0;
5799 bool ret
= false, shared
;
5801 if (error_operand_p (decl
))
5804 /* Threadprivate variables are predetermined. */
5805 if (is_global_var (decl
))
5807 if (DECL_THREAD_LOCAL_P (decl
))
5808 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
5810 if (DECL_HAS_VALUE_EXPR_P (decl
))
5812 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
5814 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
5815 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
5819 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
5820 if (ctx
->region_type
== ORT_TARGET
)
5822 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, true);
5825 if (!lang_hooks
.types
.omp_mappable_type (TREE_TYPE (decl
)))
5827 error ("%qD referenced in target region does not have "
5828 "a mappable type", decl
);
5829 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_EXPLICIT
| flags
);
5832 omp_add_variable (ctx
, decl
, GOVD_MAP
| flags
);
5836 /* If nothing changed, there's nothing left to do. */
5837 if ((n
->value
& flags
) == flags
)
5846 enum omp_clause_default_kind default_kind
, kind
;
5847 struct gimplify_omp_ctx
*octx
;
5849 if (ctx
->region_type
== ORT_WORKSHARE
5850 || ctx
->region_type
== ORT_SIMD
5851 || ctx
->region_type
== ORT_TARGET_DATA
)
5854 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5855 remapped firstprivate instead of shared. To some extent this is
5856 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5857 default_kind
= ctx
->default_kind
;
5858 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
5859 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
5860 default_kind
= kind
;
5862 switch (default_kind
)
5864 case OMP_CLAUSE_DEFAULT_NONE
:
5865 if ((ctx
->region_type
& ORT_PARALLEL
) != 0)
5867 error ("%qE not specified in enclosing parallel",
5868 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)));
5869 error_at (ctx
->location
, "enclosing parallel");
5871 else if ((ctx
->region_type
& ORT_TASK
) != 0)
5873 error ("%qE not specified in enclosing task",
5874 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)));
5875 error_at (ctx
->location
, "enclosing task");
5877 else if (ctx
->region_type
& ORT_TEAMS
)
5879 error ("%qE not specified in enclosing teams construct",
5880 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)));
5881 error_at (ctx
->location
, "enclosing teams construct");
5886 case OMP_CLAUSE_DEFAULT_SHARED
:
5887 flags
|= GOVD_SHARED
;
5889 case OMP_CLAUSE_DEFAULT_PRIVATE
:
5890 flags
|= GOVD_PRIVATE
;
5892 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
5893 flags
|= GOVD_FIRSTPRIVATE
;
5895 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
5896 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5897 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
5898 if (ctx
->outer_context
)
5899 omp_notice_variable (ctx
->outer_context
, decl
, in_code
);
5900 for (octx
= ctx
->outer_context
; octx
; octx
= octx
->outer_context
)
5904 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0)
5906 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
5907 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
5909 flags
|= GOVD_FIRSTPRIVATE
;
5912 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
5915 if (flags
& GOVD_FIRSTPRIVATE
)
5918 && (TREE_CODE (decl
) == PARM_DECL
5919 || (!is_global_var (decl
)
5920 && DECL_CONTEXT (decl
) == current_function_decl
)))
5922 flags
|= GOVD_FIRSTPRIVATE
;
5925 flags
|= GOVD_SHARED
;
5931 if ((flags
& GOVD_PRIVATE
)
5932 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
5933 flags
|= GOVD_PRIVATE_OUTER_REF
;
5935 omp_add_variable (ctx
, decl
, flags
);
5937 shared
= (flags
& GOVD_SHARED
) != 0;
5938 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
5942 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
5943 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
5945 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
5948 tree t
= DECL_VALUE_EXPR (decl
);
5949 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
5950 t
= TREE_OPERAND (t
, 0);
5951 gcc_assert (DECL_P (t
));
5952 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
5953 n2
->value
|= GOVD_SEEN
;
5956 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
5957 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
5959 /* If nothing changed, there's nothing left to do. */
5960 if ((n
->value
& flags
) == flags
)
5966 /* If the variable is private in the current context, then we don't
5967 need to propagate anything to an outer context. */
5968 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
5970 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
5971 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
5973 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
5974 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
5975 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
5977 if (ctx
->outer_context
5978 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
/* NOTE(review): lossy extraction -- interior source lines (braces, the
   declaration of n, returns, and several condition/argument lines of the
   error calls) are missing between the numbered fragments below.  Code
   text kept byte-identical; comments only added.
   Visible behavior: checks an OMP loop iteration variable DECL against the
   data sharing recorded in CTX->variables, emitting the iteration-variable
   diagnostics below; SIMD selects which clauses are disallowed (values 1
   and 2 are treated distinctly); recurses into outer contexts for
   non-workshare/simd regions.  */
5983 /* Verify that DECL is private within CTX. If there's specific information
5984 to the contrary in the innermost scope, generate an error. */
5987 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
5991 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
5994 if (n
->value
& GOVD_SHARED
)
5996 if (ctx
== gimplify_omp_ctxp
)
5999 error ("iteration variable %qE is predetermined linear",
6002 error ("iteration variable %qE should be private",
6004 n
->value
= GOVD_PRIVATE
;
6010 else if ((n
->value
& GOVD_EXPLICIT
) != 0
6011 && (ctx
== gimplify_omp_ctxp
6012 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
6013 && gimplify_omp_ctxp
->outer_context
== ctx
)))
6015 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
6016 error ("iteration variable %qE should not be firstprivate",
6018 else if ((n
->value
& GOVD_REDUCTION
) != 0)
6019 error ("iteration variable %qE should not be reduction",
6021 else if (simd
== 1 && (n
->value
& GOVD_LASTPRIVATE
) != 0)
6022 error ("iteration variable %qE should not be lastprivate",
6024 else if (simd
&& (n
->value
& GOVD_PRIVATE
) != 0)
6025 error ("iteration variable %qE should not be private",
6027 else if (simd
== 2 && (n
->value
& GOVD_LINEAR
) != 0)
6028 error ("iteration variable %qE is predetermined linear",
6031 return (ctx
== gimplify_omp_ctxp
6032 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
6033 && gimplify_omp_ctxp
->outer_context
== ctx
));
6036 if (ctx
->region_type
!= ORT_WORKSHARE
6037 && ctx
->region_type
!= ORT_SIMD
)
6039 else if (ctx
->outer_context
)
6040 return omp_is_private (ctx
->outer_context
, decl
, simd
);
/* NOTE(review): lossy extraction -- interior source lines (the do-loop
   opening, braces, part of the return condition at 6057-6062, and the
   declaration of n) are missing between the numbered fragments below.
   Code text kept byte-identical; comments only added.
   Visible behavior: walks outward past workshare/simd contexts, stops at
   target/target-data regions, and reports whether DECL is recorded as
   non-shared in the first context that binds it.  */
6044 /* Return true if DECL is private within a parallel region
6045 that binds to the current construct's context or in parallel
6046 region's REDUCTION clause. */
6049 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
6055 ctx
= ctx
->outer_context
;
6057 return !(is_global_var (decl
)
6058 /* References might be private, but might be shared too,
6059 when checking for copyprivate, assume they might be
6060 private, otherwise assume they might be shared. */
6062 && lang_hooks
.decls
.omp_privatize_by_reference (decl
)));
6064 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0)
6067 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
6069 return (n
->value
& GOVD_SHARED
) == 0;
6071 while (ctx
->region_type
== ORT_WORKSHARE
6072 || ctx
->region_type
== ORT_SIMD
);
/* NOTE(review): lossy extraction -- the function's return type, braces,
   loop construct, case labels for the switch default path, and the
   return statements are missing between the numbered fragments below.
   Code text kept byte-identical; comments only added.
   Visible behavior: climbs outer contexts and inspects region_type,
   combined_loop and distribute to decide whether lastprivate is
   unsupported for a combined-with-distribute construct.  */
6076 /* Return true if the CTX is combined with distribute and thus
6077 lastprivate can't be supported. */
6080 omp_no_lastprivate (struct gimplify_omp_ctx
*ctx
)
6084 if (ctx
->outer_context
== NULL
)
6086 ctx
= ctx
->outer_context
;
6087 switch (ctx
->region_type
)
6090 if (!ctx
->combined_loop
)
6092 if (ctx
->distribute
)
6095 case ORT_COMBINED_PARALLEL
:
6097 case ORT_COMBINED_TEAMS
:
/* NOTE(review): lossy extraction -- a large number of interior source
   lines are missing between the numbered fragments below (braces, the
   declarations of c/decl/flags, break statements, goto labels, several
   condition lines) and statements are split across lines.  Code text is
   kept byte-identical; comments only added.
   Visible behavior: opens a new gimplify_omp_ctx for REGION_TYPE, iterates
   the clause chain at *LIST_P, and per clause code records data sharing
   via omp_add_variable / omp_notice_variable, gimplifies clause operands
   (sizes, steps, alignments, num_threads etc.), pre-gimplifies REDUCTION /
   LASTPRIVATE / LINEAR statement sequences under the new context, and
   unlinks clauses flagged for removal; finally publishes the new context
   in gimplify_omp_ctxp.  */
6106 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
6107 and previous omp contexts. */
6110 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
6111 enum omp_region_type region_type
)
6113 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
6116 ctx
= new_omp_context (region_type
);
6117 outer_ctx
= ctx
->outer_context
;
6119 while ((c
= *list_p
) != NULL
)
6121 bool remove
= false;
6122 bool notice_outer
= true;
6123 const char *check_non_private
= NULL
;
6127 switch (OMP_CLAUSE_CODE (c
))
6129 case OMP_CLAUSE_PRIVATE
:
6130 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
6131 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
6133 flags
|= GOVD_PRIVATE_OUTER_REF
;
6134 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
6137 notice_outer
= false;
6139 case OMP_CLAUSE_SHARED
:
6140 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
6142 case OMP_CLAUSE_FIRSTPRIVATE
:
6143 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
6144 check_non_private
= "firstprivate";
6146 case OMP_CLAUSE_LASTPRIVATE
:
6147 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
6148 check_non_private
= "lastprivate";
6149 decl
= OMP_CLAUSE_DECL (c
);
6150 if (omp_no_lastprivate (ctx
))
6152 notice_outer
= false;
6153 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
6155 else if (error_operand_p (decl
))
6158 && outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
6159 && splay_tree_lookup (outer_ctx
->variables
,
6160 (splay_tree_key
) decl
) == NULL
)
6161 omp_add_variable (outer_ctx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
6163 && outer_ctx
->region_type
== ORT_WORKSHARE
6164 && outer_ctx
->combined_loop
6165 && splay_tree_lookup (outer_ctx
->variables
,
6166 (splay_tree_key
) decl
) == NULL
6167 && !omp_check_private (outer_ctx
, decl
, false))
6169 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
6170 if (outer_ctx
->outer_context
6171 && (outer_ctx
->outer_context
->region_type
6172 == ORT_COMBINED_PARALLEL
)
6173 && splay_tree_lookup (outer_ctx
->outer_context
->variables
,
6174 (splay_tree_key
) decl
) == NULL
)
6175 omp_add_variable (outer_ctx
->outer_context
, decl
,
6176 GOVD_SHARED
| GOVD_SEEN
);
6179 case OMP_CLAUSE_REDUCTION
:
6180 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
6181 check_non_private
= "reduction";
6183 case OMP_CLAUSE_LINEAR
:
6184 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
6185 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
6192 /* For combined #pragma omp parallel for simd, need to put
6193 lastprivate and perhaps firstprivate too on the
6194 parallel. Similarly for #pragma omp for simd. */
6195 struct gimplify_omp_ctx
*octx
= outer_ctx
;
6197 if (omp_no_lastprivate (ctx
))
6198 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
6201 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
6202 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
6204 decl
= OMP_CLAUSE_DECL (c
);
6205 if (error_operand_p (decl
))
6211 && octx
->region_type
== ORT_WORKSHARE
6212 && octx
->combined_loop
)
6214 if (octx
->outer_context
6215 && (octx
->outer_context
->region_type
6216 == ORT_COMBINED_PARALLEL
6217 || (octx
->outer_context
->region_type
6218 == ORT_COMBINED_TEAMS
)))
6219 octx
= octx
->outer_context
;
6220 else if (omp_check_private (octx
, decl
, false))
6225 gcc_checking_assert (splay_tree_lookup (octx
->variables
,
6229 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
6230 flags
|= GOVD_FIRSTPRIVATE
;
6231 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
6232 flags
|= GOVD_LASTPRIVATE
;
6233 omp_add_variable (octx
, decl
, flags
);
6234 if (octx
->outer_context
== NULL
)
6236 octx
= octx
->outer_context
;
6241 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
6242 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
6243 omp_notice_variable (octx
, decl
, true);
6245 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
6246 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
6247 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
6249 notice_outer
= false;
6250 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
6254 case OMP_CLAUSE_MAP
:
6255 decl
= OMP_CLAUSE_DECL (c
);
6256 if (error_operand_p (decl
))
6261 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
6262 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
6263 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
6264 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
6265 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
6272 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
6273 NULL
, is_gimple_lvalue
, fb_lvalue
)
6281 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
6284 case OMP_CLAUSE_DEPEND
:
6285 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
6287 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
6288 NULL
, is_gimple_val
, fb_rvalue
);
6289 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
6291 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
6296 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
6297 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
6298 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
6306 case OMP_CLAUSE_FROM
:
6307 case OMP_CLAUSE__CACHE_
:
6308 decl
= OMP_CLAUSE_DECL (c
);
6309 if (error_operand_p (decl
))
6314 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
6315 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
6316 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
6317 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
6318 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
6325 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
6326 NULL
, is_gimple_lvalue
, fb_lvalue
)
6337 decl
= OMP_CLAUSE_DECL (c
);
6338 if (error_operand_p (decl
))
6343 omp_add_variable (ctx
, decl
, flags
);
6344 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
6345 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
6347 omp_add_variable (ctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
6348 GOVD_LOCAL
| GOVD_SEEN
);
6349 gimplify_omp_ctxp
= ctx
;
6350 push_gimplify_context ();
6352 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
6353 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
6355 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
6356 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
6357 pop_gimplify_context
6358 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
6359 push_gimplify_context ();
6360 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
6361 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
6362 pop_gimplify_context
6363 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
6364 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
6365 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
6367 gimplify_omp_ctxp
= outer_ctx
;
6369 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
6370 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
6372 gimplify_omp_ctxp
= ctx
;
6373 push_gimplify_context ();
6374 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
6376 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
6378 TREE_SIDE_EFFECTS (bind
) = 1;
6379 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
6380 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
6382 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
6383 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
6384 pop_gimplify_context
6385 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
6386 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
6388 gimplify_omp_ctxp
= outer_ctx
;
6390 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
6391 && OMP_CLAUSE_LINEAR_STMT (c
))
6393 gimplify_omp_ctxp
= ctx
;
6394 push_gimplify_context ();
6395 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
6397 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
6399 TREE_SIDE_EFFECTS (bind
) = 1;
6400 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
6401 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
6403 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
6404 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
6405 pop_gimplify_context
6406 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
6407 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
6409 gimplify_omp_ctxp
= outer_ctx
;
6415 case OMP_CLAUSE_COPYIN
:
6416 case OMP_CLAUSE_COPYPRIVATE
:
6417 decl
= OMP_CLAUSE_DECL (c
);
6418 if (error_operand_p (decl
))
6423 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
6425 && !omp_check_private (ctx
, decl
, true))
6428 if (is_global_var (decl
))
6430 if (DECL_THREAD_LOCAL_P (decl
))
6432 else if (DECL_HAS_VALUE_EXPR_P (decl
))
6434 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
6438 && DECL_THREAD_LOCAL_P (value
))
6443 error_at (OMP_CLAUSE_LOCATION (c
),
6444 "copyprivate variable %qE is not threadprivate"
6445 " or private in outer context", DECL_NAME (decl
));
6449 omp_notice_variable (outer_ctx
, decl
, true);
6450 if (check_non_private
6451 && region_type
== ORT_WORKSHARE
6452 && omp_check_private (ctx
, decl
, false))
6454 error ("%s variable %qE is private in outer context",
6455 check_non_private
, DECL_NAME (decl
));
6460 case OMP_CLAUSE_FINAL
:
6462 OMP_CLAUSE_OPERAND (c
, 0)
6463 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
6466 case OMP_CLAUSE_SCHEDULE
:
6467 case OMP_CLAUSE_NUM_THREADS
:
6468 case OMP_CLAUSE_NUM_TEAMS
:
6469 case OMP_CLAUSE_THREAD_LIMIT
:
6470 case OMP_CLAUSE_DIST_SCHEDULE
:
6471 case OMP_CLAUSE_DEVICE
:
6472 case OMP_CLAUSE__CILK_FOR_COUNT_
:
6473 case OMP_CLAUSE_ASYNC
:
6474 case OMP_CLAUSE_WAIT
:
6475 case OMP_CLAUSE_NUM_GANGS
:
6476 case OMP_CLAUSE_NUM_WORKERS
:
6477 case OMP_CLAUSE_VECTOR_LENGTH
:
6478 case OMP_CLAUSE_GANG
:
6479 case OMP_CLAUSE_WORKER
:
6480 case OMP_CLAUSE_VECTOR
:
6481 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
6482 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
6484 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_GANG
6485 && gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
6486 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
6490 case OMP_CLAUSE_DEVICE_RESIDENT
:
6491 case OMP_CLAUSE_USE_DEVICE
:
6492 case OMP_CLAUSE_INDEPENDENT
:
6496 case OMP_CLAUSE_NOWAIT
:
6497 case OMP_CLAUSE_ORDERED
:
6498 case OMP_CLAUSE_UNTIED
:
6499 case OMP_CLAUSE_COLLAPSE
:
6500 case OMP_CLAUSE_AUTO
:
6501 case OMP_CLAUSE_SEQ
:
6502 case OMP_CLAUSE_MERGEABLE
:
6503 case OMP_CLAUSE_PROC_BIND
:
6504 case OMP_CLAUSE_SAFELEN
:
6507 case OMP_CLAUSE_ALIGNED
:
6508 decl
= OMP_CLAUSE_DECL (c
);
6509 if (error_operand_p (decl
))
6514 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
6515 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
6520 if (!is_global_var (decl
)
6521 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
6522 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
6525 case OMP_CLAUSE_DEFAULT
:
6526 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
6534 *list_p
= OMP_CLAUSE_CHAIN (c
);
6536 list_p
= &OMP_CLAUSE_CHAIN (c
);
6539 gimplify_omp_ctxp
= ctx
;
/* NOTE(review): lossy extraction -- the struct's member declarations
   (original lines 6543-6547, presumably list_p and pre_p given the casts
   below -- TODO confirm) and many interior lines of the callback (braces,
   declarations of clause/private_debug/on, returns) are missing between
   the numbered fragments.  Code text kept byte-identical; comments only
   added.  */
6542 struct gimplify_adjust_omp_clauses_data
6548 /* For all variables that were not actually used within the context,
6549 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
6552 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
6554 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
6556 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
6557 tree decl
= (tree
) n
->key
;
6558 unsigned flags
= n
->value
;
6559 enum omp_clause_code code
;
6563 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
6565 if ((flags
& GOVD_SEEN
) == 0)
6567 if (flags
& GOVD_DEBUG_PRIVATE
)
6569 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_PRIVATE
);
6570 private_debug
= true;
6572 else if (flags
& GOVD_MAP
)
6573 private_debug
= false;
6576 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
6577 !!(flags
& GOVD_SHARED
));
6579 code
= OMP_CLAUSE_PRIVATE
;
6580 else if (flags
& GOVD_MAP
)
6581 code
= OMP_CLAUSE_MAP
;
6582 else if (flags
& GOVD_SHARED
)
6584 if (is_global_var (decl
))
6586 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
6590 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
6591 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
6592 | GOVD_PRIVATE
| GOVD_REDUCTION
6593 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
6595 ctx
= ctx
->outer_context
;
6600 code
= OMP_CLAUSE_SHARED
;
6602 else if (flags
& GOVD_PRIVATE
)
6603 code
= OMP_CLAUSE_PRIVATE
;
6604 else if (flags
& GOVD_FIRSTPRIVATE
)
6605 code
= OMP_CLAUSE_FIRSTPRIVATE
;
6606 else if (flags
& GOVD_LASTPRIVATE
)
6607 code
= OMP_CLAUSE_LASTPRIVATE
;
6608 else if (flags
& GOVD_ALIGNED
)
6613 clause
= build_omp_clause (input_location
, code
);
6614 OMP_CLAUSE_DECL (clause
) = decl
;
6615 OMP_CLAUSE_CHAIN (clause
) = *list_p
;
6617 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
6618 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
6619 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
6620 else if (code
== OMP_CLAUSE_MAP
)
6622 OMP_CLAUSE_SET_MAP_KIND (clause
,
6623 flags
& GOVD_MAP_TO_ONLY
6626 if (DECL_SIZE (decl
)
6627 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
6629 tree decl2
= DECL_VALUE_EXPR (decl
);
6630 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
6631 decl2
= TREE_OPERAND (decl2
, 0);
6632 gcc_assert (DECL_P (decl2
));
6633 tree mem
= build_simple_mem_ref (decl2
);
6634 OMP_CLAUSE_DECL (clause
) = mem
;
6635 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
6636 if (gimplify_omp_ctxp
->outer_context
)
6638 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
6639 omp_notice_variable (ctx
, decl2
, true);
6640 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
6642 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
6644 OMP_CLAUSE_DECL (nc
) = decl
;
6645 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
6646 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
6647 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
6648 OMP_CLAUSE_CHAIN (clause
) = nc
;
6651 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
6653 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
6655 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
6656 OMP_CLAUSE_DECL (nc
) = decl
;
6657 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
6658 OMP_CLAUSE_CHAIN (nc
) = *list_p
;
6659 OMP_CLAUSE_CHAIN (clause
) = nc
;
6660 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
6661 gimplify_omp_ctxp
= ctx
->outer_context
;
6662 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
6663 gimplify_omp_ctxp
= ctx
;
6666 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
6667 gimplify_omp_ctxp
= ctx
->outer_context
;
6668 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
);
6669 gimplify_omp_ctxp
= ctx
;
/* Post-pass over the clause list *LIST_P after the construct's body has
   been gimplified: drops PRIVATE/SHARED/FIRSTPRIVATE/LINEAR/ALIGNED/MAP
   clauses whose decl was never marked GOVD_SEEN, fixes up LASTPRIVATE's
   firstprivate flag, rewrites variable-sized MAP/TO/FROM clauses through
   the decl's pointer replacement (DECL_VALUE_EXPR) plus a GOMP_MAP_POINTER
   companion clause, then appends implicit data-sharing clauses via
   gimplify_adjust_omp_clauses_1 and pops/deletes the context.
   NOTE(review): lossy extraction -- interior source lines (braces, the
   declarations of c/decl/n, break statements, some conditions) are missing
   between the numbered fragments below; code text kept byte-identical,
   comments only added.  */
6674 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, tree
*list_p
)
6676 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
6679 while ((c
= *list_p
) != NULL
)
6682 bool remove
= false;
6684 switch (OMP_CLAUSE_CODE (c
))
6686 case OMP_CLAUSE_PRIVATE
:
6687 case OMP_CLAUSE_SHARED
:
6688 case OMP_CLAUSE_FIRSTPRIVATE
:
6689 case OMP_CLAUSE_LINEAR
:
6690 decl
= OMP_CLAUSE_DECL (c
);
6691 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
6692 remove
= !(n
->value
& GOVD_SEEN
);
6695 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
6696 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
6697 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
6699 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
6700 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
6702 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
6703 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
6708 case OMP_CLAUSE_LASTPRIVATE
:
6709 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6710 accurately reflect the presence of a FIRSTPRIVATE clause. */
6711 decl
= OMP_CLAUSE_DECL (c
);
6712 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
6713 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
6714 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
6715 if (omp_no_lastprivate (ctx
))
6717 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
6720 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_PRIVATE
;
6724 case OMP_CLAUSE_ALIGNED
:
6725 decl
= OMP_CLAUSE_DECL (c
);
6726 if (!is_global_var (decl
))
6728 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
6729 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
6730 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
6732 struct gimplify_omp_ctx
*octx
;
6734 && (n
->value
& (GOVD_DATA_SHARE_CLASS
6735 & ~GOVD_FIRSTPRIVATE
)))
6738 for (octx
= ctx
->outer_context
; octx
;
6739 octx
= octx
->outer_context
)
6741 n
= splay_tree_lookup (octx
->variables
,
6742 (splay_tree_key
) decl
);
6745 if (n
->value
& GOVD_LOCAL
)
6747 /* We have to avoid assigning a shared variable
6748 to itself when trying to add
6749 __builtin_assume_aligned. */
6750 if (n
->value
& GOVD_SHARED
)
6758 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
6760 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
6761 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
6766 case OMP_CLAUSE_MAP
:
6767 decl
= OMP_CLAUSE_DECL (c
);
6770 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
6771 if (ctx
->region_type
== ORT_TARGET
&& !(n
->value
& GOVD_SEEN
))
6773 else if (DECL_SIZE (decl
)
6774 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
6775 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
)
6777 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
6778 for these, TREE_CODE (DECL_SIZE (decl)) will always be
6780 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
6782 tree decl2
= DECL_VALUE_EXPR (decl
);
6783 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
6784 decl2
= TREE_OPERAND (decl2
, 0);
6785 gcc_assert (DECL_P (decl2
));
6786 tree mem
= build_simple_mem_ref (decl2
);
6787 OMP_CLAUSE_DECL (c
) = mem
;
6788 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
6789 if (ctx
->outer_context
)
6791 omp_notice_variable (ctx
->outer_context
, decl2
, true);
6792 omp_notice_variable (ctx
->outer_context
,
6793 OMP_CLAUSE_SIZE (c
), true);
6795 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
6797 OMP_CLAUSE_DECL (nc
) = decl
;
6798 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
6799 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
6800 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
6801 OMP_CLAUSE_CHAIN (c
) = nc
;
6804 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
6805 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
6809 case OMP_CLAUSE_FROM
:
6810 case OMP_CLAUSE__CACHE_
:
6811 decl
= OMP_CLAUSE_DECL (c
);
6814 if (DECL_SIZE (decl
)
6815 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
6817 tree decl2
= DECL_VALUE_EXPR (decl
);
6818 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
6819 decl2
= TREE_OPERAND (decl2
, 0);
6820 gcc_assert (DECL_P (decl2
));
6821 tree mem
= build_simple_mem_ref (decl2
);
6822 OMP_CLAUSE_DECL (c
) = mem
;
6823 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
6824 if (ctx
->outer_context
)
6826 omp_notice_variable (ctx
->outer_context
, decl2
, true);
6827 omp_notice_variable (ctx
->outer_context
,
6828 OMP_CLAUSE_SIZE (c
), true);
6831 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
6832 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
6835 case OMP_CLAUSE_REDUCTION
:
6836 case OMP_CLAUSE_COPYIN
:
6837 case OMP_CLAUSE_COPYPRIVATE
:
6839 case OMP_CLAUSE_NUM_THREADS
:
6840 case OMP_CLAUSE_NUM_TEAMS
:
6841 case OMP_CLAUSE_THREAD_LIMIT
:
6842 case OMP_CLAUSE_DIST_SCHEDULE
:
6843 case OMP_CLAUSE_DEVICE
:
6844 case OMP_CLAUSE_SCHEDULE
:
6845 case OMP_CLAUSE_NOWAIT
:
6846 case OMP_CLAUSE_ORDERED
:
6847 case OMP_CLAUSE_DEFAULT
:
6848 case OMP_CLAUSE_UNTIED
:
6849 case OMP_CLAUSE_COLLAPSE
:
6850 case OMP_CLAUSE_FINAL
:
6851 case OMP_CLAUSE_MERGEABLE
:
6852 case OMP_CLAUSE_PROC_BIND
:
6853 case OMP_CLAUSE_SAFELEN
:
6854 case OMP_CLAUSE_DEPEND
:
6855 case OMP_CLAUSE__CILK_FOR_COUNT_
:
6856 case OMP_CLAUSE_ASYNC
:
6857 case OMP_CLAUSE_WAIT
:
6858 case OMP_CLAUSE_DEVICE_RESIDENT
:
6859 case OMP_CLAUSE_USE_DEVICE
:
6860 case OMP_CLAUSE_INDEPENDENT
:
6861 case OMP_CLAUSE_NUM_GANGS
:
6862 case OMP_CLAUSE_NUM_WORKERS
:
6863 case OMP_CLAUSE_VECTOR_LENGTH
:
6864 case OMP_CLAUSE_GANG
:
6865 case OMP_CLAUSE_WORKER
:
6866 case OMP_CLAUSE_VECTOR
:
6867 case OMP_CLAUSE_AUTO
:
6868 case OMP_CLAUSE_SEQ
:
6876 *list_p
= OMP_CLAUSE_CHAIN (c
);
6878 list_p
= &OMP_CLAUSE_CHAIN (c
);
6881 /* Add in any implicit data sharing. */
6882 struct gimplify_adjust_omp_clauses_data data
;
6883 data
.list_p
= list_p
;
6885 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
6887 gimplify_omp_ctxp
= ctx
->outer_context
;
6888 delete_omp_context (ctx
);
/* NOTE(review): mangled extraction — original source lines are split across
   physical lines and prefixed with their original line numbers; some original
   lines (braces/blanks, the return) are missing from this view.  */
6891 /* Gimplify OACC_CACHE. */
/* gimplify_oacc_cache: lower an OACC_CACHE node.  Scans its clause list in a
   worksharing region, lets gimplify_adjust_omp_clauses rewrite/prune it, then
   drops the construct (no statement is emitted for it here).  */
6894 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
6896 tree expr
= *expr_p
;
/* Scan then adjust the OACC_CACHE clauses; pre_p receives any statements
   produced while gimplifying clause operands.  */
6898 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_WORKSHARE
);
6899 gimplify_adjust_omp_clauses (pre_p
, &OACC_CACHE_CLAUSES (expr
));
6901 /* TODO: Do something sensible with this information. */
/* The construct itself is removed from the tree.  */
6903 *expr_p
= NULL_TREE
;
/* NOTE(review): mangled extraction — fragments below carry their original
   line numbers; some original lines (braces, the ORT_PARALLEL arm of the
   ternary, the return) are missing from this view.  */
6906 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6907 gimplification of the body, as well as scanning the body for used
6908 variables. We need to do this scan now, because variable-sized
6909 decls will be decomposed during gimplification. */
6912 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
6914 tree expr
= *expr_p
;
6916 gimple_seq body
= NULL
;
/* Scan clauses; region type depends on whether this parallel is combined
   with a worksharing construct (the non-combined arm is not visible here).  */
6918 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
6919 OMP_PARALLEL_COMBINED (expr
)
6920 ? ORT_COMBINED_PARALLEL
/* Gimplify the body in its own gimplify context; pop with the GIMPLE_BIND
   if one was produced, otherwise with NULL.  */
6923 push_gimplify_context ();
6925 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
6926 if (gimple_code (g
) == GIMPLE_BIND
)
6927 pop_gimplify_context (g
);
6929 pop_gimplify_context (NULL
);
6931 gimplify_adjust_omp_clauses (pre_p
, &OMP_PARALLEL_CLAUSES (expr
));
/* Build the GIMPLE_OMP_PARALLEL, tag it combined if needed, emit it, and
   drop the GENERIC node.  */
6933 g
= gimple_build_omp_parallel (body
,
6934 OMP_PARALLEL_CLAUSES (expr
),
6935 NULL_TREE
, NULL_TREE
);
6936 if (OMP_PARALLEL_COMBINED (expr
))
6937 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
6938 gimplify_seq_add_stmt (pre_p
, g
);
6939 *expr_p
= NULL_TREE
;
/* NOTE(review): mangled extraction — fragments carry original line numbers;
   several original lines (braces, the OMP_CLAUSE_UNTIED argument to
   find_omp_clause, the return) are missing from this view.  */
6942 /* Gimplify the contents of an OMP_TASK statement. This involves
6943 gimplification of the body, as well as scanning the body for used
6944 variables. We need to do this scan now, because variable-sized
6945 decls will be decomposed during gimplification. */
6948 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
6950 tree expr
= *expr_p
;
6952 gimple_seq body
= NULL
;
/* Region type depends on presence of an untied clause (the clause code
   searched for is on a missing line — presumably OMP_CLAUSE_UNTIED).  */
6954 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
6955 find_omp_clause (OMP_TASK_CLAUSES (expr
),
6957 ? ORT_UNTIED_TASK
: ORT_TASK
);
/* Gimplify the task body in its own context, mirroring
   gimplify_omp_parallel above.  */
6959 push_gimplify_context ();
6961 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
6962 if (gimple_code (g
) == GIMPLE_BIND
)
6963 pop_gimplify_context (g
);
6965 pop_gimplify_context (NULL
);
6967 gimplify_adjust_omp_clauses (pre_p
, &OMP_TASK_CLAUSES (expr
));
/* Build and emit the GIMPLE_OMP_TASK; the trailing NULL_TREE operands are
   filled in by later passes.  */
6969 g
= gimple_build_omp_task (body
,
6970 OMP_TASK_CLAUSES (expr
),
6971 NULL_TREE
, NULL_TREE
,
6972 NULL_TREE
, NULL_TREE
, NULL_TREE
);
6973 gimplify_seq_add_stmt (pre_p
, g
);
6974 *expr_p
= NULL_TREE
;
/* NOTE(review): mangled extraction — most of this walk_tree callback's body
   (case labels, return values, braces) is missing from this view; only the
   skeleton below is visible.  Comments are hedged accordingly.  */
6977 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6978 with non-NULL OMP_FOR_INIT. */
6981 find_combined_omp_for (tree
*tp
, int *walk_subtrees
, void *)
/* Dispatch on the visited node; the visible fragments suggest it returns the
   loop node when OMP_FOR_INIT is non-NULL and recurses into statement
   lists — TODO confirm against the unmangled source.  */
6984 switch (TREE_CODE (*tp
))
6990 if (OMP_FOR_INIT (*tp
) != NULL_TREE
)
6994 case STATEMENT_LIST
:
/* NOTE(review): mangled extraction — fragments carry original line numbers
   and MANY original lines are absent (e.g. 7012, 7021-7023, 7025-7035,
   7151-7156, 7161-7167), including declarations of `simd', `i', `c', `kind',
   `gfor', `seq' and several case labels/braces.  Comments below annotate only
   what is visible and hedge everything that depends on the missing lines.  */
7004 /* Gimplify the gross structure of an OMP_FOR statement. */
7006 static enum gimplify_status
7007 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
/* Locals: for_stmt may be rewritten to an inner combined loop while
   orig_for_stmt keeps the original node; ret accumulates the worst
   gimplify_status seen.  */
7009 tree for_stmt
, orig_for_stmt
, decl
, var
, t
;
7010 enum gimplify_status ret
= GS_ALL_DONE
;
7011 enum gimplify_status tret
;
7013 gimple_seq for_body
, for_pre_body
;
7016 bitmap has_decl_expr
= NULL
;
7018 orig_for_stmt
= for_stmt
= *expr_p
;
/* The switch on the loop code presumably sets `simd' and handles the other
   loop kinds; those arms are on missing lines — TODO confirm.  */
7020 switch (TREE_CODE (for_stmt
))
7024 case OMP_DISTRIBUTE
:
7036 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
7037 clause for the IV. */
7038 if (simd
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
7040 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
7041 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
7042 decl
= TREE_OPERAND (t
, 0);
7043 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
7044 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7045 && OMP_CLAUSE_DECL (c
) == decl
)
7047 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
/* Scan all clauses; SIMD loops use the ORT_SIMD region type.  */
7052 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
,
7053 simd
? ORT_SIMD
: ORT_WORKSHARE
);
7054 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
7055 gimplify_omp_ctxp
->distribute
= true;
7057 /* Handle OMP_FOR_INIT. */
/* For SIMD loops, record DECL_EXPR'd decls from the pre-body in a bitmap so
   their IVs can later be marked linear-no-copyout.  */
7058 for_pre_body
= NULL
;
7059 if (simd
&& OMP_FOR_PRE_BODY (for_stmt
))
7061 has_decl_expr
= BITMAP_ALLOC (NULL
);
7062 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
7063 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
7066 t
= OMP_FOR_PRE_BODY (for_stmt
);
7067 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
7069 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
7071 tree_stmt_iterator si
;
7072 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
7076 if (TREE_CODE (t
) == DECL_EXPR
7077 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
7078 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
/* Gimplify the pre-body into for_pre_body and detach it from the node.  */
7082 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
7083 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
/* A NULL OMP_FOR_INIT marks the outer node of a combined construct; locate
   the innermost loop with a real init vector and work on that.  */
7085 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
7087 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
7088 for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
), find_combined_omp_for
,
7090 gcc_assert (for_stmt
!= NULL_TREE
);
7091 gimplify_omp_ctxp
->combined_loop
= true;
/* The init/cond/incr vectors must agree in length; one entry per
   collapsed loop dimension.  */
7095 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
7096 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
7097 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
7098 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
/* Per-dimension processing loop.  */
7099 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
7101 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
7102 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
7103 decl
= TREE_OPERAND (t
, 0);
7104 gcc_assert (DECL_P (decl
));
7105 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
7106 || POINTER_TYPE_P (TREE_TYPE (decl
)));
7108 /* Make sure the iteration variable is private. */
7110 tree c2
= NULL_TREE
;
7111 if (orig_for_stmt
!= for_stmt
)
7112 /* Do this only on innermost construct for combined ones. */;
7115 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
7116 (splay_tree_key
)decl
);
7117 omp_is_private (gimplify_omp_ctxp
, decl
,
7118 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
/* If the IV already has a data-sharing class, just notice it; otherwise for
   single-dimension SIMD loops synthesize a linear clause for it.  */
7120 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7121 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
7122 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
7124 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
7125 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
7126 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
7128 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
7129 || omp_no_lastprivate (gimplify_omp_ctxp
))
7131 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
7132 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
7134 OMP_CLAUSE_DECL (c
) = decl
;
7135 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
7136 OMP_FOR_CLAUSES (for_stmt
) = c
;
7138 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
/* Propagate lastprivate-ness of a copied-out linear IV to the enclosing
   worksharing/parallel context of a combined construct.  */
7139 struct gimplify_omp_ctx
*outer
7140 = gimplify_omp_ctxp
->outer_context
;
7141 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
7143 if (outer
->region_type
== ORT_WORKSHARE
7144 && outer
->combined_loop
)
7146 if (outer
->outer_context
7147 && (outer
->outer_context
->region_type
7148 == ORT_COMBINED_PARALLEL
))
7149 outer
= outer
->outer_context
;
7150 else if (omp_check_private (outer
, decl
, false))
7153 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
7157 omp_add_variable (outer
, decl
,
7158 GOVD_LASTPRIVATE
| GOVD_SEEN
);
7159 if (outer
->outer_context
)
7160 omp_notice_variable (outer
->outer_context
, decl
, true);
/* Non-SIMD (or multi-dimension) path: decide lastprivate vs private for the
   IV; the head of this condition (and the `lastprivate' declaration) is on
   missing lines 7161-7167 — TODO confirm.  */
7168 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
7169 && !omp_no_lastprivate (gimplify_omp_ctxp
);
7170 struct gimplify_omp_ctx
*outer
7171 = gimplify_omp_ctxp
->outer_context
;
7172 if (outer
&& lastprivate
)
7174 if (outer
->region_type
== ORT_WORKSHARE
7175 && outer
->combined_loop
)
7177 if (outer
->outer_context
7178 && (outer
->outer_context
->region_type
7179 == ORT_COMBINED_PARALLEL
))
7180 outer
= outer
->outer_context
;
7181 else if (omp_check_private (outer
, decl
, false))
7184 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
7188 omp_add_variable (outer
, decl
,
7189 GOVD_LASTPRIVATE
| GOVD_SEEN
);
7190 if (outer
->outer_context
)
7191 omp_notice_variable (outer
->outer_context
, decl
, true);
7195 c
= build_omp_clause (input_location
,
7196 lastprivate
? OMP_CLAUSE_LASTPRIVATE
7197 : OMP_CLAUSE_PRIVATE
);
7198 OMP_CLAUSE_DECL (c
) = decl
;
7199 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
7200 OMP_FOR_CLAUSES (for_stmt
) = c
;
7201 omp_add_variable (gimplify_omp_ctxp
, decl
,
7202 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
7203 | GOVD_EXPLICIT
| GOVD_SEEN
);
7207 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
7208 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
7210 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
7212 /* If DECL is not a gimple register, create a temporary variable to act
7213 as an iteration counter. This is valid, since DECL cannot be
7214 modified in the body of the loop. Similarly for any iteration vars
7215 in simd with collapse > 1 where the iterator vars must be
7217 if (orig_for_stmt
!= for_stmt
)
7219 else if (!is_gimple_reg (decl
)
7220 || (simd
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1))
7222 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
7223 TREE_OPERAND (t
, 0) = var
;
/* Copy the surrogate IV back into decl inside the loop body.  */
7225 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
7227 if (simd
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
7229 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
7230 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
7231 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
7232 OMP_CLAUSE_DECL (c2
) = var
;
7233 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
7234 OMP_FOR_CLAUSES (for_stmt
) = c2
;
7235 omp_add_variable (gimplify_omp_ctxp
, var
,
7236 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
7244 omp_add_variable (gimplify_omp_ctxp
, var
,
7245 GOVD_PRIVATE
| GOVD_SEEN
);
/* Gimplify the initial value into the pre-body.  */
7250 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
7251 is_gimple_val
, fb_rvalue
);
7252 ret
= MIN (ret
, tret
);
7253 if (ret
== GS_ERROR
)
7256 /* Handle OMP_FOR_COND. */
7257 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
7258 gcc_assert (COMPARISON_CLASS_P (t
));
7259 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
7261 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
7262 is_gimple_val
, fb_rvalue
);
7263 ret
= MIN (ret
, tret
);
7265 /* Handle OMP_FOR_INCR. */
/* Canonicalize the increment to var = var +/- step; unit-step pre/post
   inc/dec forms are rewritten to an explicit MODIFY_EXPR.  */
7266 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
7267 switch (TREE_CODE (t
))
7269 case PREINCREMENT_EXPR
:
7270 case POSTINCREMENT_EXPR
:
7272 tree decl
= TREE_OPERAND (t
, 0);
7273 /* c_omp_for_incr_canonicalize_ptr() should have been
7274 called to massage things appropriately. */
7275 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
7277 if (orig_for_stmt
!= for_stmt
)
7279 t
= build_int_cst (TREE_TYPE (decl
), 1);
7281 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
7282 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
7283 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
7284 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
7288 case PREDECREMENT_EXPR
:
7289 case POSTDECREMENT_EXPR
:
7290 /* c_omp_for_incr_canonicalize_ptr() should have been
7291 called to massage things appropriately. */
7292 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
7293 if (orig_for_stmt
!= for_stmt
)
7295 t
= build_int_cst (TREE_TYPE (decl
), -1);
7297 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
7298 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
7299 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
7300 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
/* MODIFY_EXPR form: substitute the surrogate var for decl inside the
   right-hand side PLUS/MINUS/POINTER_PLUS expression.  */
7304 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
7305 TREE_OPERAND (t
, 0) = var
;
7307 t
= TREE_OPERAND (t
, 1);
7308 switch (TREE_CODE (t
))
7311 if (TREE_OPERAND (t
, 1) == decl
)
7313 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
7314 TREE_OPERAND (t
, 0) = var
;
7320 case POINTER_PLUS_EXPR
:
7321 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
7322 TREE_OPERAND (t
, 0) = var
;
7328 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
7329 is_gimple_val
, fb_rvalue
);
7330 ret
= MIN (ret
, tret
);
/* Record the loop step on the linear clause, negating for MINUS_EXPR and
   converting for pointer IVs; re-gimplify if folding changed it.  */
7333 tree step
= TREE_OPERAND (t
, 1);
7334 tree stept
= TREE_TYPE (decl
);
7335 if (POINTER_TYPE_P (stept
))
7337 step
= fold_convert (stept
, step
);
7338 if (TREE_CODE (t
) == MINUS_EXPR
)
7339 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
7340 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
7341 if (step
!= TREE_OPERAND (t
, 1))
7343 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
7344 &for_pre_body
, NULL
,
7345 is_gimple_val
, fb_rvalue
);
7346 ret
= MIN (ret
, tret
);
7358 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
/* When a surrogate IV is in use, emit the final-value assignment of decl
   into the lastprivate/linear copy-out sequence of its clause.  */
7361 if ((var
!= decl
|| TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1)
7362 && orig_for_stmt
== for_stmt
)
7364 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
7365 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
7366 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
7367 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
7368 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
7369 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
7370 && OMP_CLAUSE_DECL (c
) == decl
)
7372 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
7373 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
7374 gcc_assert (TREE_OPERAND (t
, 0) == var
);
7375 t
= TREE_OPERAND (t
, 1);
7376 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
7377 || TREE_CODE (t
) == MINUS_EXPR
7378 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
7379 gcc_assert (TREE_OPERAND (t
, 0) == var
);
7380 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
), decl
,
7381 TREE_OPERAND (t
, 1));
7383 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
7384 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
7386 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
7387 gimplify_assign (decl
, t
, seq
);
7392 BITMAP_FREE (has_decl_expr
);
/* Gimplify the loop body of the ORIGINAL node into for_body.  */
7394 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt
), &for_body
);
/* For combined constructs, give each dimension a fresh private IV on the
   outer node as well.  */
7396 if (orig_for_stmt
!= for_stmt
)
7397 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
7399 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
7400 decl
= TREE_OPERAND (t
, 0);
7401 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
7402 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
7403 TREE_OPERAND (t
, 0) = var
;
7404 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
7405 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
7406 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
7409 gimplify_adjust_omp_clauses (pre_p
, &OMP_FOR_CLAUSES (orig_for_stmt
));
/* Map the GENERIC loop code to the GIMPLE_OMP_FOR kind.  */
7412 switch (TREE_CODE (orig_for_stmt
))
7414 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
7415 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
7416 case CILK_SIMD
: kind
= GF_OMP_FOR_KIND_CILKSIMD
; break;
7417 case CILK_FOR
: kind
= GF_OMP_FOR_KIND_CILKFOR
; break;
7418 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
7419 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
/* Build the GIMPLE_OMP_FOR and mark combined-construct relationships.  */
7423 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
7424 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
7426 if (orig_for_stmt
!= for_stmt
)
7427 gimple_omp_for_set_combined_p (gfor
, true);
7428 if (gimplify_omp_ctxp
7429 && (gimplify_omp_ctxp
->combined_loop
7430 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
7431 && gimplify_omp_ctxp
->outer_context
7432 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
7434 gimple_omp_for_set_combined_into_p (gfor
, true);
7435 if (gimplify_omp_ctxp
->combined_loop
)
7436 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
7438 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
/* Copy index/initial/cond/final/incr per dimension into the GIMPLE stmt.  */
7441 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
7443 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
7444 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
7445 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
7446 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
7447 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
7448 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
7449 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
7450 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
7453 gimplify_seq_add_stmt (pre_p
, gfor
);
7454 if (ret
!= GS_ALL_DONE
)
7456 *expr_p
= NULL_TREE
;
/* NOTE(review): mangled extraction — fragments carry original line numbers;
   several case labels (the OMP/OACC codes handled by the first switch and by
   the stmt-building switch), braces and the return are on missing lines.  */
7460 /* Gimplify the gross structure of several OMP constructs. */
7463 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
7465 tree expr
= *expr_p
;
7467 gimple_seq body
= NULL
;
7468 enum omp_region_type ort
;
/* Pick the region type from the construct code; the worksharing case labels
   preceding ORT_WORKSHARE are on missing lines.  */
7470 switch (TREE_CODE (expr
))
7474 ort
= ORT_WORKSHARE
;
7482 case OMP_TARGET_DATA
:
7483 ort
= ORT_TARGET_DATA
;
7486 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
7491 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
);
/* Target regions gimplify their body in a fresh context; target-data
   regions additionally wrap the body in a try/finally that calls the
   matching runtime "end data" builtin.  */
7492 if (ort
== ORT_TARGET
|| ort
== ORT_TARGET_DATA
)
7494 push_gimplify_context ();
7495 gimple g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
7496 if (gimple_code (g
) == GIMPLE_BIND
)
7497 pop_gimplify_context (g
);
7499 pop_gimplify_context (NULL
);
7500 if (ort
== ORT_TARGET_DATA
)
7502 enum built_in_function end_ix
;
7503 switch (TREE_CODE (expr
))
7506 end_ix
= BUILT_IN_GOACC_DATA_END
;
7508 case OMP_TARGET_DATA
:
7509 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
7514 tree fn
= builtin_decl_explicit (end_ix
);
7515 g
= gimple_build_call (fn
, 0);
7516 gimple_seq cleanup
= NULL
;
7517 gimple_seq_add_stmt (&cleanup
, g
);
7518 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
7520 gimple_seq_add_stmt (&body
, g
);
7524 gimplify_and_add (OMP_BODY (expr
), &body
);
7525 gimplify_adjust_omp_clauses (pre_p
, &OMP_CLAUSES (expr
));
/* Build the matching GIMPLE statement for each construct kind (several case
   labels are on missing lines — presumably OACC_DATA/KERNELS/PARALLEL,
   OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TEAMS — TODO confirm).  */
7527 switch (TREE_CODE (expr
))
7530 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
7531 OMP_CLAUSES (expr
));
7534 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
7535 OMP_CLAUSES (expr
));
7538 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
7539 OMP_CLAUSES (expr
));
7542 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
7545 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
7548 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
7549 OMP_CLAUSES (expr
));
7551 case OMP_TARGET_DATA
:
7552 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
7553 OMP_CLAUSES (expr
));
7556 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
7562 gimplify_seq_add_stmt (pre_p
, stmt
);
7563 *expr_p
= NULL_TREE
;
/* NOTE(review): mangled extraction — the declarations of `kind' and `stmt',
   the region-type argument to gimplify_scan_omp_clauses, braces and the
   return are on missing lines.  */
7566 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
7567 target update constructs. */
7570 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
7572 tree expr
= *expr_p
;
/* Map the construct to a GF_OMP_TARGET_KIND_* subcode.  */
7576 switch (TREE_CODE (expr
))
7578 case OACC_ENTER_DATA
:
7579 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
;
7581 case OACC_EXIT_DATA
:
7582 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
;
7585 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
7587 case OMP_TARGET_UPDATE
:
7588 kind
= GF_OMP_TARGET_KIND_UPDATE
;
/* Standalone constructs have no body: scan/adjust the clauses and emit a
   body-less GIMPLE_OMP_TARGET with the chosen subcode.  */
7593 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
7595 gimplify_adjust_omp_clauses (pre_p
, &OMP_STANDALONE_CLAUSES (expr
));
7596 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
7598 gimplify_seq_add_stmt (pre_p
, stmt
);
7599 *expr_p
= NULL_TREE
;
/* NOTE(review): mangled extraction — the loop construct around the
   conversion-stripping block, some braces and return statements are on
   missing lines.  */
7602 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
7603 stabilized the lhs of the atomic operation as *ADDR. Return true if
7604 EXPR is this stabilized form. */
7607 goa_lhs_expr_p (tree expr
, tree addr
)
7609 /* Also include casts to other type variants. The C front end is fond
7610 of adding these for e.g. volatile variables. This is like
7611 STRIP_TYPE_NOPS but includes the main variant lookup. */
7612 STRIP_USELESS_TYPE_CONVERSION (expr
);
/* An INDIRECT_REF lhs: strip matching useless conversions from both the
   dereferenced expression and ADDR, then compare the underlying objects.  */
7614 if (TREE_CODE (expr
) == INDIRECT_REF
)
7616 expr
= TREE_OPERAND (expr
, 0);
7618 && (CONVERT_EXPR_P (expr
)
7619 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
7620 && TREE_CODE (expr
) == TREE_CODE (addr
)
7621 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
7623 expr
= TREE_OPERAND (expr
, 0);
7624 addr
= TREE_OPERAND (addr
, 0);
7628 return (TREE_CODE (addr
) == ADDR_EXPR
7629 && TREE_CODE (expr
) == ADDR_EXPR
7630 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
/* Direct match: EXPR is the object whose address ADDR takes.  */
7632 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
/* NOTE(review): mangled extraction — the `lhs_var' parameter, the `saw_lhs'
   declaration, recursion depth arguments, several case labels, braces and
   returns are on missing lines.  */
7637 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
7638 expression does not involve the lhs, evaluate it into a temporary.
7639 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
7640 or -1 if an error was encountered. */
7643 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
7646 tree expr
= *expr_p
;
/* Base cases: the stabilized lhs itself, or something already a gimple
   value.  */
7649 if (goa_lhs_expr_p (expr
, lhs_addr
))
7654 if (is_gimple_val (expr
))
/* Recurse structurally by tree-code class, OR-ing whether any operand
   contained the lhs.  */
7658 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
7661 case tcc_comparison
:
7662 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
7665 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
7668 case tcc_expression
:
7669 switch (TREE_CODE (expr
))
7671 case TRUTH_ANDIF_EXPR
:
7672 case TRUTH_ORIF_EXPR
:
7673 case TRUTH_AND_EXPR
:
7675 case TRUTH_XOR_EXPR
:
7676 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
7678 case TRUTH_NOT_EXPR
:
7679 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
7683 /* Break out any preevaluations from cp_build_modify_expr. */
7684 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
7685 expr
= TREE_OPERAND (expr
, 1))
7686 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
7688 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
);
/* Lhs not found in a subexpression: force *EXPR_P into a gimple value
   (evaluating side effects into pre_p).  */
7699 enum gimplify_status gs
;
7700 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
7701 if (gs
!= GS_ALL_DONE
)
/* NOTE(review): mangled extraction — the `tmp_load' declaration, error
   returns, the lhs substitution of tmp_load into rhs, braces and the final
   return are on missing lines.  */
7708 /* Gimplify an OMP_ATOMIC statement. */
7710 static enum gimplify_status
7711 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
/* Operand 0 is the stabilized address of the atomic lhs; operand 1 (absent
   for OMP_ATOMIC_READ) is the rhs expression.  */
7713 tree addr
= TREE_OPERAND (*expr_p
, 0);
7714 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
7715 ? NULL
: TREE_OPERAND (*expr_p
, 1);
7716 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
7718 gomp_atomic_load
*loadstmt
;
7719 gomp_atomic_store
*storestmt
;
/* Stabilize the rhs, replacing occurrences of the lhs with tmp_load; a
   negative result indicates an error.  */
7721 tmp_load
= create_tmp_reg (type
);
7722 if (rhs
&& goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
) < 0)
7725 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
/* Emit the atomic load, then gimplify the rhs into a value.  */
7729 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
);
7730 gimplify_seq_add_stmt (pre_p
, loadstmt
);
7731 if (rhs
&& gimplify_expr (&rhs
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
7735 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
/* Emit the paired atomic store, propagate seq_cst, and set need-value on
   load/store according to the capture kind.  */
7737 storestmt
= gimple_build_omp_atomic_store (rhs
);
7738 gimplify_seq_add_stmt (pre_p
, storestmt
);
7739 if (OMP_ATOMIC_SEQ_CST (*expr_p
))
7741 gimple_omp_atomic_set_seq_cst (loadstmt
);
7742 gimple_omp_atomic_set_seq_cst (storestmt
);
7744 switch (TREE_CODE (*expr_p
))
7746 case OMP_ATOMIC_READ
:
7747 case OMP_ATOMIC_CAPTURE_OLD
:
7749 gimple_omp_atomic_set_need_value (loadstmt
);
7751 case OMP_ATOMIC_CAPTURE_NEW
:
7753 gimple_omp_atomic_set_need_value (storestmt
);
/* NOTE(review): mangled extraction — the declarations of `body_stmt' and
   `subcode' (with its zero initializer), the temp handling after emitting the
   statement, braces and the return are on missing lines.  */
7763 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
7764 body, and adding some EH bits. */
7766 static enum gimplify_status
7767 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
7769 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
7771 gtransaction
*trans_stmt
;
7772 gimple_seq body
= NULL
;
7775 /* Wrap the transaction body in a BIND_EXPR so we have a context
7776 where to put decls for OMP. */
7777 if (TREE_CODE (tbody
) != BIND_EXPR
)
7779 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
7780 TREE_SIDE_EFFECTS (bind
) = 1;
7781 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
7782 TRANSACTION_EXPR_BODY (expr
) = bind
;
/* Gimplify the (possibly voidified) body in its own context.  */
7785 push_gimplify_context ();
7786 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
7788 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
7789 pop_gimplify_context (body_stmt
);
/* Build the GIMPLE_TRANSACTION and record outer/relaxed subcodes.  */
7791 trans_stmt
= gimple_build_transaction (body
, NULL
);
7792 if (TRANSACTION_EXPR_OUTER (expr
))
7793 subcode
= GTMA_IS_OUTER
;
7794 else if (TRANSACTION_EXPR_RELAXED (expr
))
7795 subcode
= GTMA_IS_RELAXED
;
7796 gimple_transaction_set_subcode (trans_stmt
, subcode
);
7798 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
7806 *expr_p
= NULL_TREE
;
7810 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
7811 expression produces a value to be used as an operand inside a GIMPLE
7812 statement, the value will be stored back in *EXPR_P. This value will
7813 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7814 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7815 emitted in PRE_P and POST_P.
7817 Additionally, this process may overwrite parts of the input
7818 expression during gimplification. Ideally, it should be
7819 possible to do non-destructive gimplification.
7821 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7822 the expression needs to evaluate to a value to be used as
7823 an operand in a GIMPLE statement, this value will be stored in
7824 *EXPR_P on exit. This happens when the caller specifies one
7825 of fb_lvalue or fb_rvalue fallback flags.
7827 PRE_P will contain the sequence of GIMPLE statements corresponding
7828 to the evaluation of EXPR and all the side-effects that must
7829 be executed before the main expression. On exit, the last
7830 statement of PRE_P is the core statement being gimplified. For
7831 instance, when gimplifying 'if (++a)' the last statement in
7832 PRE_P will be 'if (t.1)' where t.1 is the result of
7833 pre-incrementing 'a'.
7835 POST_P will contain the sequence of GIMPLE statements corresponding
7836 to the evaluation of all the side-effects that must be executed
7837 after the main expression. If this is NULL, the post
7838 side-effects are stored at the end of PRE_P.
7840 The reason why the output is split in two is to handle post
7841 side-effects explicitly. In some cases, an expression may have
7842 inner and outer post side-effects which need to be emitted in
7843 an order different from the one given by the recursive
7844 traversal. For instance, for the expression (*p--)++ the post
7845 side-effects of '--' must actually occur *after* the post
7846 side-effects of '++'. However, gimplification will first visit
7847 the inner expression, so if a separate POST sequence was not
7848 used, the resulting sequence would be:
7855 However, the post-decrement operation in line #2 must not be
7856 evaluated until after the store to *p at line #4, so the
7857 correct sequence should be:
7864 So, by specifying a separate post queue, it is possible
7865 to emit the post side-effects in the correct order.
7866 If POST_P is NULL, an internal queue will be used. Before
7867 returning to the caller, the sequence POST_P is appended to
7868 the main output sequence PRE_P.
7870 GIMPLE_TEST_F points to a function that takes a tree T and
7871 returns nonzero if T is in the GIMPLE form requested by the
7872 caller. The GIMPLE predicates are in gimple.c.
7874 FALLBACK tells the function what sort of a temporary we want if
7875 gimplification cannot produce an expression that complies with
7878 fb_none means that no temporary should be generated
7879 fb_rvalue means that an rvalue is OK to generate
7880 fb_lvalue means that an lvalue is OK to generate
7881 fb_either means that either is OK, but an lvalue is preferable.
7882 fb_mayfail means that gimplification may fail (in which case
7883 GS_ERROR will be returned)
7885 The return value is either GS_ERROR or GS_ALL_DONE, since this
7886 function iterates until EXPR is completely gimplified or an error
7889 enum gimplify_status
7890 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
7891 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
7894 gimple_seq internal_pre
= NULL
;
7895 gimple_seq internal_post
= NULL
;
7898 location_t saved_location
;
7899 enum gimplify_status ret
;
7900 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
7902 save_expr
= *expr_p
;
7903 if (save_expr
== NULL_TREE
)
7906 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7907 is_statement
= gimple_test_f
== is_gimple_stmt
;
7911 /* Consistency checks. */
7912 if (gimple_test_f
== is_gimple_reg
)
7913 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
7914 else if (gimple_test_f
== is_gimple_val
7915 || gimple_test_f
== is_gimple_call_addr
7916 || gimple_test_f
== is_gimple_condexpr
7917 || gimple_test_f
== is_gimple_mem_rhs
7918 || gimple_test_f
== is_gimple_mem_rhs_or_call
7919 || gimple_test_f
== is_gimple_reg_rhs
7920 || gimple_test_f
== is_gimple_reg_rhs_or_call
7921 || gimple_test_f
== is_gimple_asm_val
7922 || gimple_test_f
== is_gimple_mem_ref_addr
)
7923 gcc_assert (fallback
& fb_rvalue
);
7924 else if (gimple_test_f
== is_gimple_min_lval
7925 || gimple_test_f
== is_gimple_lvalue
)
7926 gcc_assert (fallback
& fb_lvalue
);
7927 else if (gimple_test_f
== is_gimple_addressable
)
7928 gcc_assert (fallback
& fb_either
);
7929 else if (gimple_test_f
== is_gimple_stmt
)
7930 gcc_assert (fallback
== fb_none
);
7933 /* We should have recognized the GIMPLE_TEST_F predicate to
7934 know what kind of fallback to use in case a temporary is
7935 needed to hold the value or address of *EXPR_P. */
7939 /* We used to check the predicate here and return immediately if it
7940 succeeds. This is wrong; the design is for gimplification to be
7941 idempotent, and for the predicates to only test for valid forms, not
7942 whether they are fully simplified. */
7944 pre_p
= &internal_pre
;
7947 post_p
= &internal_post
;
7949 /* Remember the last statements added to PRE_P and POST_P. Every
7950 new statement added by the gimplification helpers needs to be
7951 annotated with location information. To centralize the
7952 responsibility, we remember the last statement that had been
7953 added to both queues before gimplifying *EXPR_P. If
7954 gimplification produces new statements in PRE_P and POST_P, those
7955 statements will be annotated with the same location information
7957 pre_last_gsi
= gsi_last (*pre_p
);
7958 post_last_gsi
= gsi_last (*post_p
);
7960 saved_location
= input_location
;
7961 if (save_expr
!= error_mark_node
7962 && EXPR_HAS_LOCATION (*expr_p
))
7963 input_location
= EXPR_LOCATION (*expr_p
);
7965 /* Loop over the specific gimplifiers until the toplevel node
7966 remains the same. */
7969 /* Strip away as many useless type conversions as possible
7971 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
7973 /* Remember the expr. */
7974 save_expr
= *expr_p
;
7976 /* Die, die, die, my darling. */
7977 if (save_expr
== error_mark_node
7978 || (TREE_TYPE (save_expr
)
7979 && TREE_TYPE (save_expr
) == error_mark_node
))
7985 /* Do any language-specific gimplification. */
7986 ret
= ((enum gimplify_status
)
7987 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
7990 if (*expr_p
== NULL_TREE
)
7992 if (*expr_p
!= save_expr
)
7995 else if (ret
!= GS_UNHANDLED
)
7998 /* Make sure that all the cases set 'ret' appropriately. */
8000 switch (TREE_CODE (*expr_p
))
8002 /* First deal with the special cases. */
8004 case POSTINCREMENT_EXPR
:
8005 case POSTDECREMENT_EXPR
:
8006 case PREINCREMENT_EXPR
:
8007 case PREDECREMENT_EXPR
:
8008 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
8009 fallback
!= fb_none
,
8010 TREE_TYPE (*expr_p
));
8013 case VIEW_CONVERT_EXPR
:
8014 if (is_gimple_reg_type (TREE_TYPE (*expr_p
))
8015 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
8017 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
8018 post_p
, is_gimple_val
, fb_rvalue
);
8019 recalculate_side_effects (*expr_p
);
8025 case ARRAY_RANGE_REF
:
8029 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
8030 fallback
? fallback
: fb_rvalue
);
8034 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
8036 /* C99 code may assign to an array in a structure value of a
8037 conditional expression, and this has undefined behavior
8038 only on execution, so create a temporary if an lvalue is
8040 if (fallback
== fb_lvalue
)
8042 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
);
8043 mark_addressable (*expr_p
);
8049 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
8051 /* C99 code may assign to an array in a structure returned
8052 from a function, and this has undefined behavior only on
8053 execution, so create a temporary if an lvalue is
8055 if (fallback
== fb_lvalue
)
8057 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
);
8058 mark_addressable (*expr_p
);
8067 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
8070 case COMPOUND_LITERAL_EXPR
:
8071 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
8072 gimple_test_f
, fallback
);
8077 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
8078 fallback
!= fb_none
);
8081 case TRUTH_ANDIF_EXPR
:
8082 case TRUTH_ORIF_EXPR
:
8084 /* Preserve the original type of the expression and the
8085 source location of the outer expression. */
8086 tree org_type
= TREE_TYPE (*expr_p
);
8087 *expr_p
= gimple_boolify (*expr_p
);
8088 *expr_p
= build3_loc (input_location
, COND_EXPR
,
8092 org_type
, boolean_true_node
),
8095 org_type
, boolean_false_node
));
8100 case TRUTH_NOT_EXPR
:
8102 tree type
= TREE_TYPE (*expr_p
);
8103 /* The parsers are careful to generate TRUTH_NOT_EXPR
8104 only with operands that are always zero or one.
8105 We do not fold here but handle the only interesting case
8106 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
8107 *expr_p
= gimple_boolify (*expr_p
);
8108 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
8109 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
8110 TREE_TYPE (*expr_p
),
8111 TREE_OPERAND (*expr_p
, 0));
8113 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
8114 TREE_TYPE (*expr_p
),
8115 TREE_OPERAND (*expr_p
, 0),
8116 build_int_cst (TREE_TYPE (*expr_p
), 1));
8117 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
8118 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
8124 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
8129 tree cond
= TREE_OPERAND (*expr_p
, 0);
8130 tree kind
= TREE_OPERAND (*expr_p
, 1);
8131 tree type
= TREE_TYPE (cond
);
8132 if (!INTEGRAL_TYPE_P (type
))
8138 tree tmp
= create_tmp_var (type
);
8139 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
8141 = gimple_build_call_internal (IFN_ANNOTATE
, 2, cond
, kind
);
8142 gimple_call_set_lhs (call
, tmp
);
8143 gimplify_seq_add_stmt (pre_p
, call
);
8150 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
8154 if (IS_EMPTY_STMT (*expr_p
))
8160 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
8161 || fallback
== fb_none
)
8163 /* Just strip a conversion to void (or in void context) and
8165 *expr_p
= TREE_OPERAND (*expr_p
, 0);
8170 ret
= gimplify_conversion (expr_p
);
8171 if (ret
== GS_ERROR
)
8173 if (*expr_p
!= save_expr
)
8177 case FIX_TRUNC_EXPR
:
8178 /* unary_expr: ... | '(' cast ')' val | ... */
8179 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
8180 is_gimple_val
, fb_rvalue
);
8181 recalculate_side_effects (*expr_p
);
8186 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
8187 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
8188 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
8190 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
8191 if (*expr_p
!= save_expr
)
8197 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
8198 is_gimple_reg
, fb_rvalue
);
8199 if (ret
== GS_ERROR
)
8202 recalculate_side_effects (*expr_p
);
8203 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
8204 TREE_TYPE (*expr_p
),
8205 TREE_OPERAND (*expr_p
, 0),
8206 build_int_cst (saved_ptr_type
, 0));
8207 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
8208 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
8213 /* We arrive here through the various re-gimplifcation paths. */
8215 /* First try re-folding the whole thing. */
8216 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
8217 TREE_OPERAND (*expr_p
, 0),
8218 TREE_OPERAND (*expr_p
, 1));
8222 recalculate_side_effects (*expr_p
);
8226 /* Avoid re-gimplifying the address operand if it is already
8227 in suitable form. Re-gimplifying would mark the address
8228 operand addressable. Always gimplify when not in SSA form
8229 as we still may have to gimplify decls with value-exprs. */
8230 if (!gimplify_ctxp
|| !gimplify_ctxp
->into_ssa
8231 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
8233 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
8234 is_gimple_mem_ref_addr
, fb_rvalue
);
8235 if (ret
== GS_ERROR
)
8238 recalculate_side_effects (*expr_p
);
8242 /* Constants need not be gimplified. */
8249 /* Drop the overflow flag on constants, we do not want
8250 that in the GIMPLE IL. */
8251 if (TREE_OVERFLOW_P (*expr_p
))
8252 *expr_p
= drop_tree_overflow (*expr_p
);
8257 /* If we require an lvalue, such as for ADDR_EXPR, retain the
8258 CONST_DECL node. Otherwise the decl is replaceable by its
8260 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
8261 if (fallback
& fb_lvalue
)
8265 *expr_p
= DECL_INITIAL (*expr_p
);
8271 ret
= gimplify_decl_expr (expr_p
, pre_p
);
8275 ret
= gimplify_bind_expr (expr_p
, pre_p
);
8279 ret
= gimplify_loop_expr (expr_p
, pre_p
);
8283 ret
= gimplify_switch_expr (expr_p
, pre_p
);
8287 ret
= gimplify_exit_expr (expr_p
);
8291 /* If the target is not LABEL, then it is a computed jump
8292 and the target needs to be gimplified. */
8293 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
8295 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
8296 NULL
, is_gimple_val
, fb_rvalue
);
8297 if (ret
== GS_ERROR
)
8300 gimplify_seq_add_stmt (pre_p
,
8301 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
8306 gimplify_seq_add_stmt (pre_p
,
8307 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
8308 PREDICT_EXPR_OUTCOME (*expr_p
)));
8314 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
8315 == current_function_decl
);
8316 gimplify_seq_add_stmt (pre_p
,
8317 gimple_build_label (LABEL_EXPR_LABEL (*expr_p
)));
8320 case CASE_LABEL_EXPR
:
8321 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
8325 ret
= gimplify_return_expr (*expr_p
, pre_p
);
8329 /* Don't reduce this in place; let gimplify_init_constructor work its
8330 magic. Buf if we're just elaborating this for side effects, just
8331 gimplify any element that has side-effects. */
8332 if (fallback
== fb_none
)
8334 unsigned HOST_WIDE_INT ix
;
8336 tree temp
= NULL_TREE
;
8337 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
8338 if (TREE_SIDE_EFFECTS (val
))
8339 append_to_statement_list (val
, &temp
);
8342 ret
= temp
? GS_OK
: GS_ALL_DONE
;
8344 /* C99 code may assign to an array in a constructed
8345 structure or union, and this has undefined behavior only
8346 on execution, so create a temporary if an lvalue is
8348 else if (fallback
== fb_lvalue
)
8350 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
);
8351 mark_addressable (*expr_p
);
8358 /* The following are special cases that are not handled by the
8359 original GIMPLE grammar. */
8361 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
8364 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
8368 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
8369 post_p
, is_gimple_lvalue
, fb_either
);
8370 recalculate_side_effects (*expr_p
);
8373 case TARGET_MEM_REF
:
8375 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
8377 if (TMR_BASE (*expr_p
))
8378 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
8379 post_p
, is_gimple_mem_ref_addr
, fb_either
);
8380 if (TMR_INDEX (*expr_p
))
8381 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
8382 post_p
, is_gimple_val
, fb_rvalue
);
8383 if (TMR_INDEX2 (*expr_p
))
8384 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
8385 post_p
, is_gimple_val
, fb_rvalue
);
8386 /* TMR_STEP and TMR_OFFSET are always integer constants. */
8391 case NON_LVALUE_EXPR
:
8392 /* This should have been stripped above. */
8396 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
8399 case TRY_FINALLY_EXPR
:
8400 case TRY_CATCH_EXPR
:
8402 gimple_seq eval
, cleanup
;
8405 /* Calls to destructors are generated automatically in FINALLY/CATCH
8406 block. They should have location as UNKNOWN_LOCATION. However,
8407 gimplify_call_expr will reset these call stmts to input_location
8408 if it finds stmt's location is unknown. To prevent resetting for
8409 destructors, we set the input_location to unknown.
8410 Note that this only affects the destructor calls in FINALLY/CATCH
8411 block, and will automatically reset to its original value by the
8412 end of gimplify_expr. */
8413 input_location
= UNKNOWN_LOCATION
;
8414 eval
= cleanup
= NULL
;
8415 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
8416 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
8417 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
8418 if (gimple_seq_empty_p (cleanup
))
8420 gimple_seq_add_seq (pre_p
, eval
);
8424 try_
= gimple_build_try (eval
, cleanup
,
8425 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
8426 ? GIMPLE_TRY_FINALLY
8427 : GIMPLE_TRY_CATCH
);
8428 if (EXPR_HAS_LOCATION (save_expr
))
8429 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
8430 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
8431 gimple_set_location (try_
, saved_location
);
8432 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
8433 gimple_try_set_catch_is_cleanup (try_
,
8434 TRY_CATCH_IS_CLEANUP (*expr_p
));
8435 gimplify_seq_add_stmt (pre_p
, try_
);
8440 case CLEANUP_POINT_EXPR
:
8441 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
8445 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
8451 gimple_seq handler
= NULL
;
8452 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
8453 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
8454 gimplify_seq_add_stmt (pre_p
, c
);
8459 case EH_FILTER_EXPR
:
8462 gimple_seq failure
= NULL
;
8464 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
8465 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
8466 gimple_set_no_warning (ehf
, TREE_NO_WARNING (*expr_p
));
8467 gimplify_seq_add_stmt (pre_p
, ehf
);
8474 enum gimplify_status r0
, r1
;
8475 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
8476 post_p
, is_gimple_val
, fb_rvalue
);
8477 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
8478 post_p
, is_gimple_val
, fb_rvalue
);
8479 TREE_SIDE_EFFECTS (*expr_p
) = 0;
8485 /* We get here when taking the address of a label. We mark
8486 the label as "forced"; meaning it can never be removed and
8487 it is a potential target for any computed goto. */
8488 FORCED_LABEL (*expr_p
) = 1;
8492 case STATEMENT_LIST
:
8493 ret
= gimplify_statement_list (expr_p
, pre_p
);
8496 case WITH_SIZE_EXPR
:
8498 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
8499 post_p
== &internal_post
? NULL
: post_p
,
8500 gimple_test_f
, fallback
);
8501 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
8502 is_gimple_val
, fb_rvalue
);
8509 ret
= gimplify_var_or_parm_decl (expr_p
);
8513 /* When within an OMP context, notice uses of variables. */
8514 if (gimplify_omp_ctxp
)
8515 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
8520 /* Allow callbacks into the gimplifier during optimization. */
8525 gimplify_omp_parallel (expr_p
, pre_p
);
8530 gimplify_omp_task (expr_p
, pre_p
);
8538 case OMP_DISTRIBUTE
:
8540 ret
= gimplify_omp_for (expr_p
, pre_p
);
8544 gimplify_oacc_cache (expr_p
, pre_p
);
8548 case OACC_HOST_DATA
:
8550 sorry ("directive not yet implemented");
8555 if (OACC_KERNELS_COMBINED (*expr_p
))
8556 sorry ("directive not yet implemented");
8558 gimplify_omp_workshare (expr_p
, pre_p
);
8563 if (OACC_PARALLEL_COMBINED (*expr_p
))
8564 sorry ("directive not yet implemented");
8566 gimplify_omp_workshare (expr_p
, pre_p
);
8574 case OMP_TARGET_DATA
:
8576 gimplify_omp_workshare (expr_p
, pre_p
);
8580 case OACC_ENTER_DATA
:
8581 case OACC_EXIT_DATA
:
8583 case OMP_TARGET_UPDATE
:
8584 gimplify_omp_target_update (expr_p
, pre_p
);
8594 gimple_seq body
= NULL
;
8597 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
8598 switch (TREE_CODE (*expr_p
))
8601 g
= gimple_build_omp_section (body
);
8604 g
= gimple_build_omp_master (body
);
8608 gimple_seq cleanup
= NULL
;
8610 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
8611 g
= gimple_build_call (fn
, 0);
8612 gimple_seq_add_stmt (&cleanup
, g
);
8613 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
8615 gimple_seq_add_stmt (&body
, g
);
8616 g
= gimple_build_omp_taskgroup (body
);
8620 g
= gimple_build_omp_ordered (body
);
8623 g
= gimple_build_omp_critical (body
,
8624 OMP_CRITICAL_NAME (*expr_p
));
8629 gimplify_seq_add_stmt (pre_p
, g
);
8635 case OMP_ATOMIC_READ
:
8636 case OMP_ATOMIC_CAPTURE_OLD
:
8637 case OMP_ATOMIC_CAPTURE_NEW
:
8638 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
8641 case TRANSACTION_EXPR
:
8642 ret
= gimplify_transaction (expr_p
, pre_p
);
8645 case TRUTH_AND_EXPR
:
8647 case TRUTH_XOR_EXPR
:
8649 tree orig_type
= TREE_TYPE (*expr_p
);
8650 tree new_type
, xop0
, xop1
;
8651 *expr_p
= gimple_boolify (*expr_p
);
8652 new_type
= TREE_TYPE (*expr_p
);
8653 if (!useless_type_conversion_p (orig_type
, new_type
))
8655 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
8660 /* Boolified binary truth expressions are semantically equivalent
8661 to bitwise binary expressions. Canonicalize them to the
8663 switch (TREE_CODE (*expr_p
))
8665 case TRUTH_AND_EXPR
:
8666 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
8669 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
8671 case TRUTH_XOR_EXPR
:
8672 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
8677 /* Now make sure that operands have compatible type to
8678 expression's new_type. */
8679 xop0
= TREE_OPERAND (*expr_p
, 0);
8680 xop1
= TREE_OPERAND (*expr_p
, 1);
8681 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
8682 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
8685 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
8686 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
8689 /* Continue classified as tcc_binary. */
8696 /* Classified as tcc_expression. */
8699 case POINTER_PLUS_EXPR
:
8701 enum gimplify_status r0
, r1
;
8702 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
8703 post_p
, is_gimple_val
, fb_rvalue
);
8704 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
8705 post_p
, is_gimple_val
, fb_rvalue
);
8706 recalculate_side_effects (*expr_p
);
8711 case CILK_SYNC_STMT
:
8713 if (!fn_contains_cilk_spawn_p (cfun
))
8715 error_at (EXPR_LOCATION (*expr_p
),
8716 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8721 gimplify_cilk_sync (expr_p
, pre_p
);
8728 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
8730 case tcc_comparison
:
8731 /* Handle comparison of objects of non scalar mode aggregates
8732 with a call to memcmp. It would be nice to only have to do
8733 this for variable-sized objects, but then we'd have to allow
8734 the same nest of reference nodes we allow for MODIFY_EXPR and
8737 Compare scalar mode aggregates as scalar mode values. Using
8738 memcmp for them would be very inefficient at best, and is
8739 plain wrong if bitfields are involved. */
8741 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
8743 /* Vector comparisons need no boolification. */
8744 if (TREE_CODE (type
) == VECTOR_TYPE
)
8746 else if (!AGGREGATE_TYPE_P (type
))
8748 tree org_type
= TREE_TYPE (*expr_p
);
8749 *expr_p
= gimple_boolify (*expr_p
);
8750 if (!useless_type_conversion_p (org_type
,
8751 TREE_TYPE (*expr_p
)))
8753 *expr_p
= fold_convert_loc (input_location
,
8760 else if (TYPE_MODE (type
) != BLKmode
)
8761 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
8763 ret
= gimplify_variable_sized_compare (expr_p
);
8768 /* If *EXPR_P does not need to be special-cased, handle it
8769 according to its class. */
8771 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
8772 post_p
, is_gimple_val
, fb_rvalue
);
8778 enum gimplify_status r0
, r1
;
8780 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
8781 post_p
, is_gimple_val
, fb_rvalue
);
8782 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
8783 post_p
, is_gimple_val
, fb_rvalue
);
8791 enum gimplify_status r0
, r1
, r2
;
8793 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
8794 post_p
, is_gimple_val
, fb_rvalue
);
8795 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
8796 post_p
, is_gimple_val
, fb_rvalue
);
8797 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
8798 post_p
, is_gimple_val
, fb_rvalue
);
8800 ret
= MIN (MIN (r0
, r1
), r2
);
8804 case tcc_declaration
:
8807 goto dont_recalculate
;
8813 recalculate_side_effects (*expr_p
);
8819 gcc_assert (*expr_p
|| ret
!= GS_OK
);
8821 while (ret
== GS_OK
);
8823 /* If we encountered an error_mark somewhere nested inside, either
8824 stub out the statement or propagate the error back out. */
8825 if (ret
== GS_ERROR
)
8832 /* This was only valid as a return value from the langhook, which
8833 we handled. Make sure it doesn't escape from any other context. */
8834 gcc_assert (ret
!= GS_UNHANDLED
);
8836 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
8838 /* We aren't looking for a value, and we don't have a valid
8839 statement. If it doesn't have side-effects, throw it away. */
8840 if (!TREE_SIDE_EFFECTS (*expr_p
))
8842 else if (!TREE_THIS_VOLATILE (*expr_p
))
8844 /* This is probably a _REF that contains something nested that
8845 has side effects. Recurse through the operands to find it. */
8846 enum tree_code code
= TREE_CODE (*expr_p
);
8853 case VIEW_CONVERT_EXPR
:
8854 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
8855 gimple_test_f
, fallback
);
8859 case ARRAY_RANGE_REF
:
8860 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
8861 gimple_test_f
, fallback
);
8862 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
8863 gimple_test_f
, fallback
);
8867 /* Anything else with side-effects must be converted to
8868 a valid statement before we get here. */
8874 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
8875 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
)
8877 /* Historically, the compiler has treated a bare reference
8878 to a non-BLKmode volatile lvalue as forcing a load. */
8879 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
8881 /* Normally, we do not want to create a temporary for a
8882 TREE_ADDRESSABLE type because such a type should not be
8883 copied by bitwise-assignment. However, we make an
8884 exception here, as all we are doing here is ensuring that
8885 we read the bytes that make up the type. We use
8886 create_tmp_var_raw because create_tmp_var will abort when
8887 given a TREE_ADDRESSABLE type. */
8888 tree tmp
= create_tmp_var_raw (type
, "vol");
8889 gimple_add_tmp_var (tmp
);
8890 gimplify_assign (tmp
, *expr_p
, pre_p
);
8894 /* We can't do anything useful with a volatile reference to
8895 an incomplete type, so just throw it away. Likewise for
8896 a BLKmode type, since any implicit inner load should
8897 already have been turned into an explicit one by the
8898 gimplification process. */
8902 /* If we are gimplifying at the statement level, we're done. Tack
8903 everything together and return. */
8904 if (fallback
== fb_none
|| is_statement
)
8906 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8907 it out for GC to reclaim it. */
8908 *expr_p
= NULL_TREE
;
8910 if (!gimple_seq_empty_p (internal_pre
)
8911 || !gimple_seq_empty_p (internal_post
))
8913 gimplify_seq_add_seq (&internal_pre
, internal_post
);
8914 gimplify_seq_add_seq (pre_p
, internal_pre
);
8917 /* The result of gimplifying *EXPR_P is going to be the last few
8918 statements in *PRE_P and *POST_P. Add location information
8919 to all the statements that were added by the gimplification
8921 if (!gimple_seq_empty_p (*pre_p
))
8922 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
8924 if (!gimple_seq_empty_p (*post_p
))
8925 annotate_all_with_location_after (*post_p
, post_last_gsi
,
8931 #ifdef ENABLE_GIMPLE_CHECKING
8934 enum tree_code code
= TREE_CODE (*expr_p
);
8935 /* These expressions should already be in gimple IR form. */
8936 gcc_assert (code
!= MODIFY_EXPR
8938 && code
!= BIND_EXPR
8939 && code
!= CATCH_EXPR
8940 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
8941 && code
!= EH_FILTER_EXPR
8942 && code
!= GOTO_EXPR
8943 && code
!= LABEL_EXPR
8944 && code
!= LOOP_EXPR
8945 && code
!= SWITCH_EXPR
8946 && code
!= TRY_FINALLY_EXPR
8947 && code
!= OACC_PARALLEL
8948 && code
!= OACC_KERNELS
8949 && code
!= OACC_DATA
8950 && code
!= OACC_HOST_DATA
8951 && code
!= OACC_DECLARE
8952 && code
!= OACC_UPDATE
8953 && code
!= OACC_ENTER_DATA
8954 && code
!= OACC_EXIT_DATA
8955 && code
!= OACC_CACHE
8956 && code
!= OMP_CRITICAL
8958 && code
!= OACC_LOOP
8959 && code
!= OMP_MASTER
8960 && code
!= OMP_TASKGROUP
8961 && code
!= OMP_ORDERED
8962 && code
!= OMP_PARALLEL
8963 && code
!= OMP_SECTIONS
8964 && code
!= OMP_SECTION
8965 && code
!= OMP_SINGLE
);
8969 /* Otherwise we're gimplifying a subexpression, so the resulting
8970 value is interesting. If it's a valid operand that matches
8971 GIMPLE_TEST_F, we're done. Unless we are handling some
8972 post-effects internally; if that's the case, we need to copy into
8973 a temporary before adding the post-effects to POST_P. */
8974 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
8977 /* Otherwise, we need to create a new temporary for the gimplified
8980 /* We can't return an lvalue if we have an internal postqueue. The
8981 object the lvalue refers to would (probably) be modified by the
8982 postqueue; we need to copy the value out first, which means an
8984 if ((fallback
& fb_lvalue
)
8985 && gimple_seq_empty_p (internal_post
)
8986 && is_gimple_addressable (*expr_p
))
8988 /* An lvalue will do. Take the address of the expression, store it
8989 in a temporary, and replace the expression with an INDIRECT_REF of
8991 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
8992 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
8993 *expr_p
= build_simple_mem_ref (tmp
);
8995 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
8997 /* An rvalue will do. Assign the gimplified expression into a
8998 new temporary TMP and replace the original expression with
8999 TMP. First, make sure that the expression has a type so that
9000 it can be assigned into a temporary. */
9001 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
9002 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
9006 #ifdef ENABLE_GIMPLE_CHECKING
9007 if (!(fallback
& fb_mayfail
))
9009 fprintf (stderr
, "gimplification failed:\n");
9010 print_generic_expr (stderr
, *expr_p
, 0);
9011 debug_tree (*expr_p
);
9012 internal_error ("gimplification failed");
9015 gcc_assert (fallback
& fb_mayfail
);
9017 /* If this is an asm statement, and the user asked for the
9018 impossible, don't die. Fail and let gimplify_asm_expr
9024 /* Make sure the temporary matches our predicate. */
9025 gcc_assert ((*gimple_test_f
) (*expr_p
));
9027 if (!gimple_seq_empty_p (internal_post
))
9029 annotate_all_with_location (internal_post
, input_location
);
9030 gimplify_seq_add_seq (pre_p
, internal_post
);
9034 input_location
= saved_location
;
9038 /* Look through TYPE for variable-sized objects and gimplify each such
9039 size that we find. Add to LIST_P any statements generated. */
9042 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
/* Nothing to do for a null or erroneous type.  */
9046 if (type
== NULL
|| type
== error_mark_node
)
9049 /* We first do the main variant, then copy into any other variants. */
9050 type
= TYPE_MAIN_VARIANT (type
)
;
9052 /* Avoid infinite recursion. */
9053 if (TYPE_SIZES_GIMPLIFIED (type
))
/* Mark the type up front so the recursive calls below treat it as done.  */
9056 TYPE_SIZES_GIMPLIFIED (type
) = 1;
/* Dispatch on the kind of type; which size/bound trees need gimplifying
   differs per tree code.  NOTE(review): several case labels are elided in
   this excerpt, so the arm boundaries below are partly inferred.  */
9058 switch (TREE_CODE (type
))
/* Scalar-like arm (last visible label FIXED_POINT_TYPE): gimplify the
   MIN/MAX bounds and propagate the resulting trees to all variants.  */
9064 case FIXED_POINT_TYPE
:
9065 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
9066 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
9068 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
9070 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
9071 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
/* Presumably the ARRAY_TYPE arm (its case label is not visible here):
   recurse on the element type and the index domain.  */
9076 /* These types may not have declarations, so handle them here. */
9077 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
9078 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
9079 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
9080 with assigned stack slots, for -O1+ -g they should be tracked
9082 if (!(TYPE_NAME (type
)
9083 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
9084 && DECL_IGNORED_P (TYPE_NAME (type
)))
9085 && TYPE_DOMAIN (type
)
9086 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
/* Un-ignore artificial VAR_DECL bounds so debug info can keep them.  */
9088 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
9089 if (t
&& TREE_CODE (t
) == VAR_DECL
&& DECL_ARTIFICIAL (t
))
9090 DECL_IGNORED_P (t
) = 0;
9091 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
9092 if (t
&& TREE_CODE (t
) == VAR_DECL
&& DECL_ARTIFICIAL (t
))
9093 DECL_IGNORED_P (t
) = 0;
/* Aggregate arm (only the QUAL_UNION_TYPE label is visible; sibling
   record/union labels are elided): gimplify each field's offset and
   sizes, then recurse into the field types.  */
9099 case QUAL_UNION_TYPE
:
9100 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
9101 if (TREE_CODE (field
) == FIELD_DECL
)
9103 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
9104 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
9105 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
9106 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
/* Pointer/reference arm: deliberately does NOT recurse; see below.  */
9111 case REFERENCE_TYPE
:
9112 /* We used to recurse on the pointed-to type here, which turned out to
9113 be incorrect because its definition might refer to variables not
9114 yet initialized at this point if a forward declaration is involved.
9116 It was actually useful for anonymous pointed-to types to ensure
9117 that the sizes evaluation dominates every possible later use of the
9118 values. Restricting to such types here would be safe since there
9119 is no possible forward declaration around, but would introduce an
9120 undesirable middle-end semantic to anonymity. We then defer to
9121 front-ends the responsibility of ensuring that the sizes are
9122 evaluated both early and late enough, e.g. by attaching artificial
9123 type declarations to the tree. */
/* Common tail: gimplify the type's overall size trees and copy the
   results (and the gimplified flag) into every variant.  */
9130 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
9131 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
9133 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
9135 TYPE_SIZE (t
) = TYPE_SIZE (type
);
9136 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
9137 TYPE_SIZES_GIMPLIFIED (t
) = 1;
9141 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
9142 a size or position, has had all of its SAVE_EXPRs evaluated.
9143 We add any required statements to *STMT_P. */
9146 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
9148 tree expr
= *expr_p
;
9150 /* We don't do anything if the value isn't there, is constant, or contains
9151 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
9152 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
9153 will want to replace it with a new variable, but that will cause problems
9154 if this type is from outside the function. It's OK to have that here. */
9155 if (is_gimple_sizepos (expr
))
/* Unshare before gimplifying: size trees can be shared between types,
   and gimplification rewrites the tree in place.  */
9158 *expr_p
= unshare_expr (expr
)
;
/* Reduce the size/position expression to a GIMPLE rvalue, emitting any
   required statements into *STMT_P (no post-queue is needed here).  */
9160 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
);
9163 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
9164 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
9165 is true, also gimplify the parameters. */
9168 gimplify_body (tree fndecl
, bool do_parms
)
/* Save input_location; it is restored at the end of this function.  */
9170 location_t saved_location
= input_location
;
9171 gimple_seq parm_stmts
, seq
;
9174 struct cgraph_node
*cgn
;
9176 timevar_push (TV_TREE_GIMPLIFY
);
9178 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
9180 default_rtl_profile ();
/* No gimplify context may be active yet; open a fresh one.  */
9182 gcc_assert (gimplify_ctxp
== NULL
);
9183 push_gimplify_context ();
/* For an "omp declare target" function under OpenACC/OpenMP, open a
   target OMP context so variable uses are noticed during gimplification.  */
9185 if (flag_openacc
|| flag_openmp
)
9187 gcc_assert (gimplify_omp_ctxp
== NULL
);
9188 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
9189 gimplify_omp_ctxp
= new_omp_context (ORT_TARGET
);
9192 /* Unshare most shared trees in the body and in that of any nested functions.
9193 It would seem we don't have to do this for nested functions because
9194 they are supposed to be output and then the outer function gimplified
9195 first, but the g++ front end doesn't always do it that way. */
9196 unshare_body (fndecl
);
9197 unvisit_body (fndecl
);
/* If FNDECL is a nested function, prepare a set to record VLAs whose
   sizes are referenced non-locally.  */
9199 cgn
= cgraph_node::get (fndecl
);
9200 if (cgn
&& cgn
->origin
)
9201 nonlocal_vlas
= new hash_set
<tree
>;
9203 /* Make sure input_location isn't set to something weird. */
9204 input_location
= DECL_SOURCE_LOCATION (fndecl
);
9206 /* Resolve callee-copies. This has to be done before processing
9207 the body so that DECL_VALUE_EXPR gets processed correctly. */
9208 parm_stmts
= do_parms
? gimplify_parameters () : NULL
;
9210 /* Gimplify the function's body. */
9212 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
9213 outer_stmt
= gimple_seq_first_stmt (seq
);
/* An empty body is replaced by a single GIMPLE_NOP so SEQ is never
   empty (the guarding condition is elided in this excerpt).  */
9216 outer_stmt
= gimple_build_nop ();
9217 gimplify_seq_add_stmt (&seq
, outer_stmt
);
9220 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
9221 not the case, wrap everything in a GIMPLE_BIND to make it so. */
9222 if (gimple_code (outer_stmt
) == GIMPLE_BIND
9223 && gimple_seq_first (seq
) == gimple_seq_last (seq
))
9224 outer_bind
= as_a
<gbind
*> (outer_stmt
);
9226 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
/* The GENERIC body has been consumed; clear it so GC can reclaim it.  */
9228 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
9230 /* If we had callee-copies statements, insert them at the beginning
9231 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
9232 if (!gimple_seq_empty_p (parm_stmts
))
9236 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
9237 gimple_bind_set_body (outer_bind
, parm_stmts
);
9239 for (parm
= DECL_ARGUMENTS (current_function_decl
);
9240 parm
; parm
= DECL_CHAIN (parm
))
9241 if (DECL_HAS_VALUE_EXPR_P (parm
))
9243 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
9244 DECL_IGNORED_P (parm
) = 0;
/* Flush any non-local VLA bookkeeping collected above into the
   appropriate BLOCK_VARS / bind-vars chains.  */
9250 if (nonlocal_vla_vars
)
9252 /* tree-nested.c may later on call declare_vars (..., true);
9253 which relies on BLOCK_VARS chain to be the tail of the
9254 gimple_bind_vars chain. Ensure we don't violate that
9256 if (gimple_bind_block (outer_bind
)
9257 == DECL_INITIAL (current_function_decl
))
9258 declare_vars (nonlocal_vla_vars
, outer_bind
, true);
9260 BLOCK_VARS (DECL_INITIAL (current_function_decl
))
9261 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl
)),
9263 nonlocal_vla_vars
= NULL_TREE
;
9265 delete nonlocal_vlas
;
9266 nonlocal_vlas
= NULL
;
/* Tear down the OMP context opened for "declare target" (if any).  */
9269 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
9270 && gimplify_omp_ctxp
)
9272 delete_omp_context (gimplify_omp_ctxp
);
9273 gimplify_omp_ctxp
= NULL
;
9276 pop_gimplify_context (outer_bind
);
9277 gcc_assert (gimplify_ctxp
== NULL
);
/* With checking enabled, verify the produced GIMPLE is well-formed.  */
9279 #ifdef ENABLE_CHECKING
9281 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
9284 timevar_pop (TV_TREE_GIMPLIFY
);
9285 input_location
= saved_location
;
9290 typedef char *char_p
; /* For DEF_VEC_P. */
9292 /* Return whether we should exclude FNDECL from instrumentation. */
9295 flag_instrument_functions_exclude_p (tree fndecl
)
/* First consult the exclude-functions name list
   (-finstrument-functions-exclude-function-list): a substring match
   against the function's printable name excludes it.  */
9299 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
9300 if (v
&& v
->length () > 0)
9306 name
= lang_hooks
.decl_printable_name (fndecl
, 0);
9307 FOR_EACH_VEC_ELT (*v
, i
, s
)
9308 if (strstr (name
, s
) != NULL
)
/* Then the exclude-files list
   (-finstrument-functions-exclude-file-list): a substring match
   against the declaration's source file name excludes it.  */
9312 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
9313 if (v
&& v
->length () > 0)
9319 name
= DECL_SOURCE_FILE (fndecl
);
9320 FOR_EACH_VEC_ELT (*v
, i
, s
)
9321 if (strstr (name
, s
) != NULL
)
9328 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
9329 node for the function we want to gimplify.
9331 Return the sequence of GIMPLE statements corresponding to the body
9335 gimplify_function_tree (tree fndecl
)
9341 gcc_assert (!gimple_body (fndecl
));
9343 if (DECL_STRUCT_FUNCTION (fndecl
))
9344 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
9346 push_struct_function (fndecl
);
9348 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
9350 cfun
->curr_properties
|= PROP_gimple_lva
;
9352 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
9354 /* Preliminarily mark non-addressed complex variables as eligible
9355 for promotion to gimple registers. We'll transform their uses
9357 if ((TREE_CODE (TREE_TYPE (parm
)) == COMPLEX_TYPE
9358 || TREE_CODE (TREE_TYPE (parm
)) == VECTOR_TYPE
)
9359 && !TREE_THIS_VOLATILE (parm
)
9360 && !needs_to_live_in_memory (parm
))
9361 DECL_GIMPLE_REG_P (parm
) = 1;
9364 ret
= DECL_RESULT (fndecl
);
9365 if ((TREE_CODE (TREE_TYPE (ret
)) == COMPLEX_TYPE
9366 || TREE_CODE (TREE_TYPE (ret
)) == VECTOR_TYPE
)
9367 && !needs_to_live_in_memory (ret
))
9368 DECL_GIMPLE_REG_P (ret
) = 1;
9370 bind
= gimplify_body (fndecl
, true);
9372 /* The tree body of the function is no longer needed, replace it
9373 with the new GIMPLE body. */
9375 gimple_seq_add_stmt (&seq
, bind
);
9376 gimple_set_body (fndecl
, seq
);
9378 /* If we're instrumenting function entry/exit, then prepend the call to
9379 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
9380 catch the exit hook. */
9381 /* ??? Add some way to ignore exceptions for this TFE. */
9382 if (flag_instrument_function_entry_exit
9383 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
9384 && !flag_instrument_functions_exclude_p (fndecl
))
9389 gimple_seq cleanup
= NULL
, body
= NULL
;
9393 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
9394 call
= gimple_build_call (x
, 1, integer_zero_node
);
9395 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
9396 gimple_call_set_lhs (call
, tmp_var
);
9397 gimplify_seq_add_stmt (&cleanup
, call
);
9398 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT
);
9399 call
= gimple_build_call (x
, 2,
9400 build_fold_addr_expr (current_function_decl
),
9402 gimplify_seq_add_stmt (&cleanup
, call
);
9403 tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
9405 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
9406 call
= gimple_build_call (x
, 1, integer_zero_node
);
9407 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
9408 gimple_call_set_lhs (call
, tmp_var
);
9409 gimplify_seq_add_stmt (&body
, call
);
9410 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER
);
9411 call
= gimple_build_call (x
, 2,
9412 build_fold_addr_expr (current_function_decl
),
9414 gimplify_seq_add_stmt (&body
, call
);
9415 gimplify_seq_add_stmt (&body
, tf
);
9416 new_bind
= gimple_build_bind (NULL
, body
, gimple_bind_block (bind
));
9417 /* Clear the block for BIND, since it is no longer directly inside
9418 the function, but within a try block. */
9419 gimple_bind_set_block (bind
, NULL
);
9421 /* Replace the current function body with the body
9422 wrapped in the try/finally TF. */
9424 gimple_seq_add_stmt (&seq
, new_bind
);
9425 gimple_set_body (fndecl
, seq
);
9429 if ((flag_sanitize
& SANITIZE_THREAD
) != 0
9430 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl
)))
9432 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
9433 gimple tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
9434 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, gimple_bind_block (bind
));
9435 /* Clear the block for BIND, since it is no longer directly inside
9436 the function, but within a try block. */
9437 gimple_bind_set_block (bind
, NULL
);
9438 /* Replace the current function body with the body
9439 wrapped in the try/finally TF. */
9441 gimple_seq_add_stmt (&seq
, new_bind
);
9442 gimple_set_body (fndecl
, seq
);
9445 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
9446 cfun
->curr_properties
|= PROP_gimple_any
;
9451 /* Return a dummy expression of type TYPE in order to keep going after an
9455 dummy_object (tree type
)
9457 tree t
= build_int_cst (build_pointer_type (type
), 0);
9458 return build2 (MEM_REF
, type
, t
, t
);
9461 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
9462 builtin function, but a very special sort of operator. */
9464 enum gimplify_status
9465 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
,
9466 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
9468 tree promoted_type
, have_va_type
;
9469 tree valist
= TREE_OPERAND (*expr_p
, 0);
9470 tree type
= TREE_TYPE (*expr_p
);
9472 location_t loc
= EXPR_LOCATION (*expr_p
);
9474 /* Verify that valist is of the proper type. */
9475 have_va_type
= TREE_TYPE (valist
);
9476 if (have_va_type
== error_mark_node
)
9478 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
9480 if (have_va_type
== NULL_TREE
)
9482 error_at (loc
, "first argument to %<va_arg%> not of type %<va_list%>");
9486 /* Generate a diagnostic for requesting data of a type that cannot
9487 be passed through `...' due to type promotion at the call site. */
9488 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
9491 static bool gave_help
;
9494 /* Unfortunately, this is merely undefined, rather than a constraint
9495 violation, so we cannot make this an error. If this call is never
9496 executed, the program is still strictly conforming. */
9497 warned
= warning_at (loc
, 0,
9498 "%qT is promoted to %qT when passed through %<...%>",
9499 type
, promoted_type
);
9500 if (!gave_help
&& warned
)
9503 inform (loc
, "(so you should pass %qT not %qT to %<va_arg%>)",
9504 promoted_type
, type
);
9507 /* We can, however, treat "undefined" any way we please.
9508 Call abort to encourage the user to fix the program. */
9510 inform (loc
, "if this code is reached, the program will abort");
9511 /* Before the abort, allow the evaluation of the va_list
9512 expression to exit or longjmp. */
9513 gimplify_and_add (valist
, pre_p
);
9514 t
= build_call_expr_loc (loc
,
9515 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
9516 gimplify_and_add (t
, pre_p
);
9518 /* This is dead code, but go ahead and finish so that the
9519 mode of the result comes out right. */
9520 *expr_p
= dummy_object (type
);
9524 tag
= build_int_cst (build_pointer_type (type
), 0);
9525 *expr_p
= build_call_expr_internal_loc (loc
, IFN_VA_ARG
, type
, 2, valist
, tag
);
9527 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
9528 needs to be expanded. */
9529 cfun
->curr_properties
&= ~PROP_gimple_lva
;
9534 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
9536 DST/SRC are the destination and source respectively. You can pass
9537 ungimplified trees in DST or SRC, in which case they will be
9538 converted to a gimple operand if necessary.
9540 This function returns the newly created GIMPLE_ASSIGN tuple. */
9543 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
9545 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
9546 gimplify_and_add (t
, seq_p
);
9548 return gimple_seq_last_stmt (*seq_p
);
9552 gimplify_hasher::hash (const elt_t
*p
)
9555 return iterative_hash_expr (t
, 0);
9559 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
9563 enum tree_code code
= TREE_CODE (t1
);
9565 if (TREE_CODE (t2
) != code
9566 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
9569 if (!operand_equal_p (t1
, t2
, 0))
9572 #ifdef ENABLE_CHECKING
9573 /* Only allow them to compare equal if they also hash equal; otherwise
9574 results are nondeterminate, and we fail bootstrap comparison. */
9575 gcc_assert (hash (p1
) == hash (p2
));