[AArch64] [3/4 Fix vtbx1]Implement bsl intrinsics using builtins
[official-gcc.git] / gcc / gimplify.c
blobb252bef6508c0216fa1380034ed1f106ac843b02
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tree.h"
27 #include "expr.h"
28 #include "pointer-set.h"
29 #include "hash-table.h"
30 #include "basic-block.h"
31 #include "tree-ssa-alias.h"
32 #include "internal-fn.h"
33 #include "gimple-fold.h"
34 #include "tree-eh.h"
35 #include "gimple-expr.h"
36 #include "is-a.h"
37 #include "gimple.h"
38 #include "gimplify.h"
39 #include "gimple-iterator.h"
40 #include "stringpool.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stor-layout.h"
44 #include "stmt.h"
45 #include "print-tree.h"
46 #include "tree-iterator.h"
47 #include "tree-inline.h"
48 #include "tree-pretty-print.h"
49 #include "langhooks.h"
50 #include "bitmap.h"
51 #include "gimple-ssa.h"
52 #include "cgraph.h"
53 #include "tree-cfg.h"
54 #include "tree-ssanames.h"
55 #include "tree-ssa.h"
56 #include "diagnostic-core.h"
57 #include "target.h"
58 #include "splay-tree.h"
59 #include "omp-low.h"
60 #include "gimple-low.h"
61 #include "cilk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
66 enum gimplify_omp_var_data
68 GOVD_SEEN = 1,
69 GOVD_EXPLICIT = 2,
70 GOVD_SHARED = 4,
71 GOVD_PRIVATE = 8,
72 GOVD_FIRSTPRIVATE = 16,
73 GOVD_LASTPRIVATE = 32,
74 GOVD_REDUCTION = 64,
75 GOVD_LOCAL = 128,
76 GOVD_MAP = 256,
77 GOVD_DEBUG_PRIVATE = 512,
78 GOVD_PRIVATE_OUTER_REF = 1024,
79 GOVD_LINEAR = 2048,
80 GOVD_ALIGNED = 4096,
81 GOVD_MAP_TO_ONLY = 8192,
82 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
83 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
84 | GOVD_LOCAL)
88 enum omp_region_type
90 ORT_WORKSHARE = 0,
91 ORT_SIMD = 1,
92 ORT_PARALLEL = 2,
93 ORT_COMBINED_PARALLEL = 3,
94 ORT_TASK = 4,
95 ORT_UNTIED_TASK = 5,
96 ORT_TEAMS = 8,
97 ORT_TARGET_DATA = 16,
98 ORT_TARGET = 32
101 /* Gimplify hashtable helper. */
103 struct gimplify_hasher : typed_free_remove <elt_t>
105 typedef elt_t value_type;
106 typedef elt_t compare_type;
107 static inline hashval_t hash (const value_type *);
108 static inline bool equal (const value_type *, const compare_type *);
111 struct gimplify_ctx
113 struct gimplify_ctx *prev_context;
115 vec<gimple> bind_expr_stack;
116 tree temps;
117 gimple_seq conditional_cleanups;
118 tree exit_label;
119 tree return_temp;
121 vec<tree> case_labels;
122 /* The formal temporary table. Should this be persistent? */
123 hash_table <gimplify_hasher> temp_htab;
125 int conditions;
126 bool save_stack;
127 bool into_ssa;
128 bool allow_rhs_cond_expr;
129 bool in_cleanup_point_expr;
132 struct gimplify_omp_ctx
134 struct gimplify_omp_ctx *outer_context;
135 splay_tree variables;
136 struct pointer_set_t *privatized_types;
137 location_t location;
138 enum omp_clause_default_kind default_kind;
139 enum omp_region_type region_type;
140 bool combined_loop;
143 static struct gimplify_ctx *gimplify_ctxp;
144 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
146 /* Forward declaration. */
147 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
149 /* Shorter alias name for the above function for use in gimplify.c
150 only. */
152 static inline void
153 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
155 gimple_seq_add_stmt_without_update (seq_p, gs);
158 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
159 NULL, a new sequence is allocated. This function is
160 similar to gimple_seq_add_seq, but does not scan the operands.
161 During gimplification, we need to manipulate statement sequences
162 before the def/use vectors have been constructed. */
164 static void
165 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
167 gimple_stmt_iterator si;
169 if (src == NULL)
170 return;
172 si = gsi_last (*dst_p);
173 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
177 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
178 and popping gimplify contexts. */
180 static struct gimplify_ctx *ctx_pool = NULL;
182 /* Return a gimplify context struct from the pool. */
184 static inline struct gimplify_ctx *
185 ctx_alloc (void)
187 struct gimplify_ctx * c = ctx_pool;
189 if (c)
190 ctx_pool = c->prev_context;
191 else
192 c = XNEW (struct gimplify_ctx);
194 memset (c, '\0', sizeof (*c));
195 return c;
198 /* Put gimplify context C back into the pool. */
200 static inline void
201 ctx_free (struct gimplify_ctx *c)
203 c->prev_context = ctx_pool;
204 ctx_pool = c;
207 /* Free allocated ctx stack memory. */
209 void
210 free_gimplify_stack (void)
212 struct gimplify_ctx *c;
214 while ((c = ctx_pool))
216 ctx_pool = c->prev_context;
217 free (c);
222 /* Set up a context for the gimplifier. */
224 void
225 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
227 struct gimplify_ctx *c = ctx_alloc ();
229 c->prev_context = gimplify_ctxp;
230 gimplify_ctxp = c;
231 gimplify_ctxp->into_ssa = in_ssa;
232 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
235 /* Tear down a context for the gimplifier. If BODY is non-null, then
236 put the temporaries into the outer BIND_EXPR. Otherwise, put them
237 in the local_decls.
239 BODY is not a sequence, but the first tuple in a sequence. */
241 void
242 pop_gimplify_context (gimple body)
244 struct gimplify_ctx *c = gimplify_ctxp;
246 gcc_assert (c
247 && (!c->bind_expr_stack.exists ()
248 || c->bind_expr_stack.is_empty ()));
249 c->bind_expr_stack.release ();
250 gimplify_ctxp = c->prev_context;
252 if (body)
253 declare_vars (c->temps, body, false);
254 else
255 record_vars (c->temps);
257 if (c->temp_htab.is_created ())
258 c->temp_htab.dispose ();
259 ctx_free (c);
262 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
264 static void
265 gimple_push_bind_expr (gimple gimple_bind)
267 gimplify_ctxp->bind_expr_stack.reserve (8);
268 gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
271 /* Pop the first element off the stack of bindings. */
273 static void
274 gimple_pop_bind_expr (void)
276 gimplify_ctxp->bind_expr_stack.pop ();
279 /* Return the first element of the stack of bindings. */
281 gimple
282 gimple_current_bind_expr (void)
284 return gimplify_ctxp->bind_expr_stack.last ();
287 /* Return the stack of bindings created during gimplification. */
289 vec<gimple>
290 gimple_bind_expr_stack (void)
292 return gimplify_ctxp->bind_expr_stack;
295 /* Return true iff there is a COND_EXPR between us and the innermost
296 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
298 static bool
299 gimple_conditional_context (void)
301 return gimplify_ctxp->conditions > 0;
304 /* Note that we've entered a COND_EXPR. */
306 static void
307 gimple_push_condition (void)
309 #ifdef ENABLE_GIMPLE_CHECKING
310 if (gimplify_ctxp->conditions == 0)
311 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
312 #endif
313 ++(gimplify_ctxp->conditions);
316 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
317 now, add any conditional cleanups we've seen to the prequeue. */
319 static void
320 gimple_pop_condition (gimple_seq *pre_p)
322 int conds = --(gimplify_ctxp->conditions);
324 gcc_assert (conds >= 0);
325 if (conds == 0)
327 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
328 gimplify_ctxp->conditional_cleanups = NULL;
332 /* A stable comparison routine for use with splay trees and DECLs. */
334 static int
335 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
337 tree a = (tree) xa;
338 tree b = (tree) xb;
340 return DECL_UID (a) - DECL_UID (b);
343 /* Create a new omp construct that deals with variable remapping. */
345 static struct gimplify_omp_ctx *
346 new_omp_context (enum omp_region_type region_type)
348 struct gimplify_omp_ctx *c;
350 c = XCNEW (struct gimplify_omp_ctx);
351 c->outer_context = gimplify_omp_ctxp;
352 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
353 c->privatized_types = pointer_set_create ();
354 c->location = input_location;
355 c->region_type = region_type;
356 if ((region_type & ORT_TASK) == 0)
357 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
358 else
359 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
361 return c;
364 /* Destroy an omp construct that deals with variable remapping. */
366 static void
367 delete_omp_context (struct gimplify_omp_ctx *c)
369 splay_tree_delete (c->variables);
370 pointer_set_destroy (c->privatized_types);
371 XDELETE (c);
374 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
375 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
377 /* Both gimplify the statement T and append it to *SEQ_P. This function
378 behaves exactly as gimplify_stmt, but you don't have to pass T as a
379 reference. */
381 void
382 gimplify_and_add (tree t, gimple_seq *seq_p)
384 gimplify_stmt (&t, seq_p);
387 /* Gimplify statement T into sequence *SEQ_P, and return the first
388 tuple in the sequence of generated tuples for this statement.
389 Return NULL if gimplifying T produced no tuples. */
391 static gimple
392 gimplify_and_return_first (tree t, gimple_seq *seq_p)
394 gimple_stmt_iterator last = gsi_last (*seq_p);
396 gimplify_and_add (t, seq_p);
398 if (!gsi_end_p (last))
400 gsi_next (&last);
401 return gsi_stmt (last);
403 else
404 return gimple_seq_first_stmt (*seq_p);
407 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
408 LHS, or for a call argument. */
410 static bool
411 is_gimple_mem_rhs (tree t)
413 /* If we're dealing with a renamable type, either source or dest must be
414 a renamed variable. */
415 if (is_gimple_reg_type (TREE_TYPE (t)))
416 return is_gimple_val (t);
417 else
418 return is_gimple_val (t) || is_gimple_lvalue (t);
421 /* Return true if T is a CALL_EXPR or an expression that can be
422 assigned to a temporary. Note that this predicate should only be
423 used during gimplification. See the rationale for this in
424 gimplify_modify_expr. */
426 static bool
427 is_gimple_reg_rhs_or_call (tree t)
429 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
430 || TREE_CODE (t) == CALL_EXPR);
433 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
434 this predicate should only be used during gimplification. See the
435 rationale for this in gimplify_modify_expr. */
437 static bool
438 is_gimple_mem_rhs_or_call (tree t)
440 /* If we're dealing with a renamable type, either source or dest must be
441 a renamed variable. */
442 if (is_gimple_reg_type (TREE_TYPE (t)))
443 return is_gimple_val (t);
444 else
445 return (is_gimple_val (t) || is_gimple_lvalue (t)
446 || TREE_CODE (t) == CALL_EXPR);
449 /* Create a temporary with a name derived from VAL. Subroutine of
450 lookup_tmp_var; nobody else should call this function. */
452 static inline tree
453 create_tmp_from_val (tree val, bool is_formal)
455 /* Drop all qualifiers and address-space information from the value type. */
456 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
457 tree var = create_tmp_var (type, get_name (val));
458 if (is_formal
459 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
460 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
461 DECL_GIMPLE_REG_P (var) = 1;
462 return var;
465 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
466 an existing expression temporary. */
468 static tree
469 lookup_tmp_var (tree val, bool is_formal)
471 tree ret;
473 /* If not optimizing, never really reuse a temporary. local-alloc
474 won't allocate any variable that is used in more than one basic
475 block, which means it will go into memory, causing much extra
476 work in reload and final and poorer code generation, outweighing
477 the extra memory allocation here. */
478 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
479 ret = create_tmp_from_val (val, is_formal);
480 else
482 elt_t elt, *elt_p;
483 elt_t **slot;
485 elt.val = val;
486 if (!gimplify_ctxp->temp_htab.is_created ())
487 gimplify_ctxp->temp_htab.create (1000);
488 slot = gimplify_ctxp->temp_htab.find_slot (&elt, INSERT);
489 if (*slot == NULL)
491 elt_p = XNEW (elt_t);
492 elt_p->val = val;
493 elt_p->temp = ret = create_tmp_from_val (val, is_formal);
494 *slot = elt_p;
496 else
498 elt_p = *slot;
499 ret = elt_p->temp;
503 return ret;
506 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
508 static tree
509 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
510 bool is_formal)
512 tree t, mod;
514 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
515 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
516 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
517 fb_rvalue);
519 if (gimplify_ctxp->into_ssa
520 && is_gimple_reg_type (TREE_TYPE (val)))
521 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
522 else
523 t = lookup_tmp_var (val, is_formal);
525 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
527 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
529 /* gimplify_modify_expr might want to reduce this further. */
530 gimplify_and_add (mod, pre_p);
531 ggc_free (mod);
533 return t;
536 /* Return a formal temporary variable initialized with VAL. PRE_P is as
537 in gimplify_expr. Only use this function if:
539 1) The value of the unfactored expression represented by VAL will not
540 change between the initialization and use of the temporary, and
541 2) The temporary will not be otherwise modified.
543 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
544 and #2 means it is inappropriate for && temps.
546 For other cases, use get_initialized_tmp_var instead. */
548 tree
549 get_formal_tmp_var (tree val, gimple_seq *pre_p)
551 return internal_get_tmp_var (val, pre_p, NULL, true);
554 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
555 are as in gimplify_expr. */
557 tree
558 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
560 return internal_get_tmp_var (val, pre_p, post_p, false);
563 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
564 generate debug info for them; otherwise don't. */
566 void
567 declare_vars (tree vars, gimple scope, bool debug_info)
569 tree last = vars;
570 if (last)
572 tree temps, block;
574 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
576 temps = nreverse (last);
578 block = gimple_bind_block (scope);
579 gcc_assert (!block || TREE_CODE (block) == BLOCK);
580 if (!block || !debug_info)
582 DECL_CHAIN (last) = gimple_bind_vars (scope);
583 gimple_bind_set_vars (scope, temps);
585 else
587 /* We need to attach the nodes both to the BIND_EXPR and to its
588 associated BLOCK for debugging purposes. The key point here
589 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
590 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
591 if (BLOCK_VARS (block))
592 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
593 else
595 gimple_bind_set_vars (scope,
596 chainon (gimple_bind_vars (scope), temps));
597 BLOCK_VARS (block) = temps;
603 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
604 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
605 no such upper bound can be obtained. */
607 static void
608 force_constant_size (tree var)
610 /* The only attempt we make is by querying the maximum size of objects
611 of the variable's type. */
613 HOST_WIDE_INT max_size;
615 gcc_assert (TREE_CODE (var) == VAR_DECL);
617 max_size = max_int_size_in_bytes (TREE_TYPE (var));
619 gcc_assert (max_size >= 0);
621 DECL_SIZE_UNIT (var)
622 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
623 DECL_SIZE (var)
624 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
627 /* Push the temporary variable TMP into the current binding. */
629 void
630 gimple_add_tmp_var (tree tmp)
632 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
634 /* Later processing assumes that the object size is constant, which might
635 not be true at this point. Force the use of a constant upper bound in
636 this case. */
637 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
638 force_constant_size (tmp);
640 DECL_CONTEXT (tmp) = current_function_decl;
641 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
643 if (gimplify_ctxp)
645 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
646 gimplify_ctxp->temps = tmp;
648 /* Mark temporaries local within the nearest enclosing parallel. */
649 if (gimplify_omp_ctxp)
651 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
652 while (ctx
653 && (ctx->region_type == ORT_WORKSHARE
654 || ctx->region_type == ORT_SIMD))
655 ctx = ctx->outer_context;
656 if (ctx)
657 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
660 else if (cfun)
661 record_vars (tmp);
662 else
664 gimple_seq body_seq;
666 /* This case is for nested functions. We need to expose the locals
667 they create. */
668 body_seq = gimple_body (current_function_decl);
669 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
675 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
676 nodes that are referenced more than once in GENERIC functions. This is
677 necessary because gimplification (translation into GIMPLE) is performed
678 by modifying tree nodes in-place, so gimplication of a shared node in a
679 first context could generate an invalid GIMPLE form in a second context.
681 This is achieved with a simple mark/copy/unmark algorithm that walks the
682 GENERIC representation top-down, marks nodes with TREE_VISITED the first
683 time it encounters them, duplicates them if they already have TREE_VISITED
684 set, and finally removes the TREE_VISITED marks it has set.
686 The algorithm works only at the function level, i.e. it generates a GENERIC
687 representation of a function with no nodes shared within the function when
688 passed a GENERIC function (except for nodes that are allowed to be shared).
690 At the global level, it is also necessary to unshare tree nodes that are
691 referenced in more than one function, for the same aforementioned reason.
692 This requires some cooperation from the front-end. There are 2 strategies:
694 1. Manual unsharing. The front-end needs to call unshare_expr on every
695 expression that might end up being shared across functions.
697 2. Deep unsharing. This is an extension of regular unsharing. Instead
698 of calling unshare_expr on expressions that might be shared across
699 functions, the front-end pre-marks them with TREE_VISITED. This will
700 ensure that they are unshared on the first reference within functions
701 when the regular unsharing algorithm runs. The counterpart is that
702 this algorithm must look deeper than for manual unsharing, which is
703 specified by LANG_HOOKS_DEEP_UNSHARING.
705 If there are only few specific cases of node sharing across functions, it is
706 probably easier for a front-end to unshare the expressions manually. On the
707 contrary, if the expressions generated at the global level are as widespread
708 as expressions generated within functions, deep unsharing is very likely the
709 way to go. */
711 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
712 These nodes model computations that must be done once. If we were to
713 unshare something like SAVE_EXPR(i++), the gimplification process would
714 create wrong code. However, if DATA is non-null, it must hold a pointer
715 set that is used to unshare the subtrees of these nodes. */
717 static tree
718 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
720 tree t = *tp;
721 enum tree_code code = TREE_CODE (t);
723 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
724 copy their subtrees if we can make sure to do it only once. */
725 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
727 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
729 else
730 *walk_subtrees = 0;
733 /* Stop at types, decls, constants like copy_tree_r. */
734 else if (TREE_CODE_CLASS (code) == tcc_type
735 || TREE_CODE_CLASS (code) == tcc_declaration
736 || TREE_CODE_CLASS (code) == tcc_constant
737 /* We can't do anything sensible with a BLOCK used as an
738 expression, but we also can't just die when we see it
739 because of non-expression uses. So we avert our eyes
740 and cross our fingers. Silly Java. */
741 || code == BLOCK)
742 *walk_subtrees = 0;
744 /* Cope with the statement expression extension. */
745 else if (code == STATEMENT_LIST)
748 /* Leave the bulk of the work to copy_tree_r itself. */
749 else
750 copy_tree_r (tp, walk_subtrees, NULL);
752 return NULL_TREE;
755 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
756 If *TP has been visited already, then *TP is deeply copied by calling
757 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
759 static tree
760 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
762 tree t = *tp;
763 enum tree_code code = TREE_CODE (t);
765 /* Skip types, decls, and constants. But we do want to look at their
766 types and the bounds of types. Mark them as visited so we properly
767 unmark their subtrees on the unmark pass. If we've already seen them,
768 don't look down further. */
769 if (TREE_CODE_CLASS (code) == tcc_type
770 || TREE_CODE_CLASS (code) == tcc_declaration
771 || TREE_CODE_CLASS (code) == tcc_constant)
773 if (TREE_VISITED (t))
774 *walk_subtrees = 0;
775 else
776 TREE_VISITED (t) = 1;
779 /* If this node has been visited already, unshare it and don't look
780 any deeper. */
781 else if (TREE_VISITED (t))
783 walk_tree (tp, mostly_copy_tree_r, data, NULL);
784 *walk_subtrees = 0;
787 /* Otherwise, mark the node as visited and keep looking. */
788 else
789 TREE_VISITED (t) = 1;
791 return NULL_TREE;
794 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
795 copy_if_shared_r callback unmodified. */
797 static inline void
798 copy_if_shared (tree *tp, void *data)
800 walk_tree (tp, copy_if_shared_r, data, NULL);
803 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
804 any nested functions. */
806 static void
807 unshare_body (tree fndecl)
809 struct cgraph_node *cgn = cgraph_get_node (fndecl);
810 /* If the language requires deep unsharing, we need a pointer set to make
811 sure we don't repeatedly unshare subtrees of unshareable nodes. */
812 struct pointer_set_t *visited
813 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
815 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
816 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
817 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
819 if (visited)
820 pointer_set_destroy (visited);
822 if (cgn)
823 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
824 unshare_body (cgn->decl);
827 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
828 Subtrees are walked until the first unvisited node is encountered. */
830 static tree
831 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
833 tree t = *tp;
835 /* If this node has been visited, unmark it and keep looking. */
836 if (TREE_VISITED (t))
837 TREE_VISITED (t) = 0;
839 /* Otherwise, don't look any deeper. */
840 else
841 *walk_subtrees = 0;
843 return NULL_TREE;
846 /* Unmark the visited trees rooted at *TP. */
848 static inline void
849 unmark_visited (tree *tp)
851 walk_tree (tp, unmark_visited_r, NULL, NULL);
854 /* Likewise, but mark all trees as not visited. */
856 static void
857 unvisit_body (tree fndecl)
859 struct cgraph_node *cgn = cgraph_get_node (fndecl);
861 unmark_visited (&DECL_SAVED_TREE (fndecl));
862 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
863 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
865 if (cgn)
866 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
867 unvisit_body (cgn->decl);
870 /* Unconditionally make an unshared copy of EXPR. This is used when using
871 stored expressions which span multiple functions, such as BINFO_VTABLE,
872 as the normal unsharing process can't tell that they're shared. */
874 tree
875 unshare_expr (tree expr)
877 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
878 return expr;
881 /* Worker for unshare_expr_without_location. */
883 static tree
884 prune_expr_location (tree *tp, int *walk_subtrees, void *)
886 if (EXPR_P (*tp))
887 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
888 else
889 *walk_subtrees = 0;
890 return NULL_TREE;
893 /* Similar to unshare_expr but also prune all expression locations
894 from EXPR. */
896 tree
897 unshare_expr_without_location (tree expr)
899 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
900 if (EXPR_P (expr))
901 walk_tree (&expr, prune_expr_location, NULL, NULL);
902 return expr;
905 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
906 contain statements and have a value. Assign its value to a temporary
907 and give it void_type_node. Return the temporary, or NULL_TREE if
908 WRAPPER was already void. */
910 tree
911 voidify_wrapper_expr (tree wrapper, tree temp)
913 tree type = TREE_TYPE (wrapper);
914 if (type && !VOID_TYPE_P (type))
916 tree *p;
918 /* Set p to point to the body of the wrapper. Loop until we find
919 something that isn't a wrapper. */
920 for (p = &wrapper; p && *p; )
922 switch (TREE_CODE (*p))
924 case BIND_EXPR:
925 TREE_SIDE_EFFECTS (*p) = 1;
926 TREE_TYPE (*p) = void_type_node;
927 /* For a BIND_EXPR, the body is operand 1. */
928 p = &BIND_EXPR_BODY (*p);
929 break;
931 case CLEANUP_POINT_EXPR:
932 case TRY_FINALLY_EXPR:
933 case TRY_CATCH_EXPR:
934 TREE_SIDE_EFFECTS (*p) = 1;
935 TREE_TYPE (*p) = void_type_node;
936 p = &TREE_OPERAND (*p, 0);
937 break;
939 case STATEMENT_LIST:
941 tree_stmt_iterator i = tsi_last (*p);
942 TREE_SIDE_EFFECTS (*p) = 1;
943 TREE_TYPE (*p) = void_type_node;
944 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
946 break;
948 case COMPOUND_EXPR:
949 /* Advance to the last statement. Set all container types to
950 void. */
951 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
953 TREE_SIDE_EFFECTS (*p) = 1;
954 TREE_TYPE (*p) = void_type_node;
956 break;
958 case TRANSACTION_EXPR:
959 TREE_SIDE_EFFECTS (*p) = 1;
960 TREE_TYPE (*p) = void_type_node;
961 p = &TRANSACTION_EXPR_BODY (*p);
962 break;
964 default:
965 /* Assume that any tree upon which voidify_wrapper_expr is
966 directly called is a wrapper, and that its body is op0. */
967 if (p == &wrapper)
969 TREE_SIDE_EFFECTS (*p) = 1;
970 TREE_TYPE (*p) = void_type_node;
971 p = &TREE_OPERAND (*p, 0);
972 break;
974 goto out;
978 out:
979 if (p == NULL || IS_EMPTY_STMT (*p))
980 temp = NULL_TREE;
981 else if (temp)
983 /* The wrapper is on the RHS of an assignment that we're pushing
984 down. */
985 gcc_assert (TREE_CODE (temp) == INIT_EXPR
986 || TREE_CODE (temp) == MODIFY_EXPR);
987 TREE_OPERAND (temp, 1) = *p;
988 *p = temp;
990 else
992 temp = create_tmp_var (type, "retval");
993 *p = build2 (INIT_EXPR, type, temp, *p);
996 return temp;
999 return NULL_TREE;
1002 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1003 a temporary through which they communicate. */
1005 static void
1006 build_stack_save_restore (gimple *save, gimple *restore)
1008 tree tmp_var;
1010 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1011 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1012 gimple_call_set_lhs (*save, tmp_var);
1014 *restore
1015 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1016 1, tmp_var);
1019 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1021 static enum gimplify_status
1022 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1024 tree bind_expr = *expr_p;
1025 bool old_save_stack = gimplify_ctxp->save_stack;
1026 tree t;
1027 gimple gimple_bind;
1028 gimple_seq body, cleanup;
1029 gimple stack_save;
1031 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1033 /* Mark variables seen in this bind expr. */
1034 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1036 if (TREE_CODE (t) == VAR_DECL)
1038 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1040 /* Mark variable as local. */
1041 if (ctx && !DECL_EXTERNAL (t)
1042 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1043 || splay_tree_lookup (ctx->variables,
1044 (splay_tree_key) t) == NULL))
1045 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1047 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1049 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1050 cfun->has_local_explicit_reg_vars = true;
1053 /* Preliminarily mark non-addressed complex variables as eligible
1054 for promotion to gimple registers. We'll transform their uses
1055 as we find them. */
1056 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1057 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1058 && !TREE_THIS_VOLATILE (t)
1059 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1060 && !needs_to_live_in_memory (t))
1061 DECL_GIMPLE_REG_P (t) = 1;
1064 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1065 BIND_EXPR_BLOCK (bind_expr));
1066 gimple_push_bind_expr (gimple_bind);
1068 gimplify_ctxp->save_stack = false;
1070 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1071 body = NULL;
1072 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1073 gimple_bind_set_body (gimple_bind, body);
1075 cleanup = NULL;
1076 stack_save = NULL;
1077 if (gimplify_ctxp->save_stack)
1079 gimple stack_restore;
1081 /* Save stack on entry and restore it on exit. Add a try_finally
1082 block to achieve this. */
1083 build_stack_save_restore (&stack_save, &stack_restore);
1085 gimplify_seq_add_stmt (&cleanup, stack_restore);
1088 /* Add clobbers for all variables that go out of scope. */
1089 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1091 if (TREE_CODE (t) == VAR_DECL
1092 && !is_global_var (t)
1093 && DECL_CONTEXT (t) == current_function_decl
1094 && !DECL_HARD_REGISTER (t)
1095 && !TREE_THIS_VOLATILE (t)
1096 && !DECL_HAS_VALUE_EXPR_P (t)
1097 /* Only care for variables that have to be in memory. Others
1098 will be rewritten into SSA names, hence moved to the top-level. */
1099 && !is_gimple_reg (t)
1100 && flag_stack_reuse != SR_NONE)
1102 tree clobber = build_constructor (TREE_TYPE (t),
1103 NULL);
1104 TREE_THIS_VOLATILE (clobber) = 1;
1105 gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
1109 if (cleanup)
1111 gimple gs;
1112 gimple_seq new_body;
1114 new_body = NULL;
1115 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1116 GIMPLE_TRY_FINALLY);
1118 if (stack_save)
1119 gimplify_seq_add_stmt (&new_body, stack_save);
1120 gimplify_seq_add_stmt (&new_body, gs);
1121 gimple_bind_set_body (gimple_bind, new_body);
1124 gimplify_ctxp->save_stack = old_save_stack;
1125 gimple_pop_bind_expr ();
1127 gimplify_seq_add_stmt (pre_p, gimple_bind);
1129 if (temp)
1131 *expr_p = temp;
1132 return GS_OK;
1135 *expr_p = NULL_TREE;
1136 return GS_ALL_DONE;
1139 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1140 GIMPLE value, it is assigned to a new temporary and the statement is
1141 re-written to return the temporary.
1143 PRE_P points to the sequence where side effects that must happen before
1144 STMT should be stored. */
1146 static enum gimplify_status
1147 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1149 gimple ret;
1150 tree ret_expr = TREE_OPERAND (stmt, 0);
1151 tree result_decl, result;
1153 if (ret_expr == error_mark_node)
1154 return GS_ERROR;
1156 /* Implicit _Cilk_sync must be inserted right before any return statement
1157 if there is a _Cilk_spawn in the function. If the user has provided a
1158 _Cilk_sync, the optimizer should remove this duplicate one. */
1159 if (fn_contains_cilk_spawn_p (cfun))
1161 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1162 gimplify_and_add (impl_sync, pre_p);
1165 if (!ret_expr
1166 || TREE_CODE (ret_expr) == RESULT_DECL
1167 || ret_expr == error_mark_node)
1169 gimple ret = gimple_build_return (ret_expr);
1170 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1171 gimplify_seq_add_stmt (pre_p, ret);
1172 return GS_ALL_DONE;
1175 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1176 result_decl = NULL_TREE;
1177 else
1179 result_decl = TREE_OPERAND (ret_expr, 0);
1181 /* See through a return by reference. */
1182 if (TREE_CODE (result_decl) == INDIRECT_REF)
1183 result_decl = TREE_OPERAND (result_decl, 0);
1185 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1186 || TREE_CODE (ret_expr) == INIT_EXPR)
1187 && TREE_CODE (result_decl) == RESULT_DECL);
1190 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1191 Recall that aggregate_value_p is FALSE for any aggregate type that is
1192 returned in registers. If we're returning values in registers, then
1193 we don't want to extend the lifetime of the RESULT_DECL, particularly
1194 across another call. In addition, for those aggregates for which
1195 hard_function_value generates a PARALLEL, we'll die during normal
1196 expansion of structure assignments; there's special code in expand_return
1197 to handle this case that does not exist in expand_expr. */
1198 if (!result_decl)
1199 result = NULL_TREE;
1200 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1202 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1204 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1205 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1206 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1207 should be effectively allocated by the caller, i.e. all calls to
1208 this function must be subject to the Return Slot Optimization. */
1209 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1210 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1212 result = result_decl;
1214 else if (gimplify_ctxp->return_temp)
1215 result = gimplify_ctxp->return_temp;
1216 else
1218 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1220 /* ??? With complex control flow (usually involving abnormal edges),
1221 we can wind up warning about an uninitialized value for this. Due
1222 to how this variable is constructed and initialized, this is never
1223 true. Give up and never warn. */
1224 TREE_NO_WARNING (result) = 1;
1226 gimplify_ctxp->return_temp = result;
1229 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1230 Then gimplify the whole thing. */
1231 if (result != result_decl)
1232 TREE_OPERAND (ret_expr, 0) = result;
1234 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1236 ret = gimple_build_return (result);
1237 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1238 gimplify_seq_add_stmt (pre_p, ret);
1240 return GS_ALL_DONE;
1243 /* Gimplify a variable-length array DECL. */
1245 static void
1246 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1248 /* This is a variable-sized decl. Simplify its size and mark it
1249 for deferred expansion. */
1250 tree t, addr, ptr_type;
1252 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1253 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1255 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1256 if (DECL_HAS_VALUE_EXPR_P (decl))
1257 return;
1259 /* All occurrences of this decl in final gimplified code will be
1260 replaced by indirection. Setting DECL_VALUE_EXPR does two
1261 things: First, it lets the rest of the gimplifier know what
1262 replacement to use. Second, it lets the debug info know
1263 where to find the value. */
1264 ptr_type = build_pointer_type (TREE_TYPE (decl));
1265 addr = create_tmp_var (ptr_type, get_name (decl));
1266 DECL_IGNORED_P (addr) = 0;
1267 t = build_fold_indirect_ref (addr);
1268 TREE_THIS_NOTRAP (t) = 1;
1269 SET_DECL_VALUE_EXPR (decl, t);
1270 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1272 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1273 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1274 size_int (DECL_ALIGN (decl)));
1275 /* The call has been built for a variable-sized object. */
1276 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1277 t = fold_convert (ptr_type, t);
1278 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1280 gimplify_and_add (t, seq_p);
1282 /* Indicate that we need to restore the stack level when the
1283 enclosing BIND_EXPR is exited. */
1284 gimplify_ctxp->save_stack = true;
1287 /* A helper function to be called via walk_tree. Mark all labels under *TP
1288 as being forced. To be called for DECL_INITIAL of static variables. */
1290 static tree
1291 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1293 if (TYPE_P (*tp))
1294 *walk_subtrees = 0;
1295 if (TREE_CODE (*tp) == LABEL_DECL)
1296 FORCED_LABEL (*tp) = 1;
1298 return NULL_TREE;
1301 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1302 and initialization explicit. */
1304 static enum gimplify_status
1305 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1307 tree stmt = *stmt_p;
1308 tree decl = DECL_EXPR_DECL (stmt);
1310 *stmt_p = NULL_TREE;
1312 if (TREE_TYPE (decl) == error_mark_node)
1313 return GS_ERROR;
1315 if ((TREE_CODE (decl) == TYPE_DECL
1316 || TREE_CODE (decl) == VAR_DECL)
1317 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1318 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1320 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1321 in case its size expressions contain problematic nodes like CALL_EXPR. */
1322 if (TREE_CODE (decl) == TYPE_DECL
1323 && DECL_ORIGINAL_TYPE (decl)
1324 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1325 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1327 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1329 tree init = DECL_INITIAL (decl);
1331 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1332 || (!TREE_STATIC (decl)
1333 && flag_stack_check == GENERIC_STACK_CHECK
1334 && compare_tree_int (DECL_SIZE_UNIT (decl),
1335 STACK_CHECK_MAX_VAR_SIZE) > 0))
1336 gimplify_vla_decl (decl, seq_p);
1338 /* Some front ends do not explicitly declare all anonymous
1339 artificial variables. We compensate here by declaring the
1340 variables, though it would be better if the front ends would
1341 explicitly declare them. */
1342 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1343 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1344 gimple_add_tmp_var (decl);
1346 if (init && init != error_mark_node)
1348 if (!TREE_STATIC (decl))
1350 DECL_INITIAL (decl) = NULL_TREE;
1351 init = build2 (INIT_EXPR, void_type_node, decl, init);
1352 gimplify_and_add (init, seq_p);
1353 ggc_free (init);
1355 else
1356 /* We must still examine initializers for static variables
1357 as they may contain a label address. */
1358 walk_tree (&init, force_labels_r, NULL, NULL);
1362 return GS_ALL_DONE;
1365 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1366 and replacing the LOOP_EXPR with goto, but if the loop contains an
1367 EXIT_EXPR, we need to append a label for it to jump to. */
1369 static enum gimplify_status
1370 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1372 tree saved_label = gimplify_ctxp->exit_label;
1373 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1375 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1377 gimplify_ctxp->exit_label = NULL_TREE;
1379 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1381 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1383 if (gimplify_ctxp->exit_label)
1384 gimplify_seq_add_stmt (pre_p,
1385 gimple_build_label (gimplify_ctxp->exit_label));
1387 gimplify_ctxp->exit_label = saved_label;
1389 *expr_p = NULL;
1390 return GS_ALL_DONE;
1393 /* Gimplify a statement list onto a sequence. These may be created either
1394 by an enlightened front-end, or by shortcut_cond_expr. */
1396 static enum gimplify_status
1397 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1399 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1401 tree_stmt_iterator i = tsi_start (*expr_p);
1403 while (!tsi_end_p (i))
1405 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1406 tsi_delink (&i);
1409 if (temp)
1411 *expr_p = temp;
1412 return GS_OK;
1415 return GS_ALL_DONE;
1419 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1420 branch to. */
1422 static enum gimplify_status
1423 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1425 tree switch_expr = *expr_p;
1426 gimple_seq switch_body_seq = NULL;
1427 enum gimplify_status ret;
1428 tree index_type = TREE_TYPE (switch_expr);
1429 if (index_type == NULL_TREE)
1430 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
1432 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1433 fb_rvalue);
1434 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1435 return ret;
1437 if (SWITCH_BODY (switch_expr))
1439 vec<tree> labels;
1440 vec<tree> saved_labels;
1441 tree default_case = NULL_TREE;
1442 gimple gimple_switch;
1444 /* If someone can be bothered to fill in the labels, they can
1445 be bothered to null out the body too. */
1446 gcc_assert (!SWITCH_LABELS (switch_expr));
1448 /* Save old labels, get new ones from body, then restore the old
1449 labels. Save all the things from the switch body to append after. */
1450 saved_labels = gimplify_ctxp->case_labels;
1451 gimplify_ctxp->case_labels.create (8);
1453 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1454 labels = gimplify_ctxp->case_labels;
1455 gimplify_ctxp->case_labels = saved_labels;
1457 preprocess_case_label_vec_for_gimple (labels, index_type,
1458 &default_case);
1460 if (!default_case)
1462 gimple new_default;
1464 default_case
1465 = build_case_label (NULL_TREE, NULL_TREE,
1466 create_artificial_label (UNKNOWN_LOCATION));
1467 new_default = gimple_build_label (CASE_LABEL (default_case));
1468 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1471 gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
1472 default_case, labels);
1473 gimplify_seq_add_stmt (pre_p, gimple_switch);
1474 gimplify_seq_add_seq (pre_p, switch_body_seq);
1475 labels.release ();
1477 else
1478 gcc_assert (SWITCH_LABELS (switch_expr));
1480 return GS_ALL_DONE;
1483 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1485 static enum gimplify_status
1486 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1488 struct gimplify_ctx *ctxp;
1489 gimple gimple_label;
1491 /* Invalid OpenMP programs can play Duff's Device type games with
1492 #pragma omp parallel. At least in the C front end, we don't
1493 detect such invalid branches until after gimplification. */
1494 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1495 if (ctxp->case_labels.exists ())
1496 break;
1498 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1499 ctxp->case_labels.safe_push (*expr_p);
1500 gimplify_seq_add_stmt (pre_p, gimple_label);
1502 return GS_ALL_DONE;
1505 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1506 if necessary. */
1508 tree
1509 build_and_jump (tree *label_p)
1511 if (label_p == NULL)
1512 /* If there's nowhere to jump, just fall through. */
1513 return NULL_TREE;
1515 if (*label_p == NULL_TREE)
1517 tree label = create_artificial_label (UNKNOWN_LOCATION);
1518 *label_p = label;
1521 return build1 (GOTO_EXPR, void_type_node, *label_p);
1524 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1525 This also involves building a label to jump to and communicating it to
1526 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1528 static enum gimplify_status
1529 gimplify_exit_expr (tree *expr_p)
1531 tree cond = TREE_OPERAND (*expr_p, 0);
1532 tree expr;
1534 expr = build_and_jump (&gimplify_ctxp->exit_label);
1535 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1536 *expr_p = expr;
1538 return GS_OK;
1541 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1542 different from its canonical type, wrap the whole thing inside a
1543 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1544 type.
1546 The canonical type of a COMPONENT_REF is the type of the field being
1547 referenced--unless the field is a bit-field which can be read directly
1548 in a smaller mode, in which case the canonical type is the
1549 sign-appropriate type corresponding to that mode. */
1551 static void
1552 canonicalize_component_ref (tree *expr_p)
1554 tree expr = *expr_p;
1555 tree type;
1557 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1559 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1560 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1561 else
1562 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1564 /* One could argue that all the stuff below is not necessary for
1565 the non-bitfield case and declare it a FE error if type
1566 adjustment would be needed. */
1567 if (TREE_TYPE (expr) != type)
1569 #ifdef ENABLE_TYPES_CHECKING
1570 tree old_type = TREE_TYPE (expr);
1571 #endif
1572 int type_quals;
1574 /* We need to preserve qualifiers and propagate them from
1575 operand 0. */
1576 type_quals = TYPE_QUALS (type)
1577 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1578 if (TYPE_QUALS (type) != type_quals)
1579 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1581 /* Set the type of the COMPONENT_REF to the underlying type. */
1582 TREE_TYPE (expr) = type;
1584 #ifdef ENABLE_TYPES_CHECKING
1585 /* It is now a FE error, if the conversion from the canonical
1586 type to the original expression type is not useless. */
1587 gcc_assert (useless_type_conversion_p (old_type, type));
1588 #endif
1592 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1593 to foo, embed that change in the ADDR_EXPR by converting
1594 T array[U];
1595 (T *)&array
1597 &array[L]
1598 where L is the lower bound. For simplicity, only do this for constant
1599 lower bound.
1600 The constraint is that the type of &array[L] is trivially convertible
1601 to T *. */
1603 static void
1604 canonicalize_addr_expr (tree *expr_p)
1606 tree expr = *expr_p;
1607 tree addr_expr = TREE_OPERAND (expr, 0);
1608 tree datype, ddatype, pddatype;
1610 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1611 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1612 || TREE_CODE (addr_expr) != ADDR_EXPR)
1613 return;
1615 /* The addr_expr type should be a pointer to an array. */
1616 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1617 if (TREE_CODE (datype) != ARRAY_TYPE)
1618 return;
1620 /* The pointer to element type shall be trivially convertible to
1621 the expression pointer type. */
1622 ddatype = TREE_TYPE (datype);
1623 pddatype = build_pointer_type (ddatype);
1624 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1625 pddatype))
1626 return;
1628 /* The lower bound and element sizes must be constant. */
1629 if (!TYPE_SIZE_UNIT (ddatype)
1630 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1631 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1632 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1633 return;
1635 /* All checks succeeded. Build a new node to merge the cast. */
1636 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1637 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1638 NULL_TREE, NULL_TREE);
1639 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1641 /* We can have stripped a required restrict qualifier above. */
1642 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1643 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1646 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1647 underneath as appropriate. */
1649 static enum gimplify_status
1650 gimplify_conversion (tree *expr_p)
1652 location_t loc = EXPR_LOCATION (*expr_p);
1653 gcc_assert (CONVERT_EXPR_P (*expr_p));
1655 /* Then strip away all but the outermost conversion. */
1656 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1658 /* And remove the outermost conversion if it's useless. */
1659 if (tree_ssa_useless_type_conversion (*expr_p))
1660 *expr_p = TREE_OPERAND (*expr_p, 0);
1662 /* If we still have a conversion at the toplevel,
1663 then canonicalize some constructs. */
1664 if (CONVERT_EXPR_P (*expr_p))
1666 tree sub = TREE_OPERAND (*expr_p, 0);
1668 /* If a NOP conversion is changing the type of a COMPONENT_REF
1669 expression, then canonicalize its type now in order to expose more
1670 redundant conversions. */
1671 if (TREE_CODE (sub) == COMPONENT_REF)
1672 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1674 /* If a NOP conversion is changing a pointer to array of foo
1675 to a pointer to foo, embed that change in the ADDR_EXPR. */
1676 else if (TREE_CODE (sub) == ADDR_EXPR)
1677 canonicalize_addr_expr (expr_p);
1680 /* If we have a conversion to a non-register type force the
1681 use of a VIEW_CONVERT_EXPR instead. */
1682 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1683 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1684 TREE_OPERAND (*expr_p, 0));
1686 return GS_OK;
1689 /* Nonlocal VLAs seen in the current function. */
1690 static struct pointer_set_t *nonlocal_vlas;
1692 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1693 DECL_VALUE_EXPR, and it's worth re-examining things. */
1695 static enum gimplify_status
1696 gimplify_var_or_parm_decl (tree *expr_p)
1698 tree decl = *expr_p;
1700 /* ??? If this is a local variable, and it has not been seen in any
1701 outer BIND_EXPR, then it's probably the result of a duplicate
1702 declaration, for which we've already issued an error. It would
1703 be really nice if the front end wouldn't leak these at all.
1704 Currently the only known culprit is C++ destructors, as seen
1705 in g++.old-deja/g++.jason/binding.C. */
1706 if (TREE_CODE (decl) == VAR_DECL
1707 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1708 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1709 && decl_function_context (decl) == current_function_decl)
1711 gcc_assert (seen_error ());
1712 return GS_ERROR;
1715 /* When within an OpenMP context, notice uses of variables. */
1716 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1717 return GS_ALL_DONE;
1719 /* If the decl is an alias for another expression, substitute it now. */
1720 if (DECL_HAS_VALUE_EXPR_P (decl))
1722 tree value_expr = DECL_VALUE_EXPR (decl);
1724 /* For referenced nonlocal VLAs add a decl for debugging purposes
1725 to the current function. */
1726 if (TREE_CODE (decl) == VAR_DECL
1727 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1728 && nonlocal_vlas != NULL
1729 && TREE_CODE (value_expr) == INDIRECT_REF
1730 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1731 && decl_function_context (decl) != current_function_decl)
1733 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1734 while (ctx
1735 && (ctx->region_type == ORT_WORKSHARE
1736 || ctx->region_type == ORT_SIMD))
1737 ctx = ctx->outer_context;
1738 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1740 tree copy = copy_node (decl), block;
1742 lang_hooks.dup_lang_specific_decl (copy);
1743 SET_DECL_RTL (copy, 0);
1744 TREE_USED (copy) = 1;
1745 block = DECL_INITIAL (current_function_decl);
1746 DECL_CHAIN (copy) = BLOCK_VARS (block);
1747 BLOCK_VARS (block) = copy;
1748 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1749 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1753 *expr_p = unshare_expr (value_expr);
1754 return GS_OK;
1757 return GS_ALL_DONE;
1760 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1762 static void
1763 recalculate_side_effects (tree t)
1765 enum tree_code code = TREE_CODE (t);
1766 int len = TREE_OPERAND_LENGTH (t);
1767 int i;
1769 switch (TREE_CODE_CLASS (code))
1771 case tcc_expression:
1772 switch (code)
1774 case INIT_EXPR:
1775 case MODIFY_EXPR:
1776 case VA_ARG_EXPR:
1777 case PREDECREMENT_EXPR:
1778 case PREINCREMENT_EXPR:
1779 case POSTDECREMENT_EXPR:
1780 case POSTINCREMENT_EXPR:
1781 /* All of these have side-effects, no matter what their
1782 operands are. */
1783 return;
1785 default:
1786 break;
1788 /* Fall through. */
1790 case tcc_comparison: /* a comparison expression */
1791 case tcc_unary: /* a unary arithmetic expression */
1792 case tcc_binary: /* a binary arithmetic expression */
1793 case tcc_reference: /* a reference */
1794 case tcc_vl_exp: /* a function call */
1795 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1796 for (i = 0; i < len; ++i)
1798 tree op = TREE_OPERAND (t, i);
1799 if (op && TREE_SIDE_EFFECTS (op))
1800 TREE_SIDE_EFFECTS (t) = 1;
1802 break;
1804 case tcc_constant:
1805 /* No side-effects. */
1806 return;
1808 default:
1809 gcc_unreachable ();
1813 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1814 node *EXPR_P.
1816 compound_lval
1817 : min_lval '[' val ']'
1818 | min_lval '.' ID
1819 | compound_lval '[' val ']'
1820 | compound_lval '.' ID
1822 This is not part of the original SIMPLE definition, which separates
1823 array and member references, but it seems reasonable to handle them
1824 together. Also, this way we don't run into problems with union
1825 aliasing; gcc requires that for accesses through a union to alias, the
1826 union reference must be explicit, which was not always the case when we
1827 were splitting up array and member refs.
1829 PRE_P points to the sequence where side effects that must happen before
1830 *EXPR_P should be stored.
1832 POST_P points to the sequence where side effects that must happen after
1833 *EXPR_P should be stored. */
1835 static enum gimplify_status
1836 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1837 fallback_t fallback)
1839 tree *p;
1840 enum gimplify_status ret = GS_ALL_DONE, tret;
1841 int i;
1842 location_t loc = EXPR_LOCATION (*expr_p);
1843 tree expr = *expr_p;
1845 /* Create a stack of the subexpressions so later we can walk them in
1846 order from inner to outer. */
1847 stack_vec<tree, 10> expr_stack;
1849 /* We can handle anything that get_inner_reference can deal with. */
1850 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1852 restart:
1853 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1854 if (TREE_CODE (*p) == INDIRECT_REF)
1855 *p = fold_indirect_ref_loc (loc, *p);
1857 if (handled_component_p (*p))
1859 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1860 additional COMPONENT_REFs. */
1861 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1862 && gimplify_var_or_parm_decl (p) == GS_OK)
1863 goto restart;
1864 else
1865 break;
1867 expr_stack.safe_push (*p);
1870 gcc_assert (expr_stack.length ());
1872 /* Now EXPR_STACK is a stack of pointers to all the refs we've
1873 walked through and P points to the innermost expression.
1875 Java requires that we elaborated nodes in source order. That
1876 means we must gimplify the inner expression followed by each of
1877 the indices, in order. But we can't gimplify the inner
1878 expression until we deal with any variable bounds, sizes, or
1879 positions in order to deal with PLACEHOLDER_EXPRs.
1881 So we do this in three steps. First we deal with the annotations
1882 for any variables in the components, then we gimplify the base,
1883 then we gimplify any indices, from left to right. */
1884 for (i = expr_stack.length () - 1; i >= 0; i--)
1886 tree t = expr_stack[i];
1888 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1890 /* Gimplify the low bound and element type size and put them into
1891 the ARRAY_REF. If these values are set, they have already been
1892 gimplified. */
1893 if (TREE_OPERAND (t, 2) == NULL_TREE)
1895 tree low = unshare_expr (array_ref_low_bound (t));
1896 if (!is_gimple_min_invariant (low))
1898 TREE_OPERAND (t, 2) = low;
1899 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1900 post_p, is_gimple_reg,
1901 fb_rvalue);
1902 ret = MIN (ret, tret);
1905 else
1907 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1908 is_gimple_reg, fb_rvalue);
1909 ret = MIN (ret, tret);
1912 if (TREE_OPERAND (t, 3) == NULL_TREE)
1914 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1915 tree elmt_size = unshare_expr (array_ref_element_size (t));
1916 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1918 /* Divide the element size by the alignment of the element
1919 type (above). */
1920 elmt_size
1921 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
1923 if (!is_gimple_min_invariant (elmt_size))
1925 TREE_OPERAND (t, 3) = elmt_size;
1926 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
1927 post_p, is_gimple_reg,
1928 fb_rvalue);
1929 ret = MIN (ret, tret);
1932 else
1934 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1935 is_gimple_reg, fb_rvalue);
1936 ret = MIN (ret, tret);
1939 else if (TREE_CODE (t) == COMPONENT_REF)
1941 /* Set the field offset into T and gimplify it. */
1942 if (TREE_OPERAND (t, 2) == NULL_TREE)
1944 tree offset = unshare_expr (component_ref_field_offset (t));
1945 tree field = TREE_OPERAND (t, 1);
1946 tree factor
1947 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1949 /* Divide the offset by its alignment. */
1950 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
1952 if (!is_gimple_min_invariant (offset))
1954 TREE_OPERAND (t, 2) = offset;
1955 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1956 post_p, is_gimple_reg,
1957 fb_rvalue);
1958 ret = MIN (ret, tret);
1961 else
1963 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1964 is_gimple_reg, fb_rvalue);
1965 ret = MIN (ret, tret);
1970 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1971 so as to match the min_lval predicate. Failure to do so may result
1972 in the creation of large aggregate temporaries. */
1973 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1974 fallback | fb_lvalue);
1975 ret = MIN (ret, tret);
1977 /* And finally, the indices and operands of ARRAY_REF. During this
1978 loop we also remove any useless conversions. */
1979 for (; expr_stack.length () > 0; )
1981 tree t = expr_stack.pop ();
1983 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1985 /* Gimplify the dimension. */
1986 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1988 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1989 is_gimple_val, fb_rvalue);
1990 ret = MIN (ret, tret);
1994 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1996 /* The innermost expression P may have originally had
1997 TREE_SIDE_EFFECTS set which would have caused all the outer
1998 expressions in *EXPR_P leading to P to also have had
1999 TREE_SIDE_EFFECTS set. */
2000 recalculate_side_effects (t);
2003 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2004 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2006 canonicalize_component_ref (expr_p);
2009 expr_stack.release ();
2011 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2013 return ret;
2016 /* Gimplify the self modifying expression pointed to by EXPR_P
2017 (++, --, +=, -=).
2019 PRE_P points to the list where side effects that must happen before
2020 *EXPR_P should be stored.
2022 POST_P points to the list where side effects that must happen after
2023 *EXPR_P should be stored.
2025 WANT_VALUE is nonzero iff we want to use the value of this expression
2026 in another expression.
2028 ARITH_TYPE is the type the computation should be performed in. */
2030 enum gimplify_status
2031 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2032 bool want_value, tree arith_type)
2034 enum tree_code code;
2035 tree lhs, lvalue, rhs, t1;
2036 gimple_seq post = NULL, *orig_post_p = post_p;
2037 bool postfix;
2038 enum tree_code arith_code;
2039 enum gimplify_status ret;
2040 location_t loc = EXPR_LOCATION (*expr_p);
2042 code = TREE_CODE (*expr_p);
2044 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2045 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2047 /* Prefix or postfix? */
2048 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2049 /* Faster to treat as prefix if result is not used. */
2050 postfix = want_value;
2051 else
2052 postfix = false;
2054 /* For postfix, make sure the inner expression's post side effects
2055 are executed after side effects from this expression. */
2056 if (postfix)
2057 post_p = &post;
2059 /* Add or subtract? */
2060 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2061 arith_code = PLUS_EXPR;
2062 else
2063 arith_code = MINUS_EXPR;
2065 /* Gimplify the LHS into a GIMPLE lvalue. */
2066 lvalue = TREE_OPERAND (*expr_p, 0);
2067 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2068 if (ret == GS_ERROR)
2069 return ret;
2071 /* Extract the operands to the arithmetic operation. */
2072 lhs = lvalue;
2073 rhs = TREE_OPERAND (*expr_p, 1);
2075 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2076 that as the result value and in the postqueue operation. */
2077 if (postfix)
2079 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2080 if (ret == GS_ERROR)
2081 return ret;
2083 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2086 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2087 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2089 rhs = convert_to_ptrofftype_loc (loc, rhs);
2090 if (arith_code == MINUS_EXPR)
2091 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2092 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2094 else
2095 t1 = fold_convert (TREE_TYPE (*expr_p),
2096 fold_build2 (arith_code, arith_type,
2097 fold_convert (arith_type, lhs),
2098 fold_convert (arith_type, rhs)));
2100 if (postfix)
2102 gimplify_assign (lvalue, t1, pre_p);
2103 gimplify_seq_add_seq (orig_post_p, post);
2104 *expr_p = lhs;
2105 return GS_ALL_DONE;
2107 else
2109 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2110 return GS_OK;
2114 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2116 static void
2117 maybe_with_size_expr (tree *expr_p)
2119 tree expr = *expr_p;
2120 tree type = TREE_TYPE (expr);
2121 tree size;
2123 /* If we've already wrapped this or the type is error_mark_node, we can't do
2124 anything. */
2125 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2126 || type == error_mark_node)
2127 return;
2129 /* If the size isn't known or is a constant, we have nothing to do. */
2130 size = TYPE_SIZE_UNIT (type);
2131 if (!size || TREE_CODE (size) == INTEGER_CST)
2132 return;
2134 /* Otherwise, make a WITH_SIZE_EXPR. */
2135 size = unshare_expr (size);
2136 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2137 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2140 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2141 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2142 the CALL_EXPR. */
2144 static enum gimplify_status
2145 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2147 bool (*test) (tree);
2148 fallback_t fb;
2150 /* In general, we allow lvalues for function arguments to avoid
2151 extra overhead of copying large aggregates out of even larger
2152 aggregates into temporaries only to copy the temporaries to
2153 the argument list. Make optimizers happy by pulling out to
2154 temporaries those types that fit in registers. */
2155 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2156 test = is_gimple_val, fb = fb_rvalue;
2157 else
2159 test = is_gimple_lvalue, fb = fb_either;
2160 /* Also strip a TARGET_EXPR that would force an extra copy. */
2161 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2163 tree init = TARGET_EXPR_INITIAL (*arg_p);
2164 if (init
2165 && !VOID_TYPE_P (TREE_TYPE (init)))
2166 *arg_p = init;
2170 /* If this is a variable sized type, we must remember the size. */
2171 maybe_with_size_expr (arg_p);
2173 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2174 /* Make sure arguments have the same location as the function call
2175 itself. */
2176 protected_set_expr_location (*arg_p, call_location);
2178 /* There is a sequence point before a function call. Side effects in
2179 the argument list must occur before the actual call. So, when
2180 gimplifying arguments, force gimplify_expr to use an internal
2181 post queue which is then appended to the end of PRE_P. */
2182 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2185 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2186 WANT_VALUE is true if the result of the call is desired. */
2188 static enum gimplify_status
2189 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2191 tree fndecl, parms, p, fnptrtype;
2192 enum gimplify_status ret;
2193 int i, nargs;
2194 gimple call;
2195 bool builtin_va_start_p = FALSE;
2196 location_t loc = EXPR_LOCATION (*expr_p);
2198 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2200 /* For reliable diagnostics during inlining, it is necessary that
2201 every call_expr be annotated with file and line. */
2202 if (! EXPR_HAS_LOCATION (*expr_p))
2203 SET_EXPR_LOCATION (*expr_p, input_location);
2205 if (fn_contains_cilk_spawn_p (cfun)
2206 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p)
2207 && !seen_error ())
2208 return (enum gimplify_status)
2209 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p, NULL);
2211 /* This may be a call to a builtin function.
2213 Builtin function calls may be transformed into different
2214 (and more efficient) builtin function calls under certain
2215 circumstances. Unfortunately, gimplification can muck things
2216 up enough that the builtin expanders are not aware that certain
2217 transformations are still valid.
2219 So we attempt transformation/gimplification of the call before
2220 we gimplify the CALL_EXPR. At this time we do not manage to
2221 transform all calls in the same manner as the expanders do, but
2222 we do transform most of them. */
2223 fndecl = get_callee_fndecl (*expr_p);
2224 if (fndecl
2225 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2226 switch (DECL_FUNCTION_CODE (fndecl))
2228 case BUILT_IN_VA_START:
2230 builtin_va_start_p = TRUE;
2231 if (call_expr_nargs (*expr_p) < 2)
2233 error ("too few arguments to function %<va_start%>");
2234 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2235 return GS_OK;
2238 if (fold_builtin_next_arg (*expr_p, true))
2240 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2241 return GS_OK;
2243 break;
2245 case BUILT_IN_LINE:
2247 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2248 *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2249 return GS_OK;
2251 case BUILT_IN_FILE:
2253 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2254 *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2255 return GS_OK;
2257 case BUILT_IN_FUNCTION:
2259 const char *function;
2260 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2261 *expr_p = build_string_literal (strlen (function) + 1, function);
2262 return GS_OK;
2264 default:
2267 if (fndecl && DECL_BUILT_IN (fndecl))
2269 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2270 if (new_tree && new_tree != *expr_p)
2272 /* There was a transformation of this call which computes the
2273 same value, but in a more efficient way. Return and try
2274 again. */
2275 *expr_p = new_tree;
2276 return GS_OK;
2280 /* Remember the original function pointer type. */
2281 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2283 /* There is a sequence point before the call, so any side effects in
2284 the calling expression must occur before the actual call. Force
2285 gimplify_expr to use an internal post queue. */
2286 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2287 is_gimple_call_addr, fb_rvalue);
2289 nargs = call_expr_nargs (*expr_p);
2291 /* Get argument types for verification. */
2292 fndecl = get_callee_fndecl (*expr_p);
2293 parms = NULL_TREE;
2294 if (fndecl)
2295 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2296 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2297 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2299 if (fndecl && DECL_ARGUMENTS (fndecl))
2300 p = DECL_ARGUMENTS (fndecl);
2301 else if (parms)
2302 p = parms;
2303 else
2304 p = NULL_TREE;
2305 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2308 /* If the last argument is __builtin_va_arg_pack () and it is not
2309 passed as a named argument, decrease the number of CALL_EXPR
2310 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2311 if (!p
2312 && i < nargs
2313 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2315 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2316 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2318 if (last_arg_fndecl
2319 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2320 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2321 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2323 tree call = *expr_p;
2325 --nargs;
2326 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2327 CALL_EXPR_FN (call),
2328 nargs, CALL_EXPR_ARGP (call));
2330 /* Copy all CALL_EXPR flags, location and block, except
2331 CALL_EXPR_VA_ARG_PACK flag. */
2332 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2333 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2334 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2335 = CALL_EXPR_RETURN_SLOT_OPT (call);
2336 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2337 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2339 /* Set CALL_EXPR_VA_ARG_PACK. */
2340 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2344 /* Finally, gimplify the function arguments. */
2345 if (nargs > 0)
2347 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2348 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2349 PUSH_ARGS_REVERSED ? i-- : i++)
2351 enum gimplify_status t;
2353 /* Avoid gimplifying the second argument to va_start, which needs to
2354 be the plain PARM_DECL. */
2355 if ((i != 1) || !builtin_va_start_p)
2357 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2358 EXPR_LOCATION (*expr_p));
2360 if (t == GS_ERROR)
2361 ret = GS_ERROR;
2366 /* Verify the function result. */
2367 if (want_value && fndecl
2368 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2370 error_at (loc, "using result of function returning %<void%>");
2371 ret = GS_ERROR;
2374 /* Try this again in case gimplification exposed something. */
2375 if (ret != GS_ERROR)
2377 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2379 if (new_tree && new_tree != *expr_p)
2381 /* There was a transformation of this call which computes the
2382 same value, but in a more efficient way. Return and try
2383 again. */
2384 *expr_p = new_tree;
2385 return GS_OK;
2388 else
2390 *expr_p = error_mark_node;
2391 return GS_ERROR;
2394 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2395 decl. This allows us to eliminate redundant or useless
2396 calls to "const" functions. */
2397 if (TREE_CODE (*expr_p) == CALL_EXPR)
2399 int flags = call_expr_flags (*expr_p);
2400 if (flags & (ECF_CONST | ECF_PURE)
2401 /* An infinite loop is considered a side effect. */
2402 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2403 TREE_SIDE_EFFECTS (*expr_p) = 0;
2406 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2407 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2408 form and delegate the creation of a GIMPLE_CALL to
2409 gimplify_modify_expr. This is always possible because when
2410 WANT_VALUE is true, the caller wants the result of this call into
2411 a temporary, which means that we will emit an INIT_EXPR in
2412 internal_get_tmp_var which will then be handled by
2413 gimplify_modify_expr. */
2414 if (!want_value)
2416 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2417 have to do is replicate it as a GIMPLE_CALL tuple. */
2418 gimple_stmt_iterator gsi;
2419 call = gimple_build_call_from_tree (*expr_p);
2420 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2421 notice_special_calls (call);
2422 gimplify_seq_add_stmt (pre_p, call);
2423 gsi = gsi_last (*pre_p);
2424 /* Don't fold stmts inside of target construct. We'll do it
2425 during omplower pass instead. */
2426 struct gimplify_omp_ctx *ctx;
2427 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2428 if (ctx->region_type == ORT_TARGET)
2429 break;
2430 if (ctx == NULL)
2431 fold_stmt (&gsi);
2432 *expr_p = NULL_TREE;
2434 else
2435 /* Remember the original function type. */
2436 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2437 CALL_EXPR_FN (*expr_p));
2439 return ret;
2442 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2443 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2445 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2446 condition is true or false, respectively. If null, we should generate
2447 our own to skip over the evaluation of this specific expression.
2449 LOCUS is the source location of the COND_EXPR.
2451 This function is the tree equivalent of do_jump.
2453 shortcut_cond_r should only be called by shortcut_cond_expr. */
2455 static tree
2456 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2457 location_t locus)
2459 tree local_label = NULL_TREE;
2460 tree t, expr = NULL;
2462 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2463 retain the shortcut semantics. Just insert the gotos here;
2464 shortcut_cond_expr will append the real blocks later. */
2465 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2467 location_t new_locus;
2469 /* Turn if (a && b) into
2471 if (a); else goto no;
2472 if (b) goto yes; else goto no;
2473 (no:) */
2475 if (false_label_p == NULL)
2476 false_label_p = &local_label;
2478 /* Keep the original source location on the first 'if'. */
2479 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2480 append_to_statement_list (t, &expr);
2482 /* Set the source location of the && on the second 'if'. */
2483 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2484 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2485 new_locus);
2486 append_to_statement_list (t, &expr);
2488 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2490 location_t new_locus;
2492 /* Turn if (a || b) into
2494 if (a) goto yes;
2495 if (b) goto yes; else goto no;
2496 (yes:) */
2498 if (true_label_p == NULL)
2499 true_label_p = &local_label;
2501 /* Keep the original source location on the first 'if'. */
2502 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2503 append_to_statement_list (t, &expr);
2505 /* Set the source location of the || on the second 'if'. */
2506 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2507 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2508 new_locus);
2509 append_to_statement_list (t, &expr);
2511 else if (TREE_CODE (pred) == COND_EXPR
2512 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2513 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2515 location_t new_locus;
2517 /* As long as we're messing with gotos, turn if (a ? b : c) into
2518 if (a)
2519 if (b) goto yes; else goto no;
2520 else
2521 if (c) goto yes; else goto no;
2523 Don't do this if one of the arms has void type, which can happen
2524 in C++ when the arm is throw. */
2526 /* Keep the original source location on the first 'if'. Set the source
2527 location of the ? on the second 'if'. */
2528 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2529 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2530 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2531 false_label_p, locus),
2532 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2533 false_label_p, new_locus));
2535 else
2537 expr = build3 (COND_EXPR, void_type_node, pred,
2538 build_and_jump (true_label_p),
2539 build_and_jump (false_label_p));
2540 SET_EXPR_LOCATION (expr, locus);
2543 if (local_label)
2545 t = build1 (LABEL_EXPR, void_type_node, local_label);
2546 append_to_statement_list (t, &expr);
2549 return expr;
2552 /* Given a conditional expression EXPR with short-circuit boolean
2553 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2554 predicate apart into the equivalent sequence of conditionals. */
2556 static tree
2557 shortcut_cond_expr (tree expr)
2559 tree pred = TREE_OPERAND (expr, 0);
2560 tree then_ = TREE_OPERAND (expr, 1);
2561 tree else_ = TREE_OPERAND (expr, 2);
2562 tree true_label, false_label, end_label, t;
2563 tree *true_label_p;
2564 tree *false_label_p;
2565 bool emit_end, emit_false, jump_over_else;
2566 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2567 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2569 /* First do simple transformations. */
2570 if (!else_se)
2572 /* If there is no 'else', turn
2573 if (a && b) then c
2574 into
2575 if (a) if (b) then c. */
2576 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2578 /* Keep the original source location on the first 'if'. */
2579 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2580 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2581 /* Set the source location of the && on the second 'if'. */
2582 if (EXPR_HAS_LOCATION (pred))
2583 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2584 then_ = shortcut_cond_expr (expr);
2585 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2586 pred = TREE_OPERAND (pred, 0);
2587 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2588 SET_EXPR_LOCATION (expr, locus);
2592 if (!then_se)
2594 /* If there is no 'then', turn
2595 if (a || b); else d
2596 into
2597 if (a); else if (b); else d. */
2598 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2600 /* Keep the original source location on the first 'if'. */
2601 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2602 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2603 /* Set the source location of the || on the second 'if'. */
2604 if (EXPR_HAS_LOCATION (pred))
2605 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2606 else_ = shortcut_cond_expr (expr);
2607 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2608 pred = TREE_OPERAND (pred, 0);
2609 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2610 SET_EXPR_LOCATION (expr, locus);
2614 /* If we're done, great. */
2615 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2616 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2617 return expr;
2619 /* Otherwise we need to mess with gotos. Change
2620 if (a) c; else d;
2622 if (a); else goto no;
2623 c; goto end;
2624 no: d; end:
2625 and recursively gimplify the condition. */
2627 true_label = false_label = end_label = NULL_TREE;
2629 /* If our arms just jump somewhere, hijack those labels so we don't
2630 generate jumps to jumps. */
2632 if (then_
2633 && TREE_CODE (then_) == GOTO_EXPR
2634 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2636 true_label = GOTO_DESTINATION (then_);
2637 then_ = NULL;
2638 then_se = false;
2641 if (else_
2642 && TREE_CODE (else_) == GOTO_EXPR
2643 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2645 false_label = GOTO_DESTINATION (else_);
2646 else_ = NULL;
2647 else_se = false;
2650 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2651 if (true_label)
2652 true_label_p = &true_label;
2653 else
2654 true_label_p = NULL;
2656 /* The 'else' branch also needs a label if it contains interesting code. */
2657 if (false_label || else_se)
2658 false_label_p = &false_label;
2659 else
2660 false_label_p = NULL;
2662 /* If there was nothing else in our arms, just forward the label(s). */
2663 if (!then_se && !else_se)
2664 return shortcut_cond_r (pred, true_label_p, false_label_p,
2665 EXPR_LOC_OR_LOC (expr, input_location));
2667 /* If our last subexpression already has a terminal label, reuse it. */
2668 if (else_se)
2669 t = expr_last (else_);
2670 else if (then_se)
2671 t = expr_last (then_);
2672 else
2673 t = NULL;
2674 if (t && TREE_CODE (t) == LABEL_EXPR)
2675 end_label = LABEL_EXPR_LABEL (t);
2677 /* If we don't care about jumping to the 'else' branch, jump to the end
2678 if the condition is false. */
2679 if (!false_label_p)
2680 false_label_p = &end_label;
2682 /* We only want to emit these labels if we aren't hijacking them. */
2683 emit_end = (end_label == NULL_TREE);
2684 emit_false = (false_label == NULL_TREE);
2686 /* We only emit the jump over the else clause if we have to--if the
2687 then clause may fall through. Otherwise we can wind up with a
2688 useless jump and a useless label at the end of gimplified code,
2689 which will cause us to think that this conditional as a whole
2690 falls through even if it doesn't. If we then inline a function
2691 which ends with such a condition, that can cause us to issue an
2692 inappropriate warning about control reaching the end of a
2693 non-void function. */
2694 jump_over_else = block_may_fallthru (then_);
2696 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2697 EXPR_LOC_OR_LOC (expr, input_location));
2699 expr = NULL;
2700 append_to_statement_list (pred, &expr);
2702 append_to_statement_list (then_, &expr);
2703 if (else_se)
2705 if (jump_over_else)
2707 tree last = expr_last (expr);
2708 t = build_and_jump (&end_label);
2709 if (EXPR_HAS_LOCATION (last))
2710 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2711 append_to_statement_list (t, &expr);
2713 if (emit_false)
2715 t = build1 (LABEL_EXPR, void_type_node, false_label);
2716 append_to_statement_list (t, &expr);
2718 append_to_statement_list (else_, &expr);
2720 if (emit_end && end_label)
2722 t = build1 (LABEL_EXPR, void_type_node, end_label);
2723 append_to_statement_list (t, &expr);
2726 return expr;
2729 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2731 tree
2732 gimple_boolify (tree expr)
2734 tree type = TREE_TYPE (expr);
2735 location_t loc = EXPR_LOCATION (expr);
2737 if (TREE_CODE (expr) == NE_EXPR
2738 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2739 && integer_zerop (TREE_OPERAND (expr, 1)))
2741 tree call = TREE_OPERAND (expr, 0);
2742 tree fn = get_callee_fndecl (call);
2744 /* For __builtin_expect ((long) (x), y) recurse into x as well
2745 if x is truth_value_p. */
2746 if (fn
2747 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2748 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2749 && call_expr_nargs (call) == 2)
2751 tree arg = CALL_EXPR_ARG (call, 0);
2752 if (arg)
2754 if (TREE_CODE (arg) == NOP_EXPR
2755 && TREE_TYPE (arg) == TREE_TYPE (call))
2756 arg = TREE_OPERAND (arg, 0);
2757 if (truth_value_p (TREE_CODE (arg)))
2759 arg = gimple_boolify (arg);
2760 CALL_EXPR_ARG (call, 0)
2761 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2767 switch (TREE_CODE (expr))
2769 case TRUTH_AND_EXPR:
2770 case TRUTH_OR_EXPR:
2771 case TRUTH_XOR_EXPR:
2772 case TRUTH_ANDIF_EXPR:
2773 case TRUTH_ORIF_EXPR:
2774 /* Also boolify the arguments of truth exprs. */
2775 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2776 /* FALLTHRU */
2778 case TRUTH_NOT_EXPR:
2779 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2781 /* These expressions always produce boolean results. */
2782 if (TREE_CODE (type) != BOOLEAN_TYPE)
2783 TREE_TYPE (expr) = boolean_type_node;
2784 return expr;
2786 case ANNOTATE_EXPR:
2787 if ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1))
2788 == annot_expr_ivdep_kind)
2790 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2791 if (TREE_CODE (type) != BOOLEAN_TYPE)
2792 TREE_TYPE (expr) = boolean_type_node;
2793 return expr;
2795 /* FALLTHRU */
2797 default:
2798 if (COMPARISON_CLASS_P (expr))
2800 /* There expressions always prduce boolean results. */
2801 if (TREE_CODE (type) != BOOLEAN_TYPE)
2802 TREE_TYPE (expr) = boolean_type_node;
2803 return expr;
2805 /* Other expressions that get here must have boolean values, but
2806 might need to be converted to the appropriate mode. */
2807 if (TREE_CODE (type) == BOOLEAN_TYPE)
2808 return expr;
2809 return fold_convert_loc (loc, boolean_type_node, expr);
2813 /* Given a conditional expression *EXPR_P without side effects, gimplify
2814 its operands. New statements are inserted to PRE_P. */
2816 static enum gimplify_status
2817 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2819 tree expr = *expr_p, cond;
2820 enum gimplify_status ret, tret;
2821 enum tree_code code;
2823 cond = gimple_boolify (COND_EXPR_COND (expr));
2825 /* We need to handle && and || specially, as their gimplification
2826 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2827 code = TREE_CODE (cond);
2828 if (code == TRUTH_ANDIF_EXPR)
2829 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2830 else if (code == TRUTH_ORIF_EXPR)
2831 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2832 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2833 COND_EXPR_COND (*expr_p) = cond;
2835 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2836 is_gimple_val, fb_rvalue);
2837 ret = MIN (ret, tret);
2838 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2839 is_gimple_val, fb_rvalue);
2841 return MIN (ret, tret);
2844 /* Return true if evaluating EXPR could trap.
2845 EXPR is GENERIC, while tree_could_trap_p can be called
2846 only on GIMPLE. */
2848 static bool
2849 generic_expr_could_trap_p (tree expr)
2851 unsigned i, n;
2853 if (!expr || is_gimple_val (expr))
2854 return false;
2856 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2857 return true;
2859 n = TREE_OPERAND_LENGTH (expr);
2860 for (i = 0; i < n; i++)
2861 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2862 return true;
2864 return false;
2867 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2868 into
2870 if (p) if (p)
2871 t1 = a; a;
2872 else or else
2873 t1 = b; b;
2876 The second form is used when *EXPR_P is of type void.
2878 PRE_P points to the list where side effects that must happen before
2879 *EXPR_P should be stored. */
2881 static enum gimplify_status
2882 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2884 tree expr = *expr_p;
2885 tree type = TREE_TYPE (expr);
2886 location_t loc = EXPR_LOCATION (expr);
2887 tree tmp, arm1, arm2;
2888 enum gimplify_status ret;
2889 tree label_true, label_false, label_cont;
2890 bool have_then_clause_p, have_else_clause_p;
2891 gimple gimple_cond;
2892 enum tree_code pred_code;
2893 gimple_seq seq = NULL;
2895 /* If this COND_EXPR has a value, copy the values into a temporary within
2896 the arms. */
2897 if (!VOID_TYPE_P (type))
2899 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
2900 tree result;
2902 /* If either an rvalue is ok or we do not require an lvalue, create the
2903 temporary. But we cannot do that if the type is addressable. */
2904 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
2905 && !TREE_ADDRESSABLE (type))
2907 if (gimplify_ctxp->allow_rhs_cond_expr
2908 /* If either branch has side effects or could trap, it can't be
2909 evaluated unconditionally. */
2910 && !TREE_SIDE_EFFECTS (then_)
2911 && !generic_expr_could_trap_p (then_)
2912 && !TREE_SIDE_EFFECTS (else_)
2913 && !generic_expr_could_trap_p (else_))
2914 return gimplify_pure_cond_expr (expr_p, pre_p);
2916 tmp = create_tmp_var (type, "iftmp");
2917 result = tmp;
2920 /* Otherwise, only create and copy references to the values. */
2921 else
2923 type = build_pointer_type (type);
2925 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2926 then_ = build_fold_addr_expr_loc (loc, then_);
2928 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2929 else_ = build_fold_addr_expr_loc (loc, else_);
2931 expr
2932 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
2934 tmp = create_tmp_var (type, "iftmp");
2935 result = build_simple_mem_ref_loc (loc, tmp);
2938 /* Build the new then clause, `tmp = then_;'. But don't build the
2939 assignment if the value is void; in C++ it can be if it's a throw. */
2940 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2941 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
2943 /* Similarly, build the new else clause, `tmp = else_;'. */
2944 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2945 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
2947 TREE_TYPE (expr) = void_type_node;
2948 recalculate_side_effects (expr);
2950 /* Move the COND_EXPR to the prequeue. */
2951 gimplify_stmt (&expr, pre_p);
2953 *expr_p = result;
2954 return GS_ALL_DONE;
2957 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
2958 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
2959 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
2960 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
2962 /* Make sure the condition has BOOLEAN_TYPE. */
2963 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2965 /* Break apart && and || conditions. */
2966 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2967 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2969 expr = shortcut_cond_expr (expr);
2971 if (expr != *expr_p)
2973 *expr_p = expr;
2975 /* We can't rely on gimplify_expr to re-gimplify the expanded
2976 form properly, as cleanups might cause the target labels to be
2977 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2978 set up a conditional context. */
2979 gimple_push_condition ();
2980 gimplify_stmt (expr_p, &seq);
2981 gimple_pop_condition (pre_p);
2982 gimple_seq_add_seq (pre_p, seq);
2984 return GS_ALL_DONE;
2988 /* Now do the normal gimplification. */
2990 /* Gimplify condition. */
2991 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2992 fb_rvalue);
2993 if (ret == GS_ERROR)
2994 return GS_ERROR;
2995 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2997 gimple_push_condition ();
2999 have_then_clause_p = have_else_clause_p = false;
3000 if (TREE_OPERAND (expr, 1) != NULL
3001 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3002 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3003 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3004 == current_function_decl)
3005 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3006 have different locations, otherwise we end up with incorrect
3007 location information on the branches. */
3008 && (optimize
3009 || !EXPR_HAS_LOCATION (expr)
3010 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3011 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3013 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3014 have_then_clause_p = true;
3016 else
3017 label_true = create_artificial_label (UNKNOWN_LOCATION);
3018 if (TREE_OPERAND (expr, 2) != NULL
3019 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3020 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3021 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3022 == current_function_decl)
3023 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3024 have different locations, otherwise we end up with incorrect
3025 location information on the branches. */
3026 && (optimize
3027 || !EXPR_HAS_LOCATION (expr)
3028 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3029 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3031 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3032 have_else_clause_p = true;
3034 else
3035 label_false = create_artificial_label (UNKNOWN_LOCATION);
3037 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3038 &arm2);
3040 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3041 label_false);
3043 gimplify_seq_add_stmt (&seq, gimple_cond);
3044 label_cont = NULL_TREE;
3045 if (!have_then_clause_p)
3047 /* For if (...) {} else { code; } put label_true after
3048 the else block. */
3049 if (TREE_OPERAND (expr, 1) == NULL_TREE
3050 && !have_else_clause_p
3051 && TREE_OPERAND (expr, 2) != NULL_TREE)
3052 label_cont = label_true;
3053 else
3055 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3056 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3057 /* For if (...) { code; } else {} or
3058 if (...) { code; } else goto label; or
3059 if (...) { code; return; } else { ... }
3060 label_cont isn't needed. */
3061 if (!have_else_clause_p
3062 && TREE_OPERAND (expr, 2) != NULL_TREE
3063 && gimple_seq_may_fallthru (seq))
3065 gimple g;
3066 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3068 g = gimple_build_goto (label_cont);
3070 /* GIMPLE_COND's are very low level; they have embedded
3071 gotos. This particular embedded goto should not be marked
3072 with the location of the original COND_EXPR, as it would
3073 correspond to the COND_EXPR's condition, not the ELSE or the
3074 THEN arms. To avoid marking it with the wrong location, flag
3075 it as "no location". */
3076 gimple_set_do_not_emit_location (g);
3078 gimplify_seq_add_stmt (&seq, g);
3082 if (!have_else_clause_p)
3084 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3085 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3087 if (label_cont)
3088 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3090 gimple_pop_condition (pre_p);
3091 gimple_seq_add_seq (pre_p, seq);
3093 if (ret == GS_ERROR)
3094 ; /* Do nothing. */
3095 else if (have_then_clause_p || have_else_clause_p)
3096 ret = GS_ALL_DONE;
3097 else
3099 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3100 expr = TREE_OPERAND (expr, 0);
3101 gimplify_stmt (&expr, pre_p);
3104 *expr_p = NULL;
3105 return ret;
3108 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3109 to be marked addressable.
3111 We cannot rely on such an expression being directly markable if a temporary
3112 has been created by the gimplification. In this case, we create another
3113 temporary and initialize it with a copy, which will become a store after we
3114 mark it addressable. This can happen if the front-end passed us something
3115 that it could not mark addressable yet, like a Fortran pass-by-reference
3116 parameter (int) floatvar. */
3118 static void
3119 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3121 while (handled_component_p (*expr_p))
3122 expr_p = &TREE_OPERAND (*expr_p, 0);
3123 if (is_gimple_reg (*expr_p))
3124 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3127 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3128 a call to __builtin_memcpy. */
3130 static enum gimplify_status
3131 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3132 gimple_seq *seq_p)
3134 tree t, to, to_ptr, from, from_ptr;
3135 gimple gs;
3136 location_t loc = EXPR_LOCATION (*expr_p);
3138 to = TREE_OPERAND (*expr_p, 0);
3139 from = TREE_OPERAND (*expr_p, 1);
3141 /* Mark the RHS addressable. Beware that it may not be possible to do so
3142 directly if a temporary has been created by the gimplification. */
3143 prepare_gimple_addressable (&from, seq_p);
3145 mark_addressable (from);
3146 from_ptr = build_fold_addr_expr_loc (loc, from);
3147 gimplify_arg (&from_ptr, seq_p, loc);
3149 mark_addressable (to);
3150 to_ptr = build_fold_addr_expr_loc (loc, to);
3151 gimplify_arg (&to_ptr, seq_p, loc);
3153 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3155 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3157 if (want_value)
3159 /* tmp = memcpy() */
3160 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3161 gimple_call_set_lhs (gs, t);
3162 gimplify_seq_add_stmt (seq_p, gs);
3164 *expr_p = build_simple_mem_ref (t);
3165 return GS_ALL_DONE;
3168 gimplify_seq_add_stmt (seq_p, gs);
3169 *expr_p = NULL;
3170 return GS_ALL_DONE;
3173 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3174 a call to __builtin_memset. In this case we know that the RHS is
3175 a CONSTRUCTOR with an empty element list. */
3177 static enum gimplify_status
3178 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3179 gimple_seq *seq_p)
3181 tree t, from, to, to_ptr;
3182 gimple gs;
3183 location_t loc = EXPR_LOCATION (*expr_p);
3185 /* Assert our assumptions, to abort instead of producing wrong code
3186 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3187 not be immediately exposed. */
3188 from = TREE_OPERAND (*expr_p, 1);
3189 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3190 from = TREE_OPERAND (from, 0);
3192 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3193 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3195 /* Now proceed. */
3196 to = TREE_OPERAND (*expr_p, 0);
3198 to_ptr = build_fold_addr_expr_loc (loc, to);
3199 gimplify_arg (&to_ptr, seq_p, loc);
3200 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3202 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3204 if (want_value)
3206 /* tmp = memset() */
3207 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3208 gimple_call_set_lhs (gs, t);
3209 gimplify_seq_add_stmt (seq_p, gs);
3211 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3212 return GS_ALL_DONE;
3215 gimplify_seq_add_stmt (seq_p, gs);
3216 *expr_p = NULL;
3217 return GS_ALL_DONE;
3220 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3221 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3222 assignment. Return non-null if we detect a potential overlap. */
3224 struct gimplify_init_ctor_preeval_data
3226 /* The base decl of the lhs object. May be NULL, in which case we
3227 have to assume the lhs is indirect. */
3228 tree lhs_base_decl;
3230 /* The alias set of the lhs object. */
3231 alias_set_type lhs_alias_set;
3234 static tree
3235 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3237 struct gimplify_init_ctor_preeval_data *data
3238 = (struct gimplify_init_ctor_preeval_data *) xdata;
3239 tree t = *tp;
3241 /* If we find the base object, obviously we have overlap. */
3242 if (data->lhs_base_decl == t)
3243 return t;
3245 /* If the constructor component is indirect, determine if we have a
3246 potential overlap with the lhs. The only bits of information we
3247 have to go on at this point are addressability and alias sets. */
3248 if ((INDIRECT_REF_P (t)
3249 || TREE_CODE (t) == MEM_REF)
3250 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3251 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3252 return t;
3254 /* If the constructor component is a call, determine if it can hide a
3255 potential overlap with the lhs through an INDIRECT_REF like above.
3256 ??? Ugh - this is completely broken. In fact this whole analysis
3257 doesn't look conservative. */
3258 if (TREE_CODE (t) == CALL_EXPR)
3260 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3262 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3263 if (POINTER_TYPE_P (TREE_VALUE (type))
3264 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3265 && alias_sets_conflict_p (data->lhs_alias_set,
3266 get_alias_set
3267 (TREE_TYPE (TREE_VALUE (type)))))
3268 return t;
3271 if (IS_TYPE_OR_DECL_P (t))
3272 *walk_subtrees = 0;
3273 return NULL;
3276 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3277 force values that overlap with the lhs (as described by *DATA)
3278 into temporaries. */
3280 static void
3281 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3282 struct gimplify_init_ctor_preeval_data *data)
3284 enum gimplify_status one;
3286 /* If the value is constant, then there's nothing to pre-evaluate. */
3287 if (TREE_CONSTANT (*expr_p))
3289 /* Ensure it does not have side effects, it might contain a reference to
3290 the object we're initializing. */
3291 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3292 return;
3295 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3296 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3297 return;
3299 /* Recurse for nested constructors. */
3300 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3302 unsigned HOST_WIDE_INT ix;
3303 constructor_elt *ce;
3304 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3306 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3307 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3309 return;
3312 /* If this is a variable sized type, we must remember the size. */
3313 maybe_with_size_expr (expr_p);
3315 /* Gimplify the constructor element to something appropriate for the rhs
3316 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3317 the gimplifier will consider this a store to memory. Doing this
3318 gimplification now means that we won't have to deal with complicated
3319 language-specific trees, nor trees like SAVE_EXPR that can induce
3320 exponential search behavior. */
3321 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3322 if (one == GS_ERROR)
3324 *expr_p = NULL;
3325 return;
3328 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3329 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3330 always be true for all scalars, since is_gimple_mem_rhs insists on a
3331 temporary variable for them. */
3332 if (DECL_P (*expr_p))
3333 return;
3335 /* If this is of variable size, we have no choice but to assume it doesn't
3336 overlap since we can't make a temporary for it. */
3337 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3338 return;
3340 /* Otherwise, we must search for overlap ... */
3341 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3342 return;
3344 /* ... and if found, force the value into a temporary. */
3345 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3348 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3349 a RANGE_EXPR in a CONSTRUCTOR for an array.
3351 var = lower;
3352 loop_entry:
3353 object[var] = value;
3354 if (var == upper)
3355 goto loop_exit;
3356 var = var + 1;
3357 goto loop_entry;
3358 loop_exit:
3360 We increment var _after_ the loop exit check because we might otherwise
3361 fail if upper == TYPE_MAX_VALUE (type for upper).
3363 Note that we never have to deal with SAVE_EXPRs here, because this has
3364 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3366 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3367 gimple_seq *, bool);
3369 static void
3370 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3371 tree value, tree array_elt_type,
3372 gimple_seq *pre_p, bool cleared)
3374 tree loop_entry_label, loop_exit_label, fall_thru_label;
3375 tree var, var_type, cref, tmp;
3377 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3378 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3379 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3381 /* Create and initialize the index variable. */
3382 var_type = TREE_TYPE (upper);
3383 var = create_tmp_var (var_type, NULL);
3384 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3386 /* Add the loop entry label. */
3387 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3389 /* Build the reference. */
3390 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3391 var, NULL_TREE, NULL_TREE);
3393 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3394 the store. Otherwise just assign value to the reference. */
3396 if (TREE_CODE (value) == CONSTRUCTOR)
3397 /* NB we might have to call ourself recursively through
3398 gimplify_init_ctor_eval if the value is a constructor. */
3399 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3400 pre_p, cleared);
3401 else
3402 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3404 /* We exit the loop when the index var is equal to the upper bound. */
3405 gimplify_seq_add_stmt (pre_p,
3406 gimple_build_cond (EQ_EXPR, var, upper,
3407 loop_exit_label, fall_thru_label));
3409 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3411 /* Otherwise, increment the index var... */
3412 tmp = build2 (PLUS_EXPR, var_type, var,
3413 fold_convert (var_type, integer_one_node));
3414 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3416 /* ...and jump back to the loop entry. */
3417 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3419 /* Add the loop exit label. */
3420 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3423 /* Return true if FDECL is accessing a field that is zero sized. */
3425 static bool
3426 zero_sized_field_decl (const_tree fdecl)
3428 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3429 && integer_zerop (DECL_SIZE (fdecl)))
3430 return true;
3431 return false;
3434 /* Return true if TYPE is zero sized. */
3436 static bool
3437 zero_sized_type (const_tree type)
3439 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3440 && integer_zerop (TYPE_SIZE (type)))
3441 return true;
3442 return false;
3445 /* A subroutine of gimplify_init_constructor. Generate individual
3446 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3447 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3448 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3449 zeroed first. */
3451 static void
3452 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3453 gimple_seq *pre_p, bool cleared)
3455 tree array_elt_type = NULL;
3456 unsigned HOST_WIDE_INT ix;
3457 tree purpose, value;
3459 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3460 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3462 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3464 tree cref;
3466 /* NULL values are created above for gimplification errors. */
3467 if (value == NULL)
3468 continue;
3470 if (cleared && initializer_zerop (value))
3471 continue;
3473 /* ??? Here's to hoping the front end fills in all of the indices,
3474 so we don't have to figure out what's missing ourselves. */
3475 gcc_assert (purpose);
3477 /* Skip zero-sized fields, unless value has side-effects. This can
3478 happen with calls to functions returning a zero-sized type, which
3479 we shouldn't discard. As a number of downstream passes don't
3480 expect sets of zero-sized fields, we rely on the gimplification of
3481 the MODIFY_EXPR we make below to drop the assignment statement. */
3482 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3483 continue;
3485 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3486 whole range. */
3487 if (TREE_CODE (purpose) == RANGE_EXPR)
3489 tree lower = TREE_OPERAND (purpose, 0);
3490 tree upper = TREE_OPERAND (purpose, 1);
3492 /* If the lower bound is equal to upper, just treat it as if
3493 upper was the index. */
3494 if (simple_cst_equal (lower, upper))
3495 purpose = upper;
3496 else
3498 gimplify_init_ctor_eval_range (object, lower, upper, value,
3499 array_elt_type, pre_p, cleared);
3500 continue;
3504 if (array_elt_type)
3506 /* Do not use bitsizetype for ARRAY_REF indices. */
3507 if (TYPE_DOMAIN (TREE_TYPE (object)))
3508 purpose
3509 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3510 purpose);
3511 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3512 purpose, NULL_TREE, NULL_TREE);
3514 else
3516 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3517 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3518 unshare_expr (object), purpose, NULL_TREE);
3521 if (TREE_CODE (value) == CONSTRUCTOR
3522 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3523 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3524 pre_p, cleared);
3525 else
3527 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3528 gimplify_and_add (init, pre_p);
3529 ggc_free (init);
3534 /* Return the appropriate RHS predicate for this LHS. */
3536 gimple_predicate
3537 rhs_predicate_for (tree lhs)
3539 if (is_gimple_reg (lhs))
3540 return is_gimple_reg_rhs_or_call;
3541 else
3542 return is_gimple_mem_rhs_or_call;
3545 /* Gimplify a C99 compound literal expression. This just means adding
3546 the DECL_EXPR before the current statement and using its anonymous
3547 decl instead. */
3549 static enum gimplify_status
3550 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3551 bool (*gimple_test_f) (tree),
3552 fallback_t fallback)
3554 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3555 tree decl = DECL_EXPR_DECL (decl_s);
3556 tree init = DECL_INITIAL (decl);
3557 /* Mark the decl as addressable if the compound literal
3558 expression is addressable now, otherwise it is marked too late
3559 after we gimplify the initialization expression. */
3560 if (TREE_ADDRESSABLE (*expr_p))
3561 TREE_ADDRESSABLE (decl) = 1;
3562 /* Otherwise, if we don't need an lvalue and have a literal directly
3563 substitute it. Check if it matches the gimple predicate, as
3564 otherwise we'd generate a new temporary, and we can as well just
3565 use the decl we already have. */
3566 else if (!TREE_ADDRESSABLE (decl)
3567 && init
3568 && (fallback & fb_lvalue) == 0
3569 && gimple_test_f (init))
3571 *expr_p = init;
3572 return GS_OK;
3575 /* Preliminarily mark non-addressed complex variables as eligible
3576 for promotion to gimple registers. We'll transform their uses
3577 as we find them. */
3578 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3579 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3580 && !TREE_THIS_VOLATILE (decl)
3581 && !needs_to_live_in_memory (decl))
3582 DECL_GIMPLE_REG_P (decl) = 1;
3584 /* If the decl is not addressable, then it is being used in some
3585 expression or on the right hand side of a statement, and it can
3586 be put into a readonly data section. */
3587 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3588 TREE_READONLY (decl) = 1;
3590 /* This decl isn't mentioned in the enclosing block, so add it to the
3591 list of temps. FIXME it seems a bit of a kludge to say that
3592 anonymous artificial vars aren't pushed, but everything else is. */
3593 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3594 gimple_add_tmp_var (decl);
3596 gimplify_and_add (decl_s, pre_p);
3597 *expr_p = decl;
3598 return GS_OK;
3601 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3602 return a new CONSTRUCTOR if something changed. */
3604 static tree
3605 optimize_compound_literals_in_ctor (tree orig_ctor)
3607 tree ctor = orig_ctor;
3608 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3609 unsigned int idx, num = vec_safe_length (elts);
3611 for (idx = 0; idx < num; idx++)
3613 tree value = (*elts)[idx].value;
3614 tree newval = value;
3615 if (TREE_CODE (value) == CONSTRUCTOR)
3616 newval = optimize_compound_literals_in_ctor (value);
3617 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3619 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3620 tree decl = DECL_EXPR_DECL (decl_s);
3621 tree init = DECL_INITIAL (decl);
3623 if (!TREE_ADDRESSABLE (value)
3624 && !TREE_ADDRESSABLE (decl)
3625 && init
3626 && TREE_CODE (init) == CONSTRUCTOR)
3627 newval = optimize_compound_literals_in_ctor (init);
3629 if (newval == value)
3630 continue;
3632 if (ctor == orig_ctor)
3634 ctor = copy_node (orig_ctor);
3635 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3636 elts = CONSTRUCTOR_ELTS (ctor);
3638 (*elts)[idx].value = newval;
3640 return ctor;
3643 /* A subroutine of gimplify_modify_expr. Break out elements of a
3644 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3646 Note that we still need to clear any elements that don't have explicit
3647 initializers, so if not all elements are initialized we keep the
3648 original MODIFY_EXPR, we just remove all of the constructor elements.
3650 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3651 GS_ERROR if we would have to create a temporary when gimplifying
3652 this constructor. Otherwise, return GS_OK.
3654 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3656 static enum gimplify_status
3657 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3658 bool want_value, bool notify_temp_creation)
3660 tree object, ctor, type;
3661 enum gimplify_status ret;
3662 vec<constructor_elt, va_gc> *elts;
3664 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3666 if (!notify_temp_creation)
3668 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3669 is_gimple_lvalue, fb_lvalue);
3670 if (ret == GS_ERROR)
3671 return ret;
3674 object = TREE_OPERAND (*expr_p, 0);
3675 ctor = TREE_OPERAND (*expr_p, 1) =
3676 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3677 type = TREE_TYPE (ctor);
3678 elts = CONSTRUCTOR_ELTS (ctor);
3679 ret = GS_ALL_DONE;
3681 switch (TREE_CODE (type))
3683 case RECORD_TYPE:
3684 case UNION_TYPE:
3685 case QUAL_UNION_TYPE:
3686 case ARRAY_TYPE:
3688 struct gimplify_init_ctor_preeval_data preeval_data;
3689 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3690 bool cleared, complete_p, valid_const_initializer;
3692 /* Aggregate types must lower constructors to initialization of
3693 individual elements. The exception is that a CONSTRUCTOR node
3694 with no elements indicates zero-initialization of the whole. */
3695 if (vec_safe_is_empty (elts))
3697 if (notify_temp_creation)
3698 return GS_OK;
3699 break;
3702 /* Fetch information about the constructor to direct later processing.
3703 We might want to make static versions of it in various cases, and
3704 can only do so if it known to be a valid constant initializer. */
3705 valid_const_initializer
3706 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3707 &num_ctor_elements, &complete_p);
3709 /* If a const aggregate variable is being initialized, then it
3710 should never be a lose to promote the variable to be static. */
3711 if (valid_const_initializer
3712 && num_nonzero_elements > 1
3713 && TREE_READONLY (object)
3714 && TREE_CODE (object) == VAR_DECL
3715 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3717 if (notify_temp_creation)
3718 return GS_ERROR;
3719 DECL_INITIAL (object) = ctor;
3720 TREE_STATIC (object) = 1;
3721 if (!DECL_NAME (object))
3722 DECL_NAME (object) = create_tmp_var_name ("C");
3723 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3725 /* ??? C++ doesn't automatically append a .<number> to the
3726 assembler name, and even when it does, it looks at FE private
3727 data structures to figure out what that number should be,
3728 which are not set for this variable. I suppose this is
3729 important for local statics for inline functions, which aren't
3730 "local" in the object file sense. So in order to get a unique
3731 TU-local symbol, we must invoke the lhd version now. */
3732 lhd_set_decl_assembler_name (object);
3734 *expr_p = NULL_TREE;
3735 break;
3738 /* If there are "lots" of initialized elements, even discounting
3739 those that are not address constants (and thus *must* be
3740 computed at runtime), then partition the constructor into
3741 constant and non-constant parts. Block copy the constant
3742 parts in, then generate code for the non-constant parts. */
3743 /* TODO. There's code in cp/typeck.c to do this. */
3745 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3746 /* store_constructor will ignore the clearing of variable-sized
3747 objects. Initializers for such objects must explicitly set
3748 every field that needs to be set. */
3749 cleared = false;
3750 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3751 /* If the constructor isn't complete, clear the whole object
3752 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3754 ??? This ought not to be needed. For any element not present
3755 in the initializer, we should simply set them to zero. Except
3756 we'd need to *find* the elements that are not present, and that
3757 requires trickery to avoid quadratic compile-time behavior in
3758 large cases or excessive memory use in small cases. */
3759 cleared = true;
3760 else if (num_ctor_elements - num_nonzero_elements
3761 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3762 && num_nonzero_elements < num_ctor_elements / 4)
3763 /* If there are "lots" of zeros, it's more efficient to clear
3764 the memory and then set the nonzero elements. */
3765 cleared = true;
3766 else
3767 cleared = false;
3769 /* If there are "lots" of initialized elements, and all of them
3770 are valid address constants, then the entire initializer can
3771 be dropped to memory, and then memcpy'd out. Don't do this
3772 for sparse arrays, though, as it's more efficient to follow
3773 the standard CONSTRUCTOR behavior of memset followed by
3774 individual element initialization. Also don't do this for small
3775 all-zero initializers (which aren't big enough to merit
3776 clearing), and don't try to make bitwise copies of
3777 TREE_ADDRESSABLE types.
3779 We cannot apply such transformation when compiling chkp static
3780 initializer because creation of initializer image in the memory
3781 will require static initialization of bounds for it. It should
3782 result in another gimplification of similar initializer and we
3783 may fall into infinite loop. */
3784 if (valid_const_initializer
3785 && !(cleared || num_nonzero_elements == 0)
3786 && !TREE_ADDRESSABLE (type)
3787 && (!current_function_decl
3788 || !lookup_attribute ("chkp ctor",
3789 DECL_ATTRIBUTES (current_function_decl))))
3791 HOST_WIDE_INT size = int_size_in_bytes (type);
3792 unsigned int align;
3794 /* ??? We can still get unbounded array types, at least
3795 from the C++ front end. This seems wrong, but attempt
3796 to work around it for now. */
3797 if (size < 0)
3799 size = int_size_in_bytes (TREE_TYPE (object));
3800 if (size >= 0)
3801 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3804 /* Find the maximum alignment we can assume for the object. */
3805 /* ??? Make use of DECL_OFFSET_ALIGN. */
3806 if (DECL_P (object))
3807 align = DECL_ALIGN (object);
3808 else
3809 align = TYPE_ALIGN (type);
3811 /* Do a block move either if the size is so small as to make
3812 each individual move a sub-unit move on average, or if it
3813 is so large as to make individual moves inefficient. */
3814 if (size > 0
3815 && num_nonzero_elements > 1
3816 && (size < num_nonzero_elements
3817 || !can_move_by_pieces (size, align)))
3819 if (notify_temp_creation)
3820 return GS_ERROR;
3822 walk_tree (&ctor, force_labels_r, NULL, NULL);
3823 ctor = tree_output_constant_def (ctor);
3824 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3825 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3826 TREE_OPERAND (*expr_p, 1) = ctor;
3828 /* This is no longer an assignment of a CONSTRUCTOR, but
3829 we still may have processing to do on the LHS. So
3830 pretend we didn't do anything here to let that happen. */
3831 return GS_UNHANDLED;
3835 /* If the target is volatile, we have non-zero elements and more than
3836 one field to assign, initialize the target from a temporary. */
3837 if (TREE_THIS_VOLATILE (object)
3838 && !TREE_ADDRESSABLE (type)
3839 && num_nonzero_elements > 0
3840 && vec_safe_length (elts) > 1)
3842 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3843 TREE_OPERAND (*expr_p, 0) = temp;
3844 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3845 *expr_p,
3846 build2 (MODIFY_EXPR, void_type_node,
3847 object, temp));
3848 return GS_OK;
3851 if (notify_temp_creation)
3852 return GS_OK;
3854 /* If there are nonzero elements and if needed, pre-evaluate to capture
3855 elements overlapping with the lhs into temporaries. We must do this
3856 before clearing to fetch the values before they are zeroed-out. */
3857 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3859 preeval_data.lhs_base_decl = get_base_address (object);
3860 if (!DECL_P (preeval_data.lhs_base_decl))
3861 preeval_data.lhs_base_decl = NULL;
3862 preeval_data.lhs_alias_set = get_alias_set (object);
3864 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3865 pre_p, post_p, &preeval_data);
3868 if (cleared)
3870 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3871 Note that we still have to gimplify, in order to handle the
3872 case of variable sized types. Avoid shared tree structures. */
3873 CONSTRUCTOR_ELTS (ctor) = NULL;
3874 TREE_SIDE_EFFECTS (ctor) = 0;
3875 object = unshare_expr (object);
3876 gimplify_stmt (expr_p, pre_p);
3879 /* If we have not block cleared the object, or if there are nonzero
3880 elements in the constructor, add assignments to the individual
3881 scalar fields of the object. */
3882 if (!cleared || num_nonzero_elements > 0)
3883 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3885 *expr_p = NULL_TREE;
3887 break;
3889 case COMPLEX_TYPE:
3891 tree r, i;
3893 if (notify_temp_creation)
3894 return GS_OK;
3896 /* Extract the real and imaginary parts out of the ctor. */
3897 gcc_assert (elts->length () == 2);
3898 r = (*elts)[0].value;
3899 i = (*elts)[1].value;
3900 if (r == NULL || i == NULL)
3902 tree zero = build_zero_cst (TREE_TYPE (type));
3903 if (r == NULL)
3904 r = zero;
3905 if (i == NULL)
3906 i = zero;
3909 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3910 represent creation of a complex value. */
3911 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3913 ctor = build_complex (type, r, i);
3914 TREE_OPERAND (*expr_p, 1) = ctor;
3916 else
3918 ctor = build2 (COMPLEX_EXPR, type, r, i);
3919 TREE_OPERAND (*expr_p, 1) = ctor;
3920 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3921 pre_p,
3922 post_p,
3923 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3924 fb_rvalue);
3927 break;
3929 case VECTOR_TYPE:
3931 unsigned HOST_WIDE_INT ix;
3932 constructor_elt *ce;
3934 if (notify_temp_creation)
3935 return GS_OK;
3937 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3938 if (TREE_CONSTANT (ctor))
3940 bool constant_p = true;
3941 tree value;
3943 /* Even when ctor is constant, it might contain non-*_CST
3944 elements, such as addresses or trapping values like
3945 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3946 in VECTOR_CST nodes. */
3947 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3948 if (!CONSTANT_CLASS_P (value))
3950 constant_p = false;
3951 break;
3954 if (constant_p)
3956 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3957 break;
3960 /* Don't reduce an initializer constant even if we can't
3961 make a VECTOR_CST. It won't do anything for us, and it'll
3962 prevent us from representing it as a single constant. */
3963 if (initializer_constant_valid_p (ctor, type))
3964 break;
3966 TREE_CONSTANT (ctor) = 0;
3969 /* Vector types use CONSTRUCTOR all the way through gimple
3970 compilation as a general initializer. */
3971 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
3973 enum gimplify_status tret;
3974 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3975 fb_rvalue);
3976 if (tret == GS_ERROR)
3977 ret = GS_ERROR;
3979 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3980 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3982 break;
3984 default:
3985 /* So how did we get a CONSTRUCTOR for a scalar type? */
3986 gcc_unreachable ();
3989 if (ret == GS_ERROR)
3990 return GS_ERROR;
3991 else if (want_value)
3993 *expr_p = object;
3994 return GS_OK;
3996 else
3998 /* If we have gimplified both sides of the initializer but have
3999 not emitted an assignment, do so now. */
4000 if (*expr_p)
4002 tree lhs = TREE_OPERAND (*expr_p, 0);
4003 tree rhs = TREE_OPERAND (*expr_p, 1);
4004 gimple init = gimple_build_assign (lhs, rhs);
4005 gimplify_seq_add_stmt (pre_p, init);
4006 *expr_p = NULL;
4009 return GS_ALL_DONE;
4013 /* Given a pointer value OP0, return a simplified version of an
4014 indirection through OP0, or NULL_TREE if no simplification is
4015 possible. This may only be applied to a rhs of an expression.
4016 Note that the resulting type may be different from the type pointed
4017 to in the sense that it is still compatible from the langhooks
4018 point of view. */
4020 static tree
4021 gimple_fold_indirect_ref_rhs (tree t)
4023 return gimple_fold_indirect_ref (t);
4026 /* Subroutine of gimplify_modify_expr to do simplifications of
4027 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4028 something changes. */
4030 static enum gimplify_status
4031 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4032 gimple_seq *pre_p, gimple_seq *post_p,
4033 bool want_value)
4035 enum gimplify_status ret = GS_UNHANDLED;
4036 bool changed;
4040 changed = false;
4041 switch (TREE_CODE (*from_p))
4043 case VAR_DECL:
4044 /* If we're assigning from a read-only variable initialized with
4045 a constructor, do the direct assignment from the constructor,
4046 but only if neither source nor target are volatile since this
4047 latter assignment might end up being done on a per-field basis. */
4048 if (DECL_INITIAL (*from_p)
4049 && TREE_READONLY (*from_p)
4050 && !TREE_THIS_VOLATILE (*from_p)
4051 && !TREE_THIS_VOLATILE (*to_p)
4052 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4054 tree old_from = *from_p;
4055 enum gimplify_status subret;
4057 /* Move the constructor into the RHS. */
4058 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4060 /* Let's see if gimplify_init_constructor will need to put
4061 it in memory. */
4062 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4063 false, true);
4064 if (subret == GS_ERROR)
4066 /* If so, revert the change. */
4067 *from_p = old_from;
4069 else
4071 ret = GS_OK;
4072 changed = true;
4075 break;
4076 case INDIRECT_REF:
4078 /* If we have code like
4080 *(const A*)(A*)&x
4082 where the type of "x" is a (possibly cv-qualified variant
4083 of "A"), treat the entire expression as identical to "x".
4084 This kind of code arises in C++ when an object is bound
4085 to a const reference, and if "x" is a TARGET_EXPR we want
4086 to take advantage of the optimization below. */
4087 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4088 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4089 if (t)
4091 if (TREE_THIS_VOLATILE (t) != volatile_p)
4093 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4094 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4095 build_fold_addr_expr (t));
4096 if (REFERENCE_CLASS_P (t))
4097 TREE_THIS_VOLATILE (t) = volatile_p;
4099 *from_p = t;
4100 ret = GS_OK;
4101 changed = true;
4103 break;
4106 case TARGET_EXPR:
4108 /* If we are initializing something from a TARGET_EXPR, strip the
4109 TARGET_EXPR and initialize it directly, if possible. This can't
4110 be done if the initializer is void, since that implies that the
4111 temporary is set in some non-trivial way.
4113 ??? What about code that pulls out the temp and uses it
4114 elsewhere? I think that such code never uses the TARGET_EXPR as
4115 an initializer. If I'm wrong, we'll die because the temp won't
4116 have any RTL. In that case, I guess we'll need to replace
4117 references somehow. */
4118 tree init = TARGET_EXPR_INITIAL (*from_p);
4120 if (init
4121 && !VOID_TYPE_P (TREE_TYPE (init)))
4123 *from_p = init;
4124 ret = GS_OK;
4125 changed = true;
4128 break;
4130 case COMPOUND_EXPR:
4131 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4132 caught. */
4133 gimplify_compound_expr (from_p, pre_p, true);
4134 ret = GS_OK;
4135 changed = true;
4136 break;
4138 case CONSTRUCTOR:
4139 /* If we already made some changes, let the front end have a
4140 crack at this before we break it down. */
4141 if (ret != GS_UNHANDLED)
4142 break;
4143 /* If we're initializing from a CONSTRUCTOR, break this into
4144 individual MODIFY_EXPRs. */
4145 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4146 false);
4148 case COND_EXPR:
4149 /* If we're assigning to a non-register type, push the assignment
4150 down into the branches. This is mandatory for ADDRESSABLE types,
4151 since we cannot generate temporaries for such, but it saves a
4152 copy in other cases as well. */
4153 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4155 /* This code should mirror the code in gimplify_cond_expr. */
4156 enum tree_code code = TREE_CODE (*expr_p);
4157 tree cond = *from_p;
4158 tree result = *to_p;
4160 ret = gimplify_expr (&result, pre_p, post_p,
4161 is_gimple_lvalue, fb_lvalue);
4162 if (ret != GS_ERROR)
4163 ret = GS_OK;
4165 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4166 TREE_OPERAND (cond, 1)
4167 = build2 (code, void_type_node, result,
4168 TREE_OPERAND (cond, 1));
4169 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4170 TREE_OPERAND (cond, 2)
4171 = build2 (code, void_type_node, unshare_expr (result),
4172 TREE_OPERAND (cond, 2));
4174 TREE_TYPE (cond) = void_type_node;
4175 recalculate_side_effects (cond);
4177 if (want_value)
4179 gimplify_and_add (cond, pre_p);
4180 *expr_p = unshare_expr (result);
4182 else
4183 *expr_p = cond;
4184 return ret;
4186 break;
4188 case CALL_EXPR:
4189 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4190 return slot so that we don't generate a temporary. */
4191 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4192 && aggregate_value_p (*from_p, *from_p))
4194 bool use_target;
4196 if (!(rhs_predicate_for (*to_p))(*from_p))
4197 /* If we need a temporary, *to_p isn't accurate. */
4198 use_target = false;
4199 /* It's OK to use the return slot directly unless it's an NRV. */
4200 else if (TREE_CODE (*to_p) == RESULT_DECL
4201 && DECL_NAME (*to_p) == NULL_TREE
4202 && needs_to_live_in_memory (*to_p))
4203 use_target = true;
4204 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4205 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4206 /* Don't force regs into memory. */
4207 use_target = false;
4208 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4209 /* It's OK to use the target directly if it's being
4210 initialized. */
4211 use_target = true;
4212 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4213 /* Always use the target and thus RSO for variable-sized types.
4214 GIMPLE cannot deal with a variable-sized assignment
4215 embedded in a call statement. */
4216 use_target = true;
4217 else if (TREE_CODE (*to_p) != SSA_NAME
4218 && (!is_gimple_variable (*to_p)
4219 || needs_to_live_in_memory (*to_p)))
4220 /* Don't use the original target if it's already addressable;
4221 if its address escapes, and the called function uses the
4222 NRV optimization, a conforming program could see *to_p
4223 change before the called function returns; see c++/19317.
4224 When optimizing, the return_slot pass marks more functions
4225 as safe after we have escape info. */
4226 use_target = false;
4227 else
4228 use_target = true;
4230 if (use_target)
4232 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4233 mark_addressable (*to_p);
4236 break;
4238 case WITH_SIZE_EXPR:
4239 /* Likewise for calls that return an aggregate of non-constant size,
4240 since we would not be able to generate a temporary at all. */
4241 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4243 *from_p = TREE_OPERAND (*from_p, 0);
4244 /* We don't change ret in this case because the
4245 WITH_SIZE_EXPR might have been added in
4246 gimplify_modify_expr, so returning GS_OK would lead to an
4247 infinite loop. */
4248 changed = true;
4250 break;
4252 /* If we're initializing from a container, push the initialization
4253 inside it. */
4254 case CLEANUP_POINT_EXPR:
4255 case BIND_EXPR:
4256 case STATEMENT_LIST:
4258 tree wrap = *from_p;
4259 tree t;
4261 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4262 fb_lvalue);
4263 if (ret != GS_ERROR)
4264 ret = GS_OK;
4266 t = voidify_wrapper_expr (wrap, *expr_p);
4267 gcc_assert (t == *expr_p);
4269 if (want_value)
4271 gimplify_and_add (wrap, pre_p);
4272 *expr_p = unshare_expr (*to_p);
4274 else
4275 *expr_p = wrap;
4276 return GS_OK;
4279 case COMPOUND_LITERAL_EXPR:
4281 tree complit = TREE_OPERAND (*expr_p, 1);
4282 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4283 tree decl = DECL_EXPR_DECL (decl_s);
4284 tree init = DECL_INITIAL (decl);
4286 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4287 into struct T x = { 0, 1, 2 } if the address of the
4288 compound literal has never been taken. */
4289 if (!TREE_ADDRESSABLE (complit)
4290 && !TREE_ADDRESSABLE (decl)
4291 && init)
4293 *expr_p = copy_node (*expr_p);
4294 TREE_OPERAND (*expr_p, 1) = init;
4295 return GS_OK;
4299 default:
4300 break;
4303 while (changed);
4305 return ret;
4309 /* Return true if T looks like a valid GIMPLE statement. */
4311 static bool
4312 is_gimple_stmt (tree t)
4314 const enum tree_code code = TREE_CODE (t);
4316 switch (code)
4318 case NOP_EXPR:
4319 /* The only valid NOP_EXPR is the empty statement. */
4320 return IS_EMPTY_STMT (t);
4322 case BIND_EXPR:
4323 case COND_EXPR:
4324 /* These are only valid if they're void. */
4325 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4327 case SWITCH_EXPR:
4328 case GOTO_EXPR:
4329 case RETURN_EXPR:
4330 case LABEL_EXPR:
4331 case CASE_LABEL_EXPR:
4332 case TRY_CATCH_EXPR:
4333 case TRY_FINALLY_EXPR:
4334 case EH_FILTER_EXPR:
4335 case CATCH_EXPR:
4336 case ASM_EXPR:
4337 case STATEMENT_LIST:
4338 case OMP_PARALLEL:
4339 case OMP_FOR:
4340 case OMP_SIMD:
4341 case CILK_SIMD:
4342 case OMP_DISTRIBUTE:
4343 case OMP_SECTIONS:
4344 case OMP_SECTION:
4345 case OMP_SINGLE:
4346 case OMP_MASTER:
4347 case OMP_TASKGROUP:
4348 case OMP_ORDERED:
4349 case OMP_CRITICAL:
4350 case OMP_TASK:
4351 /* These are always void. */
4352 return true;
4354 case CALL_EXPR:
4355 case MODIFY_EXPR:
4356 case PREDICT_EXPR:
4357 /* These are valid regardless of their type. */
4358 return true;
4360 default:
4361 return false;
4366 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4367 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4368 DECL_GIMPLE_REG_P set.
4370 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4371 other, unmodified part of the complex object just before the total store.
4372 As a consequence, if the object is still uninitialized, an undefined value
4373 will be loaded into a register, which may result in a spurious exception
4374 if the register is floating-point and the value happens to be a signaling
4375 NaN for example. Then the fully-fledged complex operations lowering pass
4376 followed by a DCE pass are necessary in order to fix things up. */
4378 static enum gimplify_status
4379 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4380 bool want_value)
4382 enum tree_code code, ocode;
4383 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4385 lhs = TREE_OPERAND (*expr_p, 0);
4386 rhs = TREE_OPERAND (*expr_p, 1);
4387 code = TREE_CODE (lhs);
4388 lhs = TREE_OPERAND (lhs, 0);
4390 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4391 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4392 TREE_NO_WARNING (other) = 1;
4393 other = get_formal_tmp_var (other, pre_p);
4395 realpart = code == REALPART_EXPR ? rhs : other;
4396 imagpart = code == REALPART_EXPR ? other : rhs;
4398 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4399 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4400 else
4401 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4403 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4404 *expr_p = (want_value) ? rhs : NULL_TREE;
4406 return GS_ALL_DONE;
4409 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4411 modify_expr
4412 : varname '=' rhs
4413 | '*' ID '=' rhs
4415 PRE_P points to the list where side effects that must happen before
4416 *EXPR_P should be stored.
4418 POST_P points to the list where side effects that must happen after
4419 *EXPR_P should be stored.
4421 WANT_VALUE is nonzero iff we want to use the value of this expression
4422 in another expression. */
4424 static enum gimplify_status
4425 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4426 bool want_value)
4428 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4429 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4430 enum gimplify_status ret = GS_UNHANDLED;
4431 gimple assign;
4432 location_t loc = EXPR_LOCATION (*expr_p);
4433 gimple_stmt_iterator gsi;
4435 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4436 || TREE_CODE (*expr_p) == INIT_EXPR);
4438 if (fn_contains_cilk_spawn_p (cfun)
4439 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p)
4440 && !seen_error ())
4441 return (enum gimplify_status)
4442 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p, post_p);
4444 /* Trying to simplify a clobber using normal logic doesn't work,
4445 so handle it here. */
4446 if (TREE_CLOBBER_P (*from_p))
4448 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4449 if (ret == GS_ERROR)
4450 return ret;
4451 gcc_assert (!want_value
4452 && (TREE_CODE (*to_p) == VAR_DECL
4453 || TREE_CODE (*to_p) == MEM_REF));
4454 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4455 *expr_p = NULL;
4456 return GS_ALL_DONE;
4459 /* Insert pointer conversions required by the middle-end that are not
4460 required by the frontend. This fixes middle-end type checking for
4461 for example gcc.dg/redecl-6.c. */
4462 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4464 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4465 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4466 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4469 /* See if any simplifications can be done based on what the RHS is. */
4470 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4471 want_value);
4472 if (ret != GS_UNHANDLED)
4473 return ret;
4475 /* For zero sized types only gimplify the left hand side and right hand
4476 side as statements and throw away the assignment. Do this after
4477 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4478 types properly. */
4479 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4481 gimplify_stmt (from_p, pre_p);
4482 gimplify_stmt (to_p, pre_p);
4483 *expr_p = NULL_TREE;
4484 return GS_ALL_DONE;
4487 /* If the value being copied is of variable width, compute the length
4488 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4489 before gimplifying any of the operands so that we can resolve any
4490 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4491 the size of the expression to be copied, not of the destination, so
4492 that is what we must do here. */
4493 maybe_with_size_expr (from_p);
4495 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4496 if (ret == GS_ERROR)
4497 return ret;
4499 /* As a special case, we have to temporarily allow for assignments
4500 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4501 a toplevel statement, when gimplifying the GENERIC expression
4502 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4503 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4505 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4506 prevent gimplify_expr from trying to create a new temporary for
4507 foo's LHS, we tell it that it should only gimplify until it
4508 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4509 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4510 and all we need to do here is set 'a' to be its LHS. */
4511 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4512 fb_rvalue);
4513 if (ret == GS_ERROR)
4514 return ret;
4516 /* Now see if the above changed *from_p to something we handle specially. */
4517 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4518 want_value);
4519 if (ret != GS_UNHANDLED)
4520 return ret;
4522 /* If we've got a variable sized assignment between two lvalues (i.e. does
4523 not involve a call), then we can make things a bit more straightforward
4524 by converting the assignment to memcpy or memset. */
4525 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4527 tree from = TREE_OPERAND (*from_p, 0);
4528 tree size = TREE_OPERAND (*from_p, 1);
4530 if (TREE_CODE (from) == CONSTRUCTOR)
4531 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4533 if (is_gimple_addressable (from))
4535 *from_p = from;
4536 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4537 pre_p);
4541 /* Transform partial stores to non-addressable complex variables into
4542 total stores. This allows us to use real instead of virtual operands
4543 for these variables, which improves optimization. */
4544 if ((TREE_CODE (*to_p) == REALPART_EXPR
4545 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4546 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4547 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4549 /* Try to alleviate the effects of the gimplification creating artificial
4550 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4551 if (!gimplify_ctxp->into_ssa
4552 && TREE_CODE (*from_p) == VAR_DECL
4553 && DECL_IGNORED_P (*from_p)
4554 && DECL_P (*to_p)
4555 && !DECL_IGNORED_P (*to_p))
4557 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4558 DECL_NAME (*from_p)
4559 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4560 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4561 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4564 if (want_value && TREE_THIS_VOLATILE (*to_p))
4565 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4567 if (TREE_CODE (*from_p) == CALL_EXPR)
4569 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4570 instead of a GIMPLE_ASSIGN. */
4571 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4572 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4573 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4574 assign = gimple_build_call_from_tree (*from_p);
4575 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4576 notice_special_calls (assign);
4577 if (!gimple_call_noreturn_p (assign))
4578 gimple_call_set_lhs (assign, *to_p);
4580 else
4582 assign = gimple_build_assign (*to_p, *from_p);
4583 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4586 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4588 /* We should have got an SSA name from the start. */
4589 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4592 gimplify_seq_add_stmt (pre_p, assign);
4593 gsi = gsi_last (*pre_p);
4594 /* Don't fold stmts inside of target construct. We'll do it
4595 during omplower pass instead. */
4596 struct gimplify_omp_ctx *ctx;
4597 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
4598 if (ctx->region_type == ORT_TARGET)
4599 break;
4600 if (ctx == NULL)
4601 fold_stmt (&gsi);
4603 if (want_value)
4605 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4606 return GS_OK;
4608 else
4609 *expr_p = NULL;
4611 return GS_ALL_DONE;
4614 /* Gimplify a comparison between two variable-sized objects. Do this
4615 with a call to BUILT_IN_MEMCMP. */
4617 static enum gimplify_status
4618 gimplify_variable_sized_compare (tree *expr_p)
4620 location_t loc = EXPR_LOCATION (*expr_p);
4621 tree op0 = TREE_OPERAND (*expr_p, 0);
4622 tree op1 = TREE_OPERAND (*expr_p, 1);
4623 tree t, arg, dest, src, expr;
4625 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4626 arg = unshare_expr (arg);
4627 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4628 src = build_fold_addr_expr_loc (loc, op1);
4629 dest = build_fold_addr_expr_loc (loc, op0);
4630 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4631 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4633 expr
4634 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4635 SET_EXPR_LOCATION (expr, loc);
4636 *expr_p = expr;
4638 return GS_OK;
4641 /* Gimplify a comparison between two aggregate objects of integral scalar
4642 mode as a comparison between the bitwise equivalent scalar values. */
4644 static enum gimplify_status
4645 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4647 location_t loc = EXPR_LOCATION (*expr_p);
4648 tree op0 = TREE_OPERAND (*expr_p, 0);
4649 tree op1 = TREE_OPERAND (*expr_p, 1);
4651 tree type = TREE_TYPE (op0);
4652 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4654 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4655 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4657 *expr_p
4658 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4660 return GS_OK;
4663 /* Gimplify an expression sequence. This function gimplifies each
4664 expression and rewrites the original expression with the last
4665 expression of the sequence in GIMPLE form.
4667 PRE_P points to the list where the side effects for all the
4668 expressions in the sequence will be emitted.
4670 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4672 static enum gimplify_status
4673 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4675 tree t = *expr_p;
4679 tree *sub_p = &TREE_OPERAND (t, 0);
4681 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4682 gimplify_compound_expr (sub_p, pre_p, false);
4683 else
4684 gimplify_stmt (sub_p, pre_p);
4686 t = TREE_OPERAND (t, 1);
4688 while (TREE_CODE (t) == COMPOUND_EXPR);
4690 *expr_p = t;
4691 if (want_value)
4692 return GS_OK;
4693 else
4695 gimplify_stmt (expr_p, pre_p);
4696 return GS_ALL_DONE;
4700 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4701 gimplify. After gimplification, EXPR_P will point to a new temporary
4702 that holds the original value of the SAVE_EXPR node.
4704 PRE_P points to the list where side effects that must happen before
4705 *EXPR_P should be stored. */
4707 static enum gimplify_status
4708 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4710 enum gimplify_status ret = GS_ALL_DONE;
4711 tree val;
4713 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4714 val = TREE_OPERAND (*expr_p, 0);
4716 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4717 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4719 /* The operand may be a void-valued expression such as SAVE_EXPRs
4720 generated by the Java frontend for class initialization. It is
4721 being executed only for its side-effects. */
4722 if (TREE_TYPE (val) == void_type_node)
4724 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4725 is_gimple_stmt, fb_none);
4726 val = NULL;
4728 else
4729 val = get_initialized_tmp_var (val, pre_p, post_p);
4731 TREE_OPERAND (*expr_p, 0) = val;
4732 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4735 *expr_p = val;
4737 return ret;
4740 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4742 unary_expr
4743 : ...
4744 | '&' varname
4747 PRE_P points to the list where side effects that must happen before
4748 *EXPR_P should be stored.
4750 POST_P points to the list where side effects that must happen after
4751 *EXPR_P should be stored. */
4753 static enum gimplify_status
4754 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4756 tree expr = *expr_p;
4757 tree op0 = TREE_OPERAND (expr, 0);
4758 enum gimplify_status ret;
4759 location_t loc = EXPR_LOCATION (*expr_p);
4761 switch (TREE_CODE (op0))
4763 case INDIRECT_REF:
4764 do_indirect_ref:
4765 /* Check if we are dealing with an expression of the form '&*ptr'.
4766 While the front end folds away '&*ptr' into 'ptr', these
4767 expressions may be generated internally by the compiler (e.g.,
4768 builtins like __builtin_va_end). */
4769 /* Caution: the silent array decomposition semantics we allow for
4770 ADDR_EXPR means we can't always discard the pair. */
4771 /* Gimplification of the ADDR_EXPR operand may drop
4772 cv-qualification conversions, so make sure we add them if
4773 needed. */
4775 tree op00 = TREE_OPERAND (op0, 0);
4776 tree t_expr = TREE_TYPE (expr);
4777 tree t_op00 = TREE_TYPE (op00);
4779 if (!useless_type_conversion_p (t_expr, t_op00))
4780 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4781 *expr_p = op00;
4782 ret = GS_OK;
4784 break;
4786 case VIEW_CONVERT_EXPR:
4787 /* Take the address of our operand and then convert it to the type of
4788 this ADDR_EXPR.
4790 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4791 all clear. The impact of this transformation is even less clear. */
4793 /* If the operand is a useless conversion, look through it. Doing so
4794 guarantees that the ADDR_EXPR and its operand will remain of the
4795 same type. */
4796 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4797 op0 = TREE_OPERAND (op0, 0);
4799 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4800 build_fold_addr_expr_loc (loc,
4801 TREE_OPERAND (op0, 0)));
4802 ret = GS_OK;
4803 break;
4805 default:
4806 /* We use fb_either here because the C frontend sometimes takes
4807 the address of a call that returns a struct; see
4808 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4809 the implied temporary explicit. */
4811 /* Make the operand addressable. */
4812 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4813 is_gimple_addressable, fb_either);
4814 if (ret == GS_ERROR)
4815 break;
4817 /* Then mark it. Beware that it may not be possible to do so directly
4818 if a temporary has been created by the gimplification. */
4819 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4821 op0 = TREE_OPERAND (expr, 0);
4823 /* For various reasons, the gimplification of the expression
4824 may have made a new INDIRECT_REF. */
4825 if (TREE_CODE (op0) == INDIRECT_REF)
4826 goto do_indirect_ref;
4828 mark_addressable (TREE_OPERAND (expr, 0));
4830 /* The FEs may end up building ADDR_EXPRs early on a decl with
4831 an incomplete type. Re-build ADDR_EXPRs in canonical form
4832 here. */
4833 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4834 *expr_p = build_fold_addr_expr (op0);
4836 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4837 recompute_tree_invariant_for_addr_expr (*expr_p);
4839 /* If we re-built the ADDR_EXPR add a conversion to the original type
4840 if required. */
4841 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4842 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4844 break;
4847 return ret;
4850 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4851 value; output operands should be a gimple lvalue. */
4853 static enum gimplify_status
4854 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4856 tree expr;
4857 int noutputs;
4858 const char **oconstraints;
4859 int i;
4860 tree link;
4861 const char *constraint;
4862 bool allows_mem, allows_reg, is_inout;
4863 enum gimplify_status ret, tret;
4864 gimple stmt;
4865 vec<tree, va_gc> *inputs;
4866 vec<tree, va_gc> *outputs;
4867 vec<tree, va_gc> *clobbers;
4868 vec<tree, va_gc> *labels;
4869 tree link_next;
4871 expr = *expr_p;
4872 noutputs = list_length (ASM_OUTPUTS (expr));
4873 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4875 inputs = NULL;
4876 outputs = NULL;
4877 clobbers = NULL;
4878 labels = NULL;
4880 ret = GS_ALL_DONE;
4881 link_next = NULL_TREE;
4882 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4884 bool ok;
4885 size_t constraint_len;
4887 link_next = TREE_CHAIN (link);
4889 oconstraints[i]
4890 = constraint
4891 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4892 constraint_len = strlen (constraint);
4893 if (constraint_len == 0)
4894 continue;
4896 ok = parse_output_constraint (&constraint, i, 0, 0,
4897 &allows_mem, &allows_reg, &is_inout);
4898 if (!ok)
4900 ret = GS_ERROR;
4901 is_inout = false;
4904 if (!allows_reg && allows_mem)
4905 mark_addressable (TREE_VALUE (link));
4907 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4908 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4909 fb_lvalue | fb_mayfail);
4910 if (tret == GS_ERROR)
4912 error ("invalid lvalue in asm output %d", i);
4913 ret = tret;
4916 vec_safe_push (outputs, link);
4917 TREE_CHAIN (link) = NULL_TREE;
4919 if (is_inout)
4921 /* An input/output operand. To give the optimizers more
4922 flexibility, split it into separate input and output
4923 operands. */
4924 tree input;
4925 char buf[10];
4927 /* Turn the in/out constraint into an output constraint. */
4928 char *p = xstrdup (constraint);
4929 p[0] = '=';
4930 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4932 /* And add a matching input constraint. */
4933 if (allows_reg)
4935 sprintf (buf, "%d", i);
4937 /* If there are multiple alternatives in the constraint,
4938 handle each of them individually. Those that allow register
4939 will be replaced with operand number, the others will stay
4940 unchanged. */
4941 if (strchr (p, ',') != NULL)
4943 size_t len = 0, buflen = strlen (buf);
4944 char *beg, *end, *str, *dst;
4946 for (beg = p + 1;;)
4948 end = strchr (beg, ',');
4949 if (end == NULL)
4950 end = strchr (beg, '\0');
4951 if ((size_t) (end - beg) < buflen)
4952 len += buflen + 1;
4953 else
4954 len += end - beg + 1;
4955 if (*end)
4956 beg = end + 1;
4957 else
4958 break;
4961 str = (char *) alloca (len);
4962 for (beg = p + 1, dst = str;;)
4964 const char *tem;
4965 bool mem_p, reg_p, inout_p;
4967 end = strchr (beg, ',');
4968 if (end)
4969 *end = '\0';
4970 beg[-1] = '=';
4971 tem = beg - 1;
4972 parse_output_constraint (&tem, i, 0, 0,
4973 &mem_p, &reg_p, &inout_p);
4974 if (dst != str)
4975 *dst++ = ',';
4976 if (reg_p)
4978 memcpy (dst, buf, buflen);
4979 dst += buflen;
4981 else
4983 if (end)
4984 len = end - beg;
4985 else
4986 len = strlen (beg);
4987 memcpy (dst, beg, len);
4988 dst += len;
4990 if (end)
4991 beg = end + 1;
4992 else
4993 break;
4995 *dst = '\0';
4996 input = build_string (dst - str, str);
4998 else
4999 input = build_string (strlen (buf), buf);
5001 else
5002 input = build_string (constraint_len - 1, constraint + 1);
5004 free (p);
5006 input = build_tree_list (build_tree_list (NULL_TREE, input),
5007 unshare_expr (TREE_VALUE (link)));
5008 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5012 link_next = NULL_TREE;
5013 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5015 link_next = TREE_CHAIN (link);
5016 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5017 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5018 oconstraints, &allows_mem, &allows_reg);
5020 /* If we can't make copies, we can only accept memory. */
5021 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5023 if (allows_mem)
5024 allows_reg = 0;
5025 else
5027 error ("impossible constraint in %<asm%>");
5028 error ("non-memory input %d must stay in memory", i);
5029 return GS_ERROR;
5033 /* If the operand is a memory input, it should be an lvalue. */
5034 if (!allows_reg && allows_mem)
5036 tree inputv = TREE_VALUE (link);
5037 STRIP_NOPS (inputv);
5038 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5039 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5040 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5041 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5042 TREE_VALUE (link) = error_mark_node;
5043 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5044 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5045 mark_addressable (TREE_VALUE (link));
5046 if (tret == GS_ERROR)
5048 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5049 input_location = EXPR_LOCATION (TREE_VALUE (link));
5050 error ("memory input %d is not directly addressable", i);
5051 ret = tret;
5054 else
5056 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5057 is_gimple_asm_val, fb_rvalue);
5058 if (tret == GS_ERROR)
5059 ret = tret;
5062 TREE_CHAIN (link) = NULL_TREE;
5063 vec_safe_push (inputs, link);
5066 link_next = NULL_TREE;
5067 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5069 link_next = TREE_CHAIN (link);
5070 TREE_CHAIN (link) = NULL_TREE;
5071 vec_safe_push (clobbers, link);
5074 link_next = NULL_TREE;
5075 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5077 link_next = TREE_CHAIN (link);
5078 TREE_CHAIN (link) = NULL_TREE;
5079 vec_safe_push (labels, link);
5082 /* Do not add ASMs with errors to the gimple IL stream. */
5083 if (ret != GS_ERROR)
5085 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5086 inputs, outputs, clobbers, labels);
5088 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5089 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5091 gimplify_seq_add_stmt (pre_p, stmt);
5094 return ret;
5097 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5098 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5099 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5100 return to this function.
5102 FIXME should we complexify the prequeue handling instead? Or use flags
5103 for all the cleanups and let the optimizer tighten them up? The current
5104 code seems pretty fragile; it will break on a cleanup within any
5105 non-conditional nesting. But any such nesting would be broken, anyway;
5106 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5107 and continues out of it. We can do that at the RTL level, though, so
5108 having an optimizer to tighten up try/finally regions would be a Good
5109 Thing. */
5111 static enum gimplify_status
5112 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5114 gimple_stmt_iterator iter;
5115 gimple_seq body_sequence = NULL;
5117 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5119 /* We only care about the number of conditions between the innermost
5120 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5121 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5122 int old_conds = gimplify_ctxp->conditions;
5123 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5124 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5125 gimplify_ctxp->conditions = 0;
5126 gimplify_ctxp->conditional_cleanups = NULL;
5127 gimplify_ctxp->in_cleanup_point_expr = true;
5129 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5131 gimplify_ctxp->conditions = old_conds;
5132 gimplify_ctxp->conditional_cleanups = old_cleanups;
5133 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5135 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5137 gimple wce = gsi_stmt (iter);
5139 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5141 if (gsi_one_before_end_p (iter))
5143 /* Note that gsi_insert_seq_before and gsi_remove do not
5144 scan operands, unlike some other sequence mutators. */
5145 if (!gimple_wce_cleanup_eh_only (wce))
5146 gsi_insert_seq_before_without_update (&iter,
5147 gimple_wce_cleanup (wce),
5148 GSI_SAME_STMT);
5149 gsi_remove (&iter, true);
5150 break;
5152 else
5154 gimple_statement_try *gtry;
5155 gimple_seq seq;
5156 enum gimple_try_flags kind;
5158 if (gimple_wce_cleanup_eh_only (wce))
5159 kind = GIMPLE_TRY_CATCH;
5160 else
5161 kind = GIMPLE_TRY_FINALLY;
5162 seq = gsi_split_seq_after (iter);
5164 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5165 /* Do not use gsi_replace here, as it may scan operands.
5166 We want to do a simple structural modification only. */
5167 gsi_set_stmt (&iter, gtry);
5168 iter = gsi_start (gtry->eval);
5171 else
5172 gsi_next (&iter);
5175 gimplify_seq_add_seq (pre_p, body_sequence);
5176 if (temp)
5178 *expr_p = temp;
5179 return GS_OK;
5181 else
5183 *expr_p = NULL;
5184 return GS_ALL_DONE;
5188 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5189 is the cleanup action required. EH_ONLY is true if the cleanup should
5190 only be executed if an exception is thrown, not on normal exit. */
5192 static void
5193 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5195 gimple wce;
5196 gimple_seq cleanup_stmts = NULL;
5198 /* Errors can result in improperly nested cleanups. Which results in
5199 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5200 if (seen_error ())
5201 return;
5203 if (gimple_conditional_context ())
5205 /* If we're in a conditional context, this is more complex. We only
5206 want to run the cleanup if we actually ran the initialization that
5207 necessitates it, but we want to run it after the end of the
5208 conditional context. So we wrap the try/finally around the
5209 condition and use a flag to determine whether or not to actually
5210 run the destructor. Thus
5212 test ? f(A()) : 0
5214 becomes (approximately)
5216 flag = 0;
5217 try {
5218 if (test) { A::A(temp); flag = 1; val = f(temp); }
5219 else { val = 0; }
5220 } finally {
5221 if (flag) A::~A(temp);
5225 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5226 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5227 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5229 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5230 gimplify_stmt (&cleanup, &cleanup_stmts);
5231 wce = gimple_build_wce (cleanup_stmts);
5233 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5234 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5235 gimplify_seq_add_stmt (pre_p, ftrue);
5237 /* Because of this manipulation, and the EH edges that jump
5238 threading cannot redirect, the temporary (VAR) will appear
5239 to be used uninitialized. Don't warn. */
5240 TREE_NO_WARNING (var) = 1;
5242 else
5244 gimplify_stmt (&cleanup, &cleanup_stmts);
5245 wce = gimple_build_wce (cleanup_stmts);
5246 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5247 gimplify_seq_add_stmt (pre_p, wce);
5251 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5253 static enum gimplify_status
5254 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5256 tree targ = *expr_p;
5257 tree temp = TARGET_EXPR_SLOT (targ);
5258 tree init = TARGET_EXPR_INITIAL (targ);
5259 enum gimplify_status ret;
5261 if (init)
5263 tree cleanup = NULL_TREE;
5265 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5266 to the temps list. Handle also variable length TARGET_EXPRs. */
5267 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5269 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5270 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5271 gimplify_vla_decl (temp, pre_p);
5273 else
5274 gimple_add_tmp_var (temp);
5276 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5277 expression is supposed to initialize the slot. */
5278 if (VOID_TYPE_P (TREE_TYPE (init)))
5279 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5280 else
5282 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5283 init = init_expr;
5284 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5285 init = NULL;
5286 ggc_free (init_expr);
5288 if (ret == GS_ERROR)
5290 /* PR c++/28266 Make sure this is expanded only once. */
5291 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5292 return GS_ERROR;
5294 if (init)
5295 gimplify_and_add (init, pre_p);
5297 /* If needed, push the cleanup for the temp. */
5298 if (TARGET_EXPR_CLEANUP (targ))
5300 if (CLEANUP_EH_ONLY (targ))
5301 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5302 CLEANUP_EH_ONLY (targ), pre_p);
5303 else
5304 cleanup = TARGET_EXPR_CLEANUP (targ);
5307 /* Add a clobber for the temporary going out of scope, like
5308 gimplify_bind_expr. */
5309 if (gimplify_ctxp->in_cleanup_point_expr
5310 && needs_to_live_in_memory (temp)
5311 && flag_stack_reuse == SR_ALL)
5313 tree clobber = build_constructor (TREE_TYPE (temp),
5314 NULL);
5315 TREE_THIS_VOLATILE (clobber) = true;
5316 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5317 if (cleanup)
5318 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5319 clobber);
5320 else
5321 cleanup = clobber;
5324 if (cleanup)
5325 gimple_push_cleanup (temp, cleanup, false, pre_p);
5327 /* Only expand this once. */
5328 TREE_OPERAND (targ, 3) = init;
5329 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5331 else
5332 /* We should have expanded this before. */
5333 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5335 *expr_p = temp;
5336 return GS_OK;
5339 /* Gimplification of expression trees. */
5341 /* Gimplify an expression which appears at statement context. The
5342 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5343 NULL, a new sequence is allocated.
5345 Return true if we actually added a statement to the queue. */
5347 bool
5348 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5350 gimple_seq_node last;
5352 last = gimple_seq_last (*seq_p);
5353 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5354 return last != gimple_seq_last (*seq_p);
5357 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5358 to CTX. If entries already exist, force them to be some flavor of private.
5359 If there is no enclosing parallel, do nothing. */
5361 void
5362 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5364 splay_tree_node n;
5366 if (decl == NULL || !DECL_P (decl))
5367 return;
5371 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5372 if (n != NULL)
5374 if (n->value & GOVD_SHARED)
5375 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5376 else if (n->value & GOVD_MAP)
5377 n->value |= GOVD_MAP_TO_ONLY;
5378 else
5379 return;
5381 else if (ctx->region_type == ORT_TARGET)
5382 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5383 else if (ctx->region_type != ORT_WORKSHARE
5384 && ctx->region_type != ORT_SIMD
5385 && ctx->region_type != ORT_TARGET_DATA)
5386 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5388 ctx = ctx->outer_context;
5390 while (ctx);
5393 /* Similarly for each of the type sizes of TYPE. */
5395 static void
5396 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5398 if (type == NULL || type == error_mark_node)
5399 return;
5400 type = TYPE_MAIN_VARIANT (type);
5402 if (pointer_set_insert (ctx->privatized_types, type))
5403 return;
5405 switch (TREE_CODE (type))
5407 case INTEGER_TYPE:
5408 case ENUMERAL_TYPE:
5409 case BOOLEAN_TYPE:
5410 case REAL_TYPE:
5411 case FIXED_POINT_TYPE:
5412 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5413 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5414 break;
5416 case ARRAY_TYPE:
5417 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5418 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5419 break;
5421 case RECORD_TYPE:
5422 case UNION_TYPE:
5423 case QUAL_UNION_TYPE:
5425 tree field;
5426 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5427 if (TREE_CODE (field) == FIELD_DECL)
5429 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5430 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5433 break;
5435 case POINTER_TYPE:
5436 case REFERENCE_TYPE:
5437 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5438 break;
5440 default:
5441 break;
5444 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5445 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5446 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5449 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5451 static void
5452 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5454 splay_tree_node n;
5455 unsigned int nflags;
5456 tree t;
5458 if (error_operand_p (decl))
5459 return;
5461 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5462 there are constructors involved somewhere. */
5463 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5464 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5465 flags |= GOVD_SEEN;
5467 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5468 if (n != NULL && n->value != GOVD_ALIGNED)
5470 /* We shouldn't be re-adding the decl with the same data
5471 sharing class. */
5472 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5473 /* The only combination of data sharing classes we should see is
5474 FIRSTPRIVATE and LASTPRIVATE. */
5475 nflags = n->value | flags;
5476 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5477 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5478 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5479 n->value = nflags;
5480 return;
5483 /* When adding a variable-sized variable, we have to handle all sorts
5484 of additional bits of data: the pointer replacement variable, and
5485 the parameters of the type. */
5486 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5488 /* Add the pointer replacement variable as PRIVATE if the variable
5489 replacement is private, else FIRSTPRIVATE since we'll need the
5490 address of the original variable either for SHARED, or for the
5491 copy into or out of the context. */
5492 if (!(flags & GOVD_LOCAL))
5494 nflags = flags & GOVD_MAP
5495 ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
5496 : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5497 nflags |= flags & GOVD_SEEN;
5498 t = DECL_VALUE_EXPR (decl);
5499 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5500 t = TREE_OPERAND (t, 0);
5501 gcc_assert (DECL_P (t));
5502 omp_add_variable (ctx, t, nflags);
5505 /* Add all of the variable and type parameters (which should have
5506 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5507 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5508 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5509 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5511 /* The variable-sized variable itself is never SHARED, only some form
5512 of PRIVATE. The sharing would take place via the pointer variable
5513 which we remapped above. */
5514 if (flags & GOVD_SHARED)
5515 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5516 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5518 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5519 alloca statement we generate for the variable, so make sure it
5520 is available. This isn't automatically needed for the SHARED
5521 case, since we won't be allocating local storage then.
5522 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5523 in this case omp_notice_variable will be called later
5524 on when it is gimplified. */
5525 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5526 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5527 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5529 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5530 && lang_hooks.decls.omp_privatize_by_reference (decl))
5532 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5534 /* Similar to the direct variable sized case above, we'll need the
5535 size of references being privatized. */
5536 if ((flags & GOVD_SHARED) == 0)
5538 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5539 if (TREE_CODE (t) != INTEGER_CST)
5540 omp_notice_variable (ctx, t, true);
5544 if (n != NULL)
5545 n->value |= flags;
5546 else
5547 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5550 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5551 This just prints out diagnostics about threadprivate variable uses
5552 in untied tasks. If DECL2 is non-NULL, prevent this warning
5553 on that variable. */
5555 static bool
5556 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5557 tree decl2)
5559 splay_tree_node n;
5560 struct gimplify_omp_ctx *octx;
5562 for (octx = ctx; octx; octx = octx->outer_context)
5563 if (octx->region_type == ORT_TARGET)
5565 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5566 if (n == NULL)
5568 error ("threadprivate variable %qE used in target region",
5569 DECL_NAME (decl));
5570 error_at (octx->location, "enclosing target region");
5571 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5573 if (decl2)
5574 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5577 if (ctx->region_type != ORT_UNTIED_TASK)
5578 return false;
5579 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5580 if (n == NULL)
5582 error ("threadprivate variable %qE used in untied task",
5583 DECL_NAME (decl));
5584 error_at (ctx->location, "enclosing task");
5585 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5587 if (decl2)
5588 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5589 return false;
5592 /* Record the fact that DECL was used within the OpenMP context CTX.
5593 IN_CODE is true when real code uses DECL, and false when we should
5594 merely emit default(none) errors. Return true if DECL is going to
5595 be remapped and thus DECL shouldn't be gimplified into its
5596 DECL_VALUE_EXPR (if any). */
5598 static bool
5599 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5601 splay_tree_node n;
5602 unsigned flags = in_code ? GOVD_SEEN : 0;
5603 bool ret = false, shared;
5605 if (error_operand_p (decl))
5606 return false;
5608 /* Threadprivate variables are predetermined. */
5609 if (is_global_var (decl))
5611 if (DECL_THREAD_LOCAL_P (decl))
5612 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5614 if (DECL_HAS_VALUE_EXPR_P (decl))
5616 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5618 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5619 return omp_notice_threadprivate_variable (ctx, decl, value);
5623 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5624 if (ctx->region_type == ORT_TARGET)
5626 if (n == NULL)
5628 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5630 error ("%qD referenced in target region does not have "
5631 "a mappable type", decl);
5632 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5634 else
5635 omp_add_variable (ctx, decl, GOVD_MAP | flags);
5637 else
5638 n->value |= flags;
5639 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5640 goto do_outer;
5643 if (n == NULL)
5645 enum omp_clause_default_kind default_kind, kind;
5646 struct gimplify_omp_ctx *octx;
5648 if (ctx->region_type == ORT_WORKSHARE
5649 || ctx->region_type == ORT_SIMD
5650 || ctx->region_type == ORT_TARGET_DATA)
5651 goto do_outer;
5653 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5654 remapped firstprivate instead of shared. To some extent this is
5655 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5656 default_kind = ctx->default_kind;
5657 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5658 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5659 default_kind = kind;
5661 switch (default_kind)
5663 case OMP_CLAUSE_DEFAULT_NONE:
5664 if ((ctx->region_type & ORT_TASK) != 0)
5666 error ("%qE not specified in enclosing task",
5667 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5668 error_at (ctx->location, "enclosing task");
5670 else if (ctx->region_type == ORT_TEAMS)
5672 error ("%qE not specified in enclosing teams construct",
5673 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5674 error_at (ctx->location, "enclosing teams construct");
5676 else
5678 error ("%qE not specified in enclosing parallel",
5679 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5680 error_at (ctx->location, "enclosing parallel");
5682 /* FALLTHRU */
5683 case OMP_CLAUSE_DEFAULT_SHARED:
5684 flags |= GOVD_SHARED;
5685 break;
5686 case OMP_CLAUSE_DEFAULT_PRIVATE:
5687 flags |= GOVD_PRIVATE;
5688 break;
5689 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5690 flags |= GOVD_FIRSTPRIVATE;
5691 break;
5692 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5693 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5694 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5695 if (ctx->outer_context)
5696 omp_notice_variable (ctx->outer_context, decl, in_code);
5697 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5699 splay_tree_node n2;
5701 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5702 continue;
5703 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5704 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5706 flags |= GOVD_FIRSTPRIVATE;
5707 break;
5709 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5710 break;
5712 if (flags & GOVD_FIRSTPRIVATE)
5713 break;
5714 if (octx == NULL
5715 && (TREE_CODE (decl) == PARM_DECL
5716 || (!is_global_var (decl)
5717 && DECL_CONTEXT (decl) == current_function_decl)))
5719 flags |= GOVD_FIRSTPRIVATE;
5720 break;
5722 flags |= GOVD_SHARED;
5723 break;
5724 default:
5725 gcc_unreachable ();
5728 if ((flags & GOVD_PRIVATE)
5729 && lang_hooks.decls.omp_private_outer_ref (decl))
5730 flags |= GOVD_PRIVATE_OUTER_REF;
5732 omp_add_variable (ctx, decl, flags);
5734 shared = (flags & GOVD_SHARED) != 0;
5735 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5736 goto do_outer;
5739 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5740 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5741 && DECL_SIZE (decl)
5742 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5744 splay_tree_node n2;
5745 tree t = DECL_VALUE_EXPR (decl);
5746 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5747 t = TREE_OPERAND (t, 0);
5748 gcc_assert (DECL_P (t));
5749 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5750 n2->value |= GOVD_SEEN;
5753 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5754 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5756 /* If nothing changed, there's nothing left to do. */
5757 if ((n->value & flags) == flags)
5758 return ret;
5759 flags |= n->value;
5760 n->value = flags;
5762 do_outer:
5763 /* If the variable is private in the current context, then we don't
5764 need to propagate anything to an outer context. */
5765 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5766 return ret;
5767 if (ctx->outer_context
5768 && omp_notice_variable (ctx->outer_context, decl, in_code))
5769 return true;
5770 return ret;
5773 /* Verify that DECL is private within CTX. If there's specific information
5774 to the contrary in the innermost scope, generate an error. */
5776 static bool
5777 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, bool simd)
5779 splay_tree_node n;
5781 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5782 if (n != NULL)
5784 if (n->value & GOVD_SHARED)
5786 if (ctx == gimplify_omp_ctxp)
5788 if (simd)
5789 error ("iteration variable %qE is predetermined linear",
5790 DECL_NAME (decl));
5791 else
5792 error ("iteration variable %qE should be private",
5793 DECL_NAME (decl));
5794 n->value = GOVD_PRIVATE;
5795 return true;
5797 else
5798 return false;
5800 else if ((n->value & GOVD_EXPLICIT) != 0
5801 && (ctx == gimplify_omp_ctxp
5802 || (ctx->region_type == ORT_COMBINED_PARALLEL
5803 && gimplify_omp_ctxp->outer_context == ctx)))
5805 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5806 error ("iteration variable %qE should not be firstprivate",
5807 DECL_NAME (decl));
5808 else if ((n->value & GOVD_REDUCTION) != 0)
5809 error ("iteration variable %qE should not be reduction",
5810 DECL_NAME (decl));
5811 else if (simd && (n->value & GOVD_LASTPRIVATE) != 0)
5812 error ("iteration variable %qE should not be lastprivate",
5813 DECL_NAME (decl));
5814 else if (simd && (n->value & GOVD_PRIVATE) != 0)
5815 error ("iteration variable %qE should not be private",
5816 DECL_NAME (decl));
5817 else if (simd && (n->value & GOVD_LINEAR) != 0)
5818 error ("iteration variable %qE is predetermined linear",
5819 DECL_NAME (decl));
5821 return (ctx == gimplify_omp_ctxp
5822 || (ctx->region_type == ORT_COMBINED_PARALLEL
5823 && gimplify_omp_ctxp->outer_context == ctx));
5826 if (ctx->region_type != ORT_WORKSHARE
5827 && ctx->region_type != ORT_SIMD)
5828 return false;
5829 else if (ctx->outer_context)
5830 return omp_is_private (ctx->outer_context, decl, simd);
5831 return false;
5834 /* Return true if DECL is private within a parallel region
5835 that binds to the current construct's context or in parallel
5836 region's REDUCTION clause. */
5838 static bool
5839 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5841 splay_tree_node n;
5845 ctx = ctx->outer_context;
5846 if (ctx == NULL)
5847 return !(is_global_var (decl)
5848 /* References might be private, but might be shared too. */
5849 || lang_hooks.decls.omp_privatize_by_reference (decl));
5851 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
5852 continue;
5854 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5855 if (n != NULL)
5856 return (n->value & GOVD_SHARED) == 0;
5858 while (ctx->region_type == ORT_WORKSHARE
5859 || ctx->region_type == ORT_SIMD);
5860 return false;
5863 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5864 and previous omp contexts. */
5866 static void
5867 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5868 enum omp_region_type region_type)
5870 struct gimplify_omp_ctx *ctx, *outer_ctx;
5871 tree c;
5873 ctx = new_omp_context (region_type);
5874 outer_ctx = ctx->outer_context;
5876 while ((c = *list_p) != NULL)
5878 bool remove = false;
5879 bool notice_outer = true;
5880 const char *check_non_private = NULL;
5881 unsigned int flags;
5882 tree decl;
5884 switch (OMP_CLAUSE_CODE (c))
5886 case OMP_CLAUSE_PRIVATE:
5887 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5888 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5890 flags |= GOVD_PRIVATE_OUTER_REF;
5891 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5893 else
5894 notice_outer = false;
5895 goto do_add;
5896 case OMP_CLAUSE_SHARED:
5897 flags = GOVD_SHARED | GOVD_EXPLICIT;
5898 goto do_add;
5899 case OMP_CLAUSE_FIRSTPRIVATE:
5900 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5901 check_non_private = "firstprivate";
5902 goto do_add;
5903 case OMP_CLAUSE_LASTPRIVATE:
5904 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5905 check_non_private = "lastprivate";
5906 goto do_add;
5907 case OMP_CLAUSE_REDUCTION:
5908 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5909 check_non_private = "reduction";
5910 goto do_add;
5911 case OMP_CLAUSE_LINEAR:
5912 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
5913 is_gimple_val, fb_rvalue) == GS_ERROR)
5915 remove = true;
5916 break;
5918 flags = GOVD_LINEAR | GOVD_EXPLICIT;
5919 goto do_add;
5921 case OMP_CLAUSE_MAP:
5922 if (OMP_CLAUSE_SIZE (c)
5923 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
5924 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
5926 remove = true;
5927 break;
5929 decl = OMP_CLAUSE_DECL (c);
5930 if (!DECL_P (decl))
5932 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
5933 NULL, is_gimple_lvalue, fb_lvalue)
5934 == GS_ERROR)
5936 remove = true;
5937 break;
5939 break;
5941 flags = GOVD_MAP | GOVD_EXPLICIT;
5942 goto do_add;
5944 case OMP_CLAUSE_DEPEND:
5945 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
5947 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
5948 NULL, is_gimple_val, fb_rvalue);
5949 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5951 if (error_operand_p (OMP_CLAUSE_DECL (c)))
5953 remove = true;
5954 break;
5956 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
5957 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
5958 is_gimple_val, fb_rvalue) == GS_ERROR)
5960 remove = true;
5961 break;
5963 break;
5965 case OMP_CLAUSE_TO:
5966 case OMP_CLAUSE_FROM:
5967 if (OMP_CLAUSE_SIZE (c)
5968 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
5969 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
5971 remove = true;
5972 break;
5974 decl = OMP_CLAUSE_DECL (c);
5975 if (error_operand_p (decl))
5977 remove = true;
5978 break;
5980 if (!DECL_P (decl))
5982 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
5983 NULL, is_gimple_lvalue, fb_lvalue)
5984 == GS_ERROR)
5986 remove = true;
5987 break;
5989 break;
5991 goto do_notice;
5993 do_add:
5994 decl = OMP_CLAUSE_DECL (c);
5995 if (error_operand_p (decl))
5997 remove = true;
5998 break;
6000 omp_add_variable (ctx, decl, flags);
6001 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6002 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6004 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
6005 GOVD_LOCAL | GOVD_SEEN);
6006 gimplify_omp_ctxp = ctx;
6007 push_gimplify_context ();
6009 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6010 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6012 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6013 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6014 pop_gimplify_context
6015 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
6016 push_gimplify_context ();
6017 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6018 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6019 pop_gimplify_context
6020 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6021 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6022 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6024 gimplify_omp_ctxp = outer_ctx;
6026 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6027 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6029 gimplify_omp_ctxp = ctx;
6030 push_gimplify_context ();
6031 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6033 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6034 NULL, NULL);
6035 TREE_SIDE_EFFECTS (bind) = 1;
6036 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6037 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6039 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6040 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6041 pop_gimplify_context
6042 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6043 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6045 gimplify_omp_ctxp = outer_ctx;
6047 if (notice_outer)
6048 goto do_notice;
6049 break;
6051 case OMP_CLAUSE_COPYIN:
6052 case OMP_CLAUSE_COPYPRIVATE:
6053 decl = OMP_CLAUSE_DECL (c);
6054 if (error_operand_p (decl))
6056 remove = true;
6057 break;
6059 do_notice:
6060 if (outer_ctx)
6061 omp_notice_variable (outer_ctx, decl, true);
6062 if (check_non_private
6063 && region_type == ORT_WORKSHARE
6064 && omp_check_private (ctx, decl))
6066 error ("%s variable %qE is private in outer context",
6067 check_non_private, DECL_NAME (decl));
6068 remove = true;
6070 break;
6072 case OMP_CLAUSE_FINAL:
6073 case OMP_CLAUSE_IF:
6074 OMP_CLAUSE_OPERAND (c, 0)
6075 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6076 /* Fall through. */
6078 case OMP_CLAUSE_SCHEDULE:
6079 case OMP_CLAUSE_NUM_THREADS:
6080 case OMP_CLAUSE_NUM_TEAMS:
6081 case OMP_CLAUSE_THREAD_LIMIT:
6082 case OMP_CLAUSE_DIST_SCHEDULE:
6083 case OMP_CLAUSE_DEVICE:
6084 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6085 is_gimple_val, fb_rvalue) == GS_ERROR)
6086 remove = true;
6087 break;
6089 case OMP_CLAUSE_NOWAIT:
6090 case OMP_CLAUSE_ORDERED:
6091 case OMP_CLAUSE_UNTIED:
6092 case OMP_CLAUSE_COLLAPSE:
6093 case OMP_CLAUSE_MERGEABLE:
6094 case OMP_CLAUSE_PROC_BIND:
6095 case OMP_CLAUSE_SAFELEN:
6096 break;
6098 case OMP_CLAUSE_ALIGNED:
6099 decl = OMP_CLAUSE_DECL (c);
6100 if (error_operand_p (decl))
6102 remove = true;
6103 break;
6105 if (!is_global_var (decl)
6106 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6107 omp_add_variable (ctx, decl, GOVD_ALIGNED);
6108 break;
6110 case OMP_CLAUSE_DEFAULT:
6111 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6112 break;
6114 default:
6115 gcc_unreachable ();
6118 if (remove)
6119 *list_p = OMP_CLAUSE_CHAIN (c);
6120 else
6121 list_p = &OMP_CLAUSE_CHAIN (c);
6124 gimplify_omp_ctxp = ctx;
6127 /* For all variables that were not actually used within the context,
6128 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
6130 static int
6131 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6133 tree *list_p = (tree *) data;
6134 tree decl = (tree) n->key;
6135 unsigned flags = n->value;
6136 enum omp_clause_code code;
6137 tree clause;
6138 bool private_debug;
6140 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6141 return 0;
6142 if ((flags & GOVD_SEEN) == 0)
6143 return 0;
6144 if (flags & GOVD_DEBUG_PRIVATE)
6146 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6147 private_debug = true;
6149 else if (flags & GOVD_MAP)
6150 private_debug = false;
6151 else
6152 private_debug
6153 = lang_hooks.decls.omp_private_debug_clause (decl,
6154 !!(flags & GOVD_SHARED));
6155 if (private_debug)
6156 code = OMP_CLAUSE_PRIVATE;
6157 else if (flags & GOVD_MAP)
6158 code = OMP_CLAUSE_MAP;
6159 else if (flags & GOVD_SHARED)
6161 if (is_global_var (decl))
6163 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6164 while (ctx != NULL)
6166 splay_tree_node on
6167 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6168 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6169 | GOVD_PRIVATE | GOVD_REDUCTION
6170 | GOVD_LINEAR)) != 0)
6171 break;
6172 ctx = ctx->outer_context;
6174 if (ctx == NULL)
6175 return 0;
6177 code = OMP_CLAUSE_SHARED;
6179 else if (flags & GOVD_PRIVATE)
6180 code = OMP_CLAUSE_PRIVATE;
6181 else if (flags & GOVD_FIRSTPRIVATE)
6182 code = OMP_CLAUSE_FIRSTPRIVATE;
6183 else if (flags & GOVD_LASTPRIVATE)
6184 code = OMP_CLAUSE_LASTPRIVATE;
6185 else if (flags & GOVD_ALIGNED)
6186 return 0;
6187 else
6188 gcc_unreachable ();
6190 clause = build_omp_clause (input_location, code);
6191 OMP_CLAUSE_DECL (clause) = decl;
6192 OMP_CLAUSE_CHAIN (clause) = *list_p;
6193 if (private_debug)
6194 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6195 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6196 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6197 else if (code == OMP_CLAUSE_MAP)
6199 OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
6200 ? OMP_CLAUSE_MAP_TO
6201 : OMP_CLAUSE_MAP_TOFROM;
6202 if (DECL_SIZE (decl)
6203 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6205 tree decl2 = DECL_VALUE_EXPR (decl);
6206 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6207 decl2 = TREE_OPERAND (decl2, 0);
6208 gcc_assert (DECL_P (decl2));
6209 tree mem = build_simple_mem_ref (decl2);
6210 OMP_CLAUSE_DECL (clause) = mem;
6211 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6212 if (gimplify_omp_ctxp->outer_context)
6214 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6215 omp_notice_variable (ctx, decl2, true);
6216 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6218 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6219 OMP_CLAUSE_MAP);
6220 OMP_CLAUSE_DECL (nc) = decl;
6221 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6222 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6223 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6224 OMP_CLAUSE_CHAIN (clause) = nc;
6227 *list_p = clause;
6228 lang_hooks.decls.omp_finish_clause (clause);
6230 return 0;
6233 static void
6234 gimplify_adjust_omp_clauses (tree *list_p)
6236 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6237 tree c, decl;
6239 while ((c = *list_p) != NULL)
6241 splay_tree_node n;
6242 bool remove = false;
6244 switch (OMP_CLAUSE_CODE (c))
6246 case OMP_CLAUSE_PRIVATE:
6247 case OMP_CLAUSE_SHARED:
6248 case OMP_CLAUSE_FIRSTPRIVATE:
6249 case OMP_CLAUSE_LINEAR:
6250 decl = OMP_CLAUSE_DECL (c);
6251 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6252 remove = !(n->value & GOVD_SEEN);
6253 if (! remove)
6255 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6256 if ((n->value & GOVD_DEBUG_PRIVATE)
6257 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6259 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6260 || ((n->value & GOVD_DATA_SHARE_CLASS)
6261 == GOVD_PRIVATE));
6262 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6263 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6265 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6266 && ctx->outer_context
6267 && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6268 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6269 && !is_global_var (decl))
6271 if (ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
6273 n = splay_tree_lookup (ctx->outer_context->variables,
6274 (splay_tree_key) decl);
6275 if (n == NULL
6276 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
6278 int flags = OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6279 ? GOVD_LASTPRIVATE : GOVD_SHARED;
6280 if (n == NULL)
6281 omp_add_variable (ctx->outer_context, decl,
6282 flags | GOVD_SEEN);
6283 else
6284 n->value |= flags | GOVD_SEEN;
6287 else
6288 omp_notice_variable (ctx->outer_context, decl, true);
6291 break;
6293 case OMP_CLAUSE_LASTPRIVATE:
6294 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6295 accurately reflect the presence of a FIRSTPRIVATE clause. */
6296 decl = OMP_CLAUSE_DECL (c);
6297 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6298 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6299 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6300 break;
6302 case OMP_CLAUSE_ALIGNED:
6303 decl = OMP_CLAUSE_DECL (c);
6304 if (!is_global_var (decl))
6306 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6307 remove = n == NULL || !(n->value & GOVD_SEEN);
6308 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6310 struct gimplify_omp_ctx *octx;
6311 if (n != NULL
6312 && (n->value & (GOVD_DATA_SHARE_CLASS
6313 & ~GOVD_FIRSTPRIVATE)))
6314 remove = true;
6315 else
6316 for (octx = ctx->outer_context; octx;
6317 octx = octx->outer_context)
6319 n = splay_tree_lookup (octx->variables,
6320 (splay_tree_key) decl);
6321 if (n == NULL)
6322 continue;
6323 if (n->value & GOVD_LOCAL)
6324 break;
6325 /* We have to avoid assigning a shared variable
6326 to itself when trying to add
6327 __builtin_assume_aligned. */
6328 if (n->value & GOVD_SHARED)
6330 remove = true;
6331 break;
6336 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6338 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6339 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6340 remove = true;
6342 break;
6344 case OMP_CLAUSE_MAP:
6345 decl = OMP_CLAUSE_DECL (c);
6346 if (!DECL_P (decl))
6347 break;
6348 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6349 if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6350 remove = true;
6351 else if (DECL_SIZE (decl)
6352 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6353 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
6355 tree decl2 = DECL_VALUE_EXPR (decl);
6356 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6357 decl2 = TREE_OPERAND (decl2, 0);
6358 gcc_assert (DECL_P (decl2));
6359 tree mem = build_simple_mem_ref (decl2);
6360 OMP_CLAUSE_DECL (c) = mem;
6361 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6362 if (ctx->outer_context)
6364 omp_notice_variable (ctx->outer_context, decl2, true);
6365 omp_notice_variable (ctx->outer_context,
6366 OMP_CLAUSE_SIZE (c), true);
6368 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6369 OMP_CLAUSE_MAP);
6370 OMP_CLAUSE_DECL (nc) = decl;
6371 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6372 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6373 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6374 OMP_CLAUSE_CHAIN (c) = nc;
6375 c = nc;
6377 break;
6379 case OMP_CLAUSE_TO:
6380 case OMP_CLAUSE_FROM:
6381 decl = OMP_CLAUSE_DECL (c);
6382 if (!DECL_P (decl))
6383 break;
6384 if (DECL_SIZE (decl)
6385 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6387 tree decl2 = DECL_VALUE_EXPR (decl);
6388 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6389 decl2 = TREE_OPERAND (decl2, 0);
6390 gcc_assert (DECL_P (decl2));
6391 tree mem = build_simple_mem_ref (decl2);
6392 OMP_CLAUSE_DECL (c) = mem;
6393 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6394 if (ctx->outer_context)
6396 omp_notice_variable (ctx->outer_context, decl2, true);
6397 omp_notice_variable (ctx->outer_context,
6398 OMP_CLAUSE_SIZE (c), true);
6401 break;
6403 case OMP_CLAUSE_REDUCTION:
6404 case OMP_CLAUSE_COPYIN:
6405 case OMP_CLAUSE_COPYPRIVATE:
6406 case OMP_CLAUSE_IF:
6407 case OMP_CLAUSE_NUM_THREADS:
6408 case OMP_CLAUSE_NUM_TEAMS:
6409 case OMP_CLAUSE_THREAD_LIMIT:
6410 case OMP_CLAUSE_DIST_SCHEDULE:
6411 case OMP_CLAUSE_DEVICE:
6412 case OMP_CLAUSE_SCHEDULE:
6413 case OMP_CLAUSE_NOWAIT:
6414 case OMP_CLAUSE_ORDERED:
6415 case OMP_CLAUSE_DEFAULT:
6416 case OMP_CLAUSE_UNTIED:
6417 case OMP_CLAUSE_COLLAPSE:
6418 case OMP_CLAUSE_FINAL:
6419 case OMP_CLAUSE_MERGEABLE:
6420 case OMP_CLAUSE_PROC_BIND:
6421 case OMP_CLAUSE_SAFELEN:
6422 case OMP_CLAUSE_DEPEND:
6423 break;
6425 default:
6426 gcc_unreachable ();
6429 if (remove)
6430 *list_p = OMP_CLAUSE_CHAIN (c);
6431 else
6432 list_p = &OMP_CLAUSE_CHAIN (c);
6435 /* Add in any implicit data sharing. */
6436 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
6438 gimplify_omp_ctxp = ctx->outer_context;
6439 delete_omp_context (ctx);
6442 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6443 gimplification of the body, as well as scanning the body for used
6444 variables. We need to do this scan now, because variable-sized
6445 decls will be decomposed during gimplification. */
6447 static void
6448 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6450 tree expr = *expr_p;
6451 gimple g;
6452 gimple_seq body = NULL;
6454 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6455 OMP_PARALLEL_COMBINED (expr)
6456 ? ORT_COMBINED_PARALLEL
6457 : ORT_PARALLEL);
6459 push_gimplify_context ();
6461 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6462 if (gimple_code (g) == GIMPLE_BIND)
6463 pop_gimplify_context (g);
6464 else
6465 pop_gimplify_context (NULL);
6467 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6469 g = gimple_build_omp_parallel (body,
6470 OMP_PARALLEL_CLAUSES (expr),
6471 NULL_TREE, NULL_TREE);
6472 if (OMP_PARALLEL_COMBINED (expr))
6473 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6474 gimplify_seq_add_stmt (pre_p, g);
6475 *expr_p = NULL_TREE;
6478 /* Gimplify the contents of an OMP_TASK statement. This involves
6479 gimplification of the body, as well as scanning the body for used
6480 variables. We need to do this scan now, because variable-sized
6481 decls will be decomposed during gimplification. */
6483 static void
6484 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6486 tree expr = *expr_p;
6487 gimple g;
6488 gimple_seq body = NULL;
6490 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6491 find_omp_clause (OMP_TASK_CLAUSES (expr),
6492 OMP_CLAUSE_UNTIED)
6493 ? ORT_UNTIED_TASK : ORT_TASK);
6495 push_gimplify_context ();
6497 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6498 if (gimple_code (g) == GIMPLE_BIND)
6499 pop_gimplify_context (g);
6500 else
6501 pop_gimplify_context (NULL);
6503 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6505 g = gimple_build_omp_task (body,
6506 OMP_TASK_CLAUSES (expr),
6507 NULL_TREE, NULL_TREE,
6508 NULL_TREE, NULL_TREE, NULL_TREE);
6509 gimplify_seq_add_stmt (pre_p, g);
6510 *expr_p = NULL_TREE;
6513 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6514 with non-NULL OMP_FOR_INIT. */
6516 static tree
6517 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6519 *walk_subtrees = 0;
6520 switch (TREE_CODE (*tp))
6522 case OMP_FOR:
6523 *walk_subtrees = 1;
6524 /* FALLTHRU */
6525 case OMP_SIMD:
6526 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6527 return *tp;
6528 break;
6529 case BIND_EXPR:
6530 case STATEMENT_LIST:
6531 case OMP_PARALLEL:
6532 *walk_subtrees = 1;
6533 break;
6534 default:
6535 break;
6537 return NULL_TREE;
6540 /* Gimplify the gross structure of an OMP_FOR statement. */
6542 static enum gimplify_status
6543 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6545 tree for_stmt, orig_for_stmt, decl, var, t;
6546 enum gimplify_status ret = GS_ALL_DONE;
6547 enum gimplify_status tret;
6548 gimple gfor;
6549 gimple_seq for_body, for_pre_body;
6550 int i;
6551 bool simd;
6552 bitmap has_decl_expr = NULL;
6554 orig_for_stmt = for_stmt = *expr_p;
6556 simd = TREE_CODE (for_stmt) == OMP_SIMD
6557 || TREE_CODE (for_stmt) == CILK_SIMD;
6558 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6559 simd ? ORT_SIMD : ORT_WORKSHARE);
6561 /* Handle OMP_FOR_INIT. */
6562 for_pre_body = NULL;
6563 if (simd && OMP_FOR_PRE_BODY (for_stmt))
6565 has_decl_expr = BITMAP_ALLOC (NULL);
6566 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6567 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
6568 == VAR_DECL)
6570 t = OMP_FOR_PRE_BODY (for_stmt);
6571 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6573 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6575 tree_stmt_iterator si;
6576 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6577 tsi_next (&si))
6579 t = tsi_stmt (si);
6580 if (TREE_CODE (t) == DECL_EXPR
6581 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6582 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6586 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6587 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6589 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6591 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6592 NULL, NULL);
6593 gcc_assert (for_stmt != NULL_TREE);
6594 gimplify_omp_ctxp->combined_loop = true;
6597 for_body = NULL;
6598 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6599 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6600 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6601 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6602 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6604 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6605 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6606 decl = TREE_OPERAND (t, 0);
6607 gcc_assert (DECL_P (decl));
6608 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6609 || POINTER_TYPE_P (TREE_TYPE (decl)));
6611 /* Make sure the iteration variable is private. */
6612 tree c = NULL_TREE;
6613 if (orig_for_stmt != for_stmt)
6614 /* Do this only on innermost construct for combined ones. */;
6615 else if (simd)
6617 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6618 (splay_tree_key)decl);
6619 omp_is_private (gimplify_omp_ctxp, decl, simd);
6620 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6621 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6622 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6624 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6625 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6626 if (has_decl_expr
6627 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6628 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6629 OMP_CLAUSE_DECL (c) = decl;
6630 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6631 OMP_FOR_CLAUSES (for_stmt) = c;
6632 omp_add_variable (gimplify_omp_ctxp, decl,
6633 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6635 else
6637 bool lastprivate
6638 = (!has_decl_expr
6639 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6640 c = build_omp_clause (input_location,
6641 lastprivate ? OMP_CLAUSE_LASTPRIVATE
6642 : OMP_CLAUSE_PRIVATE);
6643 OMP_CLAUSE_DECL (c) = decl;
6644 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6645 omp_add_variable (gimplify_omp_ctxp, decl,
6646 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6647 | GOVD_SEEN);
6648 c = NULL_TREE;
6651 else if (omp_is_private (gimplify_omp_ctxp, decl, simd))
6652 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6653 else
6654 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6656 /* If DECL is not a gimple register, create a temporary variable to act
6657 as an iteration counter. This is valid, since DECL cannot be
6658 modified in the body of the loop. */
6659 if (orig_for_stmt != for_stmt)
6660 var = decl;
6661 else if (!is_gimple_reg (decl))
6663 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6664 TREE_OPERAND (t, 0) = var;
6666 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6668 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6670 else
6671 var = decl;
6673 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6674 is_gimple_val, fb_rvalue);
6675 ret = MIN (ret, tret);
6676 if (ret == GS_ERROR)
6677 return ret;
6679 /* Handle OMP_FOR_COND. */
6680 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6681 gcc_assert (COMPARISON_CLASS_P (t));
6682 gcc_assert (TREE_OPERAND (t, 0) == decl);
6684 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6685 is_gimple_val, fb_rvalue);
6686 ret = MIN (ret, tret);
6688 /* Handle OMP_FOR_INCR. */
6689 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6690 switch (TREE_CODE (t))
6692 case PREINCREMENT_EXPR:
6693 case POSTINCREMENT_EXPR:
6695 tree decl = TREE_OPERAND (t, 0);
6696 // c_omp_for_incr_canonicalize_ptr() should have been
6697 // called to massage things appropriately.
6698 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
6700 if (orig_for_stmt != for_stmt)
6701 break;
6702 t = build_int_cst (TREE_TYPE (decl), 1);
6703 if (c)
6704 OMP_CLAUSE_LINEAR_STEP (c) = t;
6705 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6706 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6707 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6708 break;
6711 case PREDECREMENT_EXPR:
6712 case POSTDECREMENT_EXPR:
6713 if (orig_for_stmt != for_stmt)
6714 break;
6715 t = build_int_cst (TREE_TYPE (decl), -1);
6716 if (c)
6717 OMP_CLAUSE_LINEAR_STEP (c) = t;
6718 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6719 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6720 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6721 break;
6723 case MODIFY_EXPR:
6724 gcc_assert (TREE_OPERAND (t, 0) == decl);
6725 TREE_OPERAND (t, 0) = var;
6727 t = TREE_OPERAND (t, 1);
6728 switch (TREE_CODE (t))
6730 case PLUS_EXPR:
6731 if (TREE_OPERAND (t, 1) == decl)
6733 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6734 TREE_OPERAND (t, 0) = var;
6735 break;
6738 /* Fallthru. */
6739 case MINUS_EXPR:
6740 case POINTER_PLUS_EXPR:
6741 gcc_assert (TREE_OPERAND (t, 0) == decl);
6742 TREE_OPERAND (t, 0) = var;
6743 break;
6744 default:
6745 gcc_unreachable ();
6748 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6749 is_gimple_val, fb_rvalue);
6750 ret = MIN (ret, tret);
6751 if (c)
6753 OMP_CLAUSE_LINEAR_STEP (c) = TREE_OPERAND (t, 1);
6754 if (TREE_CODE (t) == MINUS_EXPR)
6756 t = TREE_OPERAND (t, 1);
6757 OMP_CLAUSE_LINEAR_STEP (c)
6758 = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
6759 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
6760 &for_pre_body, NULL,
6761 is_gimple_val, fb_rvalue);
6762 ret = MIN (ret, tret);
6765 break;
6767 default:
6768 gcc_unreachable ();
6771 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6772 && orig_for_stmt == for_stmt)
6774 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6775 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6776 && OMP_CLAUSE_DECL (c) == decl
6777 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6779 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6780 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6781 gcc_assert (TREE_OPERAND (t, 0) == var);
6782 t = TREE_OPERAND (t, 1);
6783 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6784 || TREE_CODE (t) == MINUS_EXPR
6785 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6786 gcc_assert (TREE_OPERAND (t, 0) == var);
6787 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6788 TREE_OPERAND (t, 1));
6789 gimplify_assign (decl, t,
6790 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6795 BITMAP_FREE (has_decl_expr);
6797 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
6799 if (orig_for_stmt != for_stmt)
6800 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6802 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6803 decl = TREE_OPERAND (t, 0);
6804 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6805 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6806 TREE_OPERAND (t, 0) = var;
6807 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6808 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
6809 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
6812 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt));
6814 int kind;
6815 switch (TREE_CODE (orig_for_stmt))
6817 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
6818 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
6819 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
6820 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
6821 default:
6822 gcc_unreachable ();
6824 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
6825 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6826 for_pre_body);
6827 if (orig_for_stmt != for_stmt)
6828 gimple_omp_for_set_combined_p (gfor, true);
6829 if (gimplify_omp_ctxp
6830 && (gimplify_omp_ctxp->combined_loop
6831 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
6832 && gimplify_omp_ctxp->outer_context
6833 && gimplify_omp_ctxp->outer_context->combined_loop)))
6835 gimple_omp_for_set_combined_into_p (gfor, true);
6836 if (gimplify_omp_ctxp->combined_loop)
6837 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
6838 else
6839 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
6842 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6844 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6845 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6846 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6847 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6848 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6849 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6850 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6851 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6854 gimplify_seq_add_stmt (pre_p, gfor);
6855 if (ret != GS_ALL_DONE)
6856 return GS_ERROR;
6857 *expr_p = NULL_TREE;
6858 return GS_ALL_DONE;
6861 /* Gimplify the gross structure of other OpenMP constructs.
6862 In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
6863 and OMP_TEAMS. */
6865 static void
6866 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6868 tree expr = *expr_p;
6869 gimple stmt;
6870 gimple_seq body = NULL;
6871 enum omp_region_type ort = ORT_WORKSHARE;
6873 switch (TREE_CODE (expr))
6875 case OMP_SECTIONS:
6876 case OMP_SINGLE:
6877 break;
6878 case OMP_TARGET:
6879 ort = ORT_TARGET;
6880 break;
6881 case OMP_TARGET_DATA:
6882 ort = ORT_TARGET_DATA;
6883 break;
6884 case OMP_TEAMS:
6885 ort = ORT_TEAMS;
6886 break;
6887 default:
6888 gcc_unreachable ();
6890 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
6891 if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
6893 push_gimplify_context ();
6894 gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
6895 if (gimple_code (g) == GIMPLE_BIND)
6896 pop_gimplify_context (g);
6897 else
6898 pop_gimplify_context (NULL);
6899 if (ort == ORT_TARGET_DATA)
6901 gimple_seq cleanup = NULL;
6902 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
6903 g = gimple_build_call (fn, 0);
6904 gimple_seq_add_stmt (&cleanup, g);
6905 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
6906 body = NULL;
6907 gimple_seq_add_stmt (&body, g);
6910 else
6911 gimplify_and_add (OMP_BODY (expr), &body);
6912 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6914 switch (TREE_CODE (expr))
6916 case OMP_SECTIONS:
6917 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6918 break;
6919 case OMP_SINGLE:
6920 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6921 break;
6922 case OMP_TARGET:
6923 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
6924 OMP_CLAUSES (expr));
6925 break;
6926 case OMP_TARGET_DATA:
6927 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
6928 OMP_CLAUSES (expr));
6929 break;
6930 case OMP_TEAMS:
6931 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
6932 break;
6933 default:
6934 gcc_unreachable ();
6937 gimplify_seq_add_stmt (pre_p, stmt);
6938 *expr_p = NULL_TREE;
6941 /* Gimplify the gross structure of OpenMP target update construct. */
6943 static void
6944 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
6946 tree expr = *expr_p;
6947 gimple stmt;
6949 gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
6950 ORT_WORKSHARE);
6951 gimplify_adjust_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr));
6952 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
6953 OMP_TARGET_UPDATE_CLAUSES (expr));
6955 gimplify_seq_add_stmt (pre_p, stmt);
6956 *expr_p = NULL_TREE;
6959 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6960 stabilized the lhs of the atomic operation as *ADDR. Return true if
6961 EXPR is this stabilized form. */
6963 static bool
6964 goa_lhs_expr_p (tree expr, tree addr)
6966 /* Also include casts to other type variants. The C front end is fond
6967 of adding these for e.g. volatile variables. This is like
6968 STRIP_TYPE_NOPS but includes the main variant lookup. */
6969 STRIP_USELESS_TYPE_CONVERSION (expr);
6971 if (TREE_CODE (expr) == INDIRECT_REF)
6973 expr = TREE_OPERAND (expr, 0);
6974 while (expr != addr
6975 && (CONVERT_EXPR_P (expr)
6976 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6977 && TREE_CODE (expr) == TREE_CODE (addr)
6978 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6980 expr = TREE_OPERAND (expr, 0);
6981 addr = TREE_OPERAND (addr, 0);
6983 if (expr == addr)
6984 return true;
6985 return (TREE_CODE (addr) == ADDR_EXPR
6986 && TREE_CODE (expr) == ADDR_EXPR
6987 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6989 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6990 return true;
6991 return false;
6994 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
6995 expression does not involve the lhs, evaluate it into a temporary.
6996 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
6997 or -1 if an error was encountered. */
6999 static int
7000 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
7001 tree lhs_var)
7003 tree expr = *expr_p;
7004 int saw_lhs;
7006 if (goa_lhs_expr_p (expr, lhs_addr))
7008 *expr_p = lhs_var;
7009 return 1;
7011 if (is_gimple_val (expr))
7012 return 0;
7014 saw_lhs = 0;
7015 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
7017 case tcc_binary:
7018 case tcc_comparison:
7019 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
7020 lhs_var);
7021 case tcc_unary:
7022 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
7023 lhs_var);
7024 break;
7025 case tcc_expression:
7026 switch (TREE_CODE (expr))
7028 case TRUTH_ANDIF_EXPR:
7029 case TRUTH_ORIF_EXPR:
7030 case TRUTH_AND_EXPR:
7031 case TRUTH_OR_EXPR:
7032 case TRUTH_XOR_EXPR:
7033 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
7034 lhs_addr, lhs_var);
7035 case TRUTH_NOT_EXPR:
7036 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
7037 lhs_addr, lhs_var);
7038 break;
7039 case COMPOUND_EXPR:
7040 /* Break out any preevaluations from cp_build_modify_expr. */
7041 for (; TREE_CODE (expr) == COMPOUND_EXPR;
7042 expr = TREE_OPERAND (expr, 1))
7043 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
7044 *expr_p = expr;
7045 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
7046 default:
7047 break;
7049 break;
7050 default:
7051 break;
7054 if (saw_lhs == 0)
7056 enum gimplify_status gs;
7057 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
7058 if (gs != GS_ALL_DONE)
7059 saw_lhs = -1;
7062 return saw_lhs;
7065 /* Gimplify an OMP_ATOMIC statement. */
7067 static enum gimplify_status
7068 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
7070 tree addr = TREE_OPERAND (*expr_p, 0);
7071 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
7072 ? NULL : TREE_OPERAND (*expr_p, 1);
7073 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7074 tree tmp_load;
7075 gimple loadstmt, storestmt;
7077 tmp_load = create_tmp_reg (type, NULL);
7078 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
7079 return GS_ERROR;
7081 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
7082 != GS_ALL_DONE)
7083 return GS_ERROR;
7085 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
7086 gimplify_seq_add_stmt (pre_p, loadstmt);
7087 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
7088 != GS_ALL_DONE)
7089 return GS_ERROR;
7091 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
7092 rhs = tmp_load;
7093 storestmt = gimple_build_omp_atomic_store (rhs);
7094 gimplify_seq_add_stmt (pre_p, storestmt);
7095 if (OMP_ATOMIC_SEQ_CST (*expr_p))
7097 gimple_omp_atomic_set_seq_cst (loadstmt);
7098 gimple_omp_atomic_set_seq_cst (storestmt);
7100 switch (TREE_CODE (*expr_p))
7102 case OMP_ATOMIC_READ:
7103 case OMP_ATOMIC_CAPTURE_OLD:
7104 *expr_p = tmp_load;
7105 gimple_omp_atomic_set_need_value (loadstmt);
7106 break;
7107 case OMP_ATOMIC_CAPTURE_NEW:
7108 *expr_p = rhs;
7109 gimple_omp_atomic_set_need_value (storestmt);
7110 break;
7111 default:
7112 *expr_p = NULL;
7113 break;
7116 return GS_ALL_DONE;
7119 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
7120 body, and adding some EH bits. */
7122 static enum gimplify_status
7123 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7125 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7126 gimple g;
7127 gimple_seq body = NULL;
7128 int subcode = 0;
7130 /* Wrap the transaction body in a BIND_EXPR so we have a context
7131 where to put decls for OpenMP. */
7132 if (TREE_CODE (tbody) != BIND_EXPR)
7134 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7135 TREE_SIDE_EFFECTS (bind) = 1;
7136 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7137 TRANSACTION_EXPR_BODY (expr) = bind;
7140 push_gimplify_context ();
7141 temp = voidify_wrapper_expr (*expr_p, NULL);
7143 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7144 pop_gimplify_context (g);
7146 g = gimple_build_transaction (body, NULL);
7147 if (TRANSACTION_EXPR_OUTER (expr))
7148 subcode = GTMA_IS_OUTER;
7149 else if (TRANSACTION_EXPR_RELAXED (expr))
7150 subcode = GTMA_IS_RELAXED;
7151 gimple_transaction_set_subcode (g, subcode);
7153 gimplify_seq_add_stmt (pre_p, g);
7155 if (temp)
7157 *expr_p = temp;
7158 return GS_OK;
7161 *expr_p = NULL_TREE;
7162 return GS_ALL_DONE;
7165 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
7166 expression produces a value to be used as an operand inside a GIMPLE
7167 statement, the value will be stored back in *EXPR_P. This value will
7168 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7169 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7170 emitted in PRE_P and POST_P.
7172 Additionally, this process may overwrite parts of the input
7173 expression during gimplification. Ideally, it should be
7174 possible to do non-destructive gimplification.
7176 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7177 the expression needs to evaluate to a value to be used as
7178 an operand in a GIMPLE statement, this value will be stored in
7179 *EXPR_P on exit. This happens when the caller specifies one
7180 of fb_lvalue or fb_rvalue fallback flags.
7182 PRE_P will contain the sequence of GIMPLE statements corresponding
7183 to the evaluation of EXPR and all the side-effects that must
7184 be executed before the main expression. On exit, the last
7185 statement of PRE_P is the core statement being gimplified. For
7186 instance, when gimplifying 'if (++a)' the last statement in
7187 PRE_P will be 'if (t.1)' where t.1 is the result of
7188 pre-incrementing 'a'.
7190 POST_P will contain the sequence of GIMPLE statements corresponding
7191 to the evaluation of all the side-effects that must be executed
7192 after the main expression. If this is NULL, the post
7193 side-effects are stored at the end of PRE_P.
7195 The reason why the output is split in two is to handle post
7196 side-effects explicitly. In some cases, an expression may have
7197 inner and outer post side-effects which need to be emitted in
7198 an order different from the one given by the recursive
7199 traversal. For instance, for the expression (*p--)++ the post
7200 side-effects of '--' must actually occur *after* the post
7201 side-effects of '++'. However, gimplification will first visit
7202 the inner expression, so if a separate POST sequence was not
7203 used, the resulting sequence would be:
7205 1 t.1 = *p
7206 2 p = p - 1
7207 3 t.2 = t.1 + 1
7208 4 *p = t.2
7210 However, the post-decrement operation in line #2 must not be
7211 evaluated until after the store to *p at line #4, so the
7212 correct sequence should be:
7214 1 t.1 = *p
7215 2 t.2 = t.1 + 1
7216 3 *p = t.2
7217 4 p = p - 1
7219 So, by specifying a separate post queue, it is possible
7220 to emit the post side-effects in the correct order.
7221 If POST_P is NULL, an internal queue will be used. Before
7222 returning to the caller, the sequence POST_P is appended to
7223 the main output sequence PRE_P.
7225 GIMPLE_TEST_F points to a function that takes a tree T and
7226 returns nonzero if T is in the GIMPLE form requested by the
7227 caller. The GIMPLE predicates are in gimple.c.
7229 FALLBACK tells the function what sort of a temporary we want if
7230 gimplification cannot produce an expression that complies with
7231 GIMPLE_TEST_F.
7233 fb_none means that no temporary should be generated
7234 fb_rvalue means that an rvalue is OK to generate
7235 fb_lvalue means that an lvalue is OK to generate
7236 fb_either means that either is OK, but an lvalue is preferable.
7237 fb_mayfail means that gimplification may fail (in which case
7238 GS_ERROR will be returned)
7240 The return value is either GS_ERROR or GS_ALL_DONE, since this
7241 function iterates until EXPR is completely gimplified or an error
7242 occurs. */
7244 enum gimplify_status
7245 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7246 bool (*gimple_test_f) (tree), fallback_t fallback)
7248 tree tmp;
7249 gimple_seq internal_pre = NULL;
7250 gimple_seq internal_post = NULL;
7251 tree save_expr;
7252 bool is_statement;
7253 location_t saved_location;
7254 enum gimplify_status ret;
7255 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7257 save_expr = *expr_p;
7258 if (save_expr == NULL_TREE)
7259 return GS_ALL_DONE;
7261 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7262 is_statement = gimple_test_f == is_gimple_stmt;
7263 if (is_statement)
7264 gcc_assert (pre_p);
7266 /* Consistency checks. */
7267 if (gimple_test_f == is_gimple_reg)
7268 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7269 else if (gimple_test_f == is_gimple_val
7270 || gimple_test_f == is_gimple_call_addr
7271 || gimple_test_f == is_gimple_condexpr
7272 || gimple_test_f == is_gimple_mem_rhs
7273 || gimple_test_f == is_gimple_mem_rhs_or_call
7274 || gimple_test_f == is_gimple_reg_rhs
7275 || gimple_test_f == is_gimple_reg_rhs_or_call
7276 || gimple_test_f == is_gimple_asm_val
7277 || gimple_test_f == is_gimple_mem_ref_addr)
7278 gcc_assert (fallback & fb_rvalue);
7279 else if (gimple_test_f == is_gimple_min_lval
7280 || gimple_test_f == is_gimple_lvalue)
7281 gcc_assert (fallback & fb_lvalue);
7282 else if (gimple_test_f == is_gimple_addressable)
7283 gcc_assert (fallback & fb_either);
7284 else if (gimple_test_f == is_gimple_stmt)
7285 gcc_assert (fallback == fb_none);
7286 else
7288 /* We should have recognized the GIMPLE_TEST_F predicate to
7289 know what kind of fallback to use in case a temporary is
7290 needed to hold the value or address of *EXPR_P. */
7291 gcc_unreachable ();
7294 /* We used to check the predicate here and return immediately if it
7295 succeeds. This is wrong; the design is for gimplification to be
7296 idempotent, and for the predicates to only test for valid forms, not
7297 whether they are fully simplified. */
7298 if (pre_p == NULL)
7299 pre_p = &internal_pre;
7301 if (post_p == NULL)
7302 post_p = &internal_post;
7304 /* Remember the last statements added to PRE_P and POST_P. Every
7305 new statement added by the gimplification helpers needs to be
7306 annotated with location information. To centralize the
7307 responsibility, we remember the last statement that had been
7308 added to both queues before gimplifying *EXPR_P. If
7309 gimplification produces new statements in PRE_P and POST_P, those
7310 statements will be annotated with the same location information
7311 as *EXPR_P. */
7312 pre_last_gsi = gsi_last (*pre_p);
7313 post_last_gsi = gsi_last (*post_p);
7315 saved_location = input_location;
7316 if (save_expr != error_mark_node
7317 && EXPR_HAS_LOCATION (*expr_p))
7318 input_location = EXPR_LOCATION (*expr_p);
7320 /* Loop over the specific gimplifiers until the toplevel node
7321 remains the same. */
7324 /* Strip away as many useless type conversions as possible
7325 at the toplevel. */
7326 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7328 /* Remember the expr. */
7329 save_expr = *expr_p;
7331 /* Die, die, die, my darling. */
7332 if (save_expr == error_mark_node
7333 || (TREE_TYPE (save_expr)
7334 && TREE_TYPE (save_expr) == error_mark_node))
7336 ret = GS_ERROR;
7337 break;
7340 /* Do any language-specific gimplification. */
7341 ret = ((enum gimplify_status)
7342 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7343 if (ret == GS_OK)
7345 if (*expr_p == NULL_TREE)
7346 break;
7347 if (*expr_p != save_expr)
7348 continue;
7350 else if (ret != GS_UNHANDLED)
7351 break;
7353 /* Make sure that all the cases set 'ret' appropriately. */
7354 ret = GS_UNHANDLED;
7355 switch (TREE_CODE (*expr_p))
7357 /* First deal with the special cases. */
7359 case POSTINCREMENT_EXPR:
7360 case POSTDECREMENT_EXPR:
7361 case PREINCREMENT_EXPR:
7362 case PREDECREMENT_EXPR:
7363 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7364 fallback != fb_none,
7365 TREE_TYPE (*expr_p));
7366 break;
7368 case ARRAY_REF:
7369 case ARRAY_RANGE_REF:
7370 case REALPART_EXPR:
7371 case IMAGPART_EXPR:
7372 case COMPONENT_REF:
7373 case VIEW_CONVERT_EXPR:
7374 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7375 fallback ? fallback : fb_rvalue);
7376 break;
7378 case COND_EXPR:
7379 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7381 /* C99 code may assign to an array in a structure value of a
7382 conditional expression, and this has undefined behavior
7383 only on execution, so create a temporary if an lvalue is
7384 required. */
7385 if (fallback == fb_lvalue)
7387 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7388 mark_addressable (*expr_p);
7389 ret = GS_OK;
7391 break;
7393 case CILK_SPAWN_STMT:
7394 gcc_assert
7395 (fn_contains_cilk_spawn_p (cfun)
7396 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p));
7397 if (!seen_error ())
7399 ret = (enum gimplify_status)
7400 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p,
7401 post_p);
7402 break;
7404 /* If errors are seen, then just process it as a CALL_EXPR. */
7406 case CALL_EXPR:
7407 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7409 /* C99 code may assign to an array in a structure returned
7410 from a function, and this has undefined behavior only on
7411 execution, so create a temporary if an lvalue is
7412 required. */
7413 if (fallback == fb_lvalue)
7415 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7416 mark_addressable (*expr_p);
7417 ret = GS_OK;
7419 break;
7421 case TREE_LIST:
7422 gcc_unreachable ();
7424 case COMPOUND_EXPR:
7425 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7426 break;
7428 case COMPOUND_LITERAL_EXPR:
7429 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7430 gimple_test_f, fallback);
7431 break;
7433 case MODIFY_EXPR:
7434 case INIT_EXPR:
7435 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7436 fallback != fb_none);
7437 break;
7439 case TRUTH_ANDIF_EXPR:
7440 case TRUTH_ORIF_EXPR:
7442 /* Preserve the original type of the expression and the
7443 source location of the outer expression. */
7444 tree org_type = TREE_TYPE (*expr_p);
7445 *expr_p = gimple_boolify (*expr_p);
7446 *expr_p = build3_loc (input_location, COND_EXPR,
7447 org_type, *expr_p,
7448 fold_convert_loc
7449 (input_location,
7450 org_type, boolean_true_node),
7451 fold_convert_loc
7452 (input_location,
7453 org_type, boolean_false_node));
7454 ret = GS_OK;
7455 break;
7458 case TRUTH_NOT_EXPR:
7460 tree type = TREE_TYPE (*expr_p);
7461 /* The parsers are careful to generate TRUTH_NOT_EXPR
7462 only with operands that are always zero or one.
7463 We do not fold here but handle the only interesting case
7464 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7465 *expr_p = gimple_boolify (*expr_p);
7466 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7467 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7468 TREE_TYPE (*expr_p),
7469 TREE_OPERAND (*expr_p, 0));
7470 else
7471 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7472 TREE_TYPE (*expr_p),
7473 TREE_OPERAND (*expr_p, 0),
7474 build_int_cst (TREE_TYPE (*expr_p), 1));
7475 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7476 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7477 ret = GS_OK;
7478 break;
7481 case ADDR_EXPR:
7482 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7483 break;
7485 case ANNOTATE_EXPR:
7487 tree cond = TREE_OPERAND (*expr_p, 0);
7488 tree id = TREE_OPERAND (*expr_p, 1);
7489 tree tmp = create_tmp_var_raw (TREE_TYPE(cond), NULL);
7490 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
7491 gimple call = gimple_build_call_internal (IFN_ANNOTATE, 2,
7492 cond, id);
7493 gimple_call_set_lhs (call, tmp);
7494 gimplify_seq_add_stmt (pre_p, call);
7495 *expr_p = tmp;
7496 ret = GS_ALL_DONE;
7497 break;
7500 case VA_ARG_EXPR:
7501 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7502 break;
7504 CASE_CONVERT:
7505 if (IS_EMPTY_STMT (*expr_p))
7507 ret = GS_ALL_DONE;
7508 break;
7511 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7512 || fallback == fb_none)
7514 /* Just strip a conversion to void (or in void context) and
7515 try again. */
7516 *expr_p = TREE_OPERAND (*expr_p, 0);
7517 ret = GS_OK;
7518 break;
7521 ret = gimplify_conversion (expr_p);
7522 if (ret == GS_ERROR)
7523 break;
7524 if (*expr_p != save_expr)
7525 break;
7526 /* FALLTHRU */
7528 case FIX_TRUNC_EXPR:
7529 /* unary_expr: ... | '(' cast ')' val | ... */
7530 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7531 is_gimple_val, fb_rvalue);
7532 recalculate_side_effects (*expr_p);
7533 break;
7535 case INDIRECT_REF:
7537 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7538 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7539 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7541 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7542 if (*expr_p != save_expr)
7544 ret = GS_OK;
7545 break;
7548 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7549 is_gimple_reg, fb_rvalue);
7550 if (ret == GS_ERROR)
7551 break;
7553 recalculate_side_effects (*expr_p);
7554 *expr_p = fold_build2_loc (input_location, MEM_REF,
7555 TREE_TYPE (*expr_p),
7556 TREE_OPERAND (*expr_p, 0),
7557 build_int_cst (saved_ptr_type, 0));
7558 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7559 TREE_THIS_NOTRAP (*expr_p) = notrap;
7560 ret = GS_OK;
7561 break;
7564 /* We arrive here through the various re-gimplifcation paths. */
7565 case MEM_REF:
7566 /* First try re-folding the whole thing. */
7567 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7568 TREE_OPERAND (*expr_p, 0),
7569 TREE_OPERAND (*expr_p, 1));
7570 if (tmp)
7572 *expr_p = tmp;
7573 recalculate_side_effects (*expr_p);
7574 ret = GS_OK;
7575 break;
7577 /* Avoid re-gimplifying the address operand if it is already
7578 in suitable form. Re-gimplifying would mark the address
7579 operand addressable. Always gimplify when not in SSA form
7580 as we still may have to gimplify decls with value-exprs. */
7581 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7582 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7584 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7585 is_gimple_mem_ref_addr, fb_rvalue);
7586 if (ret == GS_ERROR)
7587 break;
7589 recalculate_side_effects (*expr_p);
7590 ret = GS_ALL_DONE;
7591 break;
7593 /* Constants need not be gimplified. */
7594 case INTEGER_CST:
7595 case REAL_CST:
7596 case FIXED_CST:
7597 case STRING_CST:
7598 case COMPLEX_CST:
7599 case VECTOR_CST:
7600 /* Drop the overflow flag on constants, we do not want
7601 that in the GIMPLE IL. */
7602 if (TREE_OVERFLOW_P (*expr_p))
7603 *expr_p = drop_tree_overflow (*expr_p);
7604 ret = GS_ALL_DONE;
7605 break;
7607 case CONST_DECL:
7608 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7609 CONST_DECL node. Otherwise the decl is replaceable by its
7610 value. */
7611 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7612 if (fallback & fb_lvalue)
7613 ret = GS_ALL_DONE;
7614 else
7616 *expr_p = DECL_INITIAL (*expr_p);
7617 ret = GS_OK;
7619 break;
7621 case DECL_EXPR:
7622 ret = gimplify_decl_expr (expr_p, pre_p);
7623 break;
7625 case BIND_EXPR:
7626 ret = gimplify_bind_expr (expr_p, pre_p);
7627 break;
7629 case LOOP_EXPR:
7630 ret = gimplify_loop_expr (expr_p, pre_p);
7631 break;
7633 case SWITCH_EXPR:
7634 ret = gimplify_switch_expr (expr_p, pre_p);
7635 break;
7637 case EXIT_EXPR:
7638 ret = gimplify_exit_expr (expr_p);
7639 break;
7641 case GOTO_EXPR:
7642 /* If the target is not LABEL, then it is a computed jump
7643 and the target needs to be gimplified. */
7644 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7646 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7647 NULL, is_gimple_val, fb_rvalue);
7648 if (ret == GS_ERROR)
7649 break;
7651 gimplify_seq_add_stmt (pre_p,
7652 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7653 ret = GS_ALL_DONE;
7654 break;
7656 case PREDICT_EXPR:
7657 gimplify_seq_add_stmt (pre_p,
7658 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7659 PREDICT_EXPR_OUTCOME (*expr_p)));
7660 ret = GS_ALL_DONE;
7661 break;
7663 case LABEL_EXPR:
7664 ret = GS_ALL_DONE;
7665 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7666 == current_function_decl);
7667 gimplify_seq_add_stmt (pre_p,
7668 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7669 break;
7671 case CASE_LABEL_EXPR:
7672 ret = gimplify_case_label_expr (expr_p, pre_p);
7673 break;
7675 case RETURN_EXPR:
7676 ret = gimplify_return_expr (*expr_p, pre_p);
7677 break;
7679 case CONSTRUCTOR:
7680 /* Don't reduce this in place; let gimplify_init_constructor work its
7681 magic. Buf if we're just elaborating this for side effects, just
7682 gimplify any element that has side-effects. */
7683 if (fallback == fb_none)
7685 unsigned HOST_WIDE_INT ix;
7686 tree val;
7687 tree temp = NULL_TREE;
7688 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7689 if (TREE_SIDE_EFFECTS (val))
7690 append_to_statement_list (val, &temp);
7692 *expr_p = temp;
7693 ret = temp ? GS_OK : GS_ALL_DONE;
7695 /* C99 code may assign to an array in a constructed
7696 structure or union, and this has undefined behavior only
7697 on execution, so create a temporary if an lvalue is
7698 required. */
7699 else if (fallback == fb_lvalue)
7701 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7702 mark_addressable (*expr_p);
7703 ret = GS_OK;
7705 else
7706 ret = GS_ALL_DONE;
7707 break;
7709 /* The following are special cases that are not handled by the
7710 original GIMPLE grammar. */
7712 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7713 eliminated. */
7714 case SAVE_EXPR:
7715 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7716 break;
7718 case BIT_FIELD_REF:
7719 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7720 post_p, is_gimple_lvalue, fb_either);
7721 recalculate_side_effects (*expr_p);
7722 break;
7724 case TARGET_MEM_REF:
7726 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7728 if (TMR_BASE (*expr_p))
7729 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7730 post_p, is_gimple_mem_ref_addr, fb_either);
7731 if (TMR_INDEX (*expr_p))
7732 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7733 post_p, is_gimple_val, fb_rvalue);
7734 if (TMR_INDEX2 (*expr_p))
7735 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7736 post_p, is_gimple_val, fb_rvalue);
7737 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7738 ret = MIN (r0, r1);
7740 break;
7742 case NON_LVALUE_EXPR:
7743 /* This should have been stripped above. */
7744 gcc_unreachable ();
7746 case ASM_EXPR:
7747 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7748 break;
7750 case TRY_FINALLY_EXPR:
7751 case TRY_CATCH_EXPR:
7753 gimple_seq eval, cleanup;
7754 gimple try_;
7756 /* Calls to destructors are generated automatically in FINALLY/CATCH
7757 block. They should have location as UNKNOWN_LOCATION. However,
7758 gimplify_call_expr will reset these call stmts to input_location
7759 if it finds stmt's location is unknown. To prevent resetting for
7760 destructors, we set the input_location to unknown.
7761 Note that this only affects the destructor calls in FINALLY/CATCH
7762 block, and will automatically reset to its original value by the
7763 end of gimplify_expr. */
7764 input_location = UNKNOWN_LOCATION;
7765 eval = cleanup = NULL;
7766 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7767 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7768 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7769 if (gimple_seq_empty_p (cleanup))
7771 gimple_seq_add_seq (pre_p, eval);
7772 ret = GS_ALL_DONE;
7773 break;
7775 try_ = gimple_build_try (eval, cleanup,
7776 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7777 ? GIMPLE_TRY_FINALLY
7778 : GIMPLE_TRY_CATCH);
7779 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
7780 gimple_set_location (try_, saved_location);
7781 else
7782 gimple_set_location (try_, EXPR_LOCATION (save_expr));
7783 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7784 gimple_try_set_catch_is_cleanup (try_,
7785 TRY_CATCH_IS_CLEANUP (*expr_p));
7786 gimplify_seq_add_stmt (pre_p, try_);
7787 ret = GS_ALL_DONE;
7788 break;
7791 case CLEANUP_POINT_EXPR:
7792 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7793 break;
7795 case TARGET_EXPR:
7796 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7797 break;
7799 case CATCH_EXPR:
7801 gimple c;
7802 gimple_seq handler = NULL;
7803 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7804 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7805 gimplify_seq_add_stmt (pre_p, c);
7806 ret = GS_ALL_DONE;
7807 break;
7810 case EH_FILTER_EXPR:
7812 gimple ehf;
7813 gimple_seq failure = NULL;
7815 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7816 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
7817 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
7818 gimplify_seq_add_stmt (pre_p, ehf);
7819 ret = GS_ALL_DONE;
7820 break;
7823 case OBJ_TYPE_REF:
7825 enum gimplify_status r0, r1;
7826 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7827 post_p, is_gimple_val, fb_rvalue);
7828 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7829 post_p, is_gimple_val, fb_rvalue);
7830 TREE_SIDE_EFFECTS (*expr_p) = 0;
7831 ret = MIN (r0, r1);
7833 break;
7835 case LABEL_DECL:
7836 /* We get here when taking the address of a label. We mark
7837 the label as "forced"; meaning it can never be removed and
7838 it is a potential target for any computed goto. */
7839 FORCED_LABEL (*expr_p) = 1;
7840 ret = GS_ALL_DONE;
7841 break;
7843 case STATEMENT_LIST:
7844 ret = gimplify_statement_list (expr_p, pre_p);
7845 break;
7847 case WITH_SIZE_EXPR:
7849 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7850 post_p == &internal_post ? NULL : post_p,
7851 gimple_test_f, fallback);
7852 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7853 is_gimple_val, fb_rvalue);
7854 ret = GS_ALL_DONE;
7856 break;
7858 case VAR_DECL:
7859 case PARM_DECL:
7860 ret = gimplify_var_or_parm_decl (expr_p);
7861 break;
7863 case RESULT_DECL:
7864 /* When within an OpenMP context, notice uses of variables. */
7865 if (gimplify_omp_ctxp)
7866 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7867 ret = GS_ALL_DONE;
7868 break;
7870 case SSA_NAME:
7871 /* Allow callbacks into the gimplifier during optimization. */
7872 ret = GS_ALL_DONE;
7873 break;
7875 case OMP_PARALLEL:
7876 gimplify_omp_parallel (expr_p, pre_p);
7877 ret = GS_ALL_DONE;
7878 break;
7880 case OMP_TASK:
7881 gimplify_omp_task (expr_p, pre_p);
7882 ret = GS_ALL_DONE;
7883 break;
7885 case OMP_FOR:
7886 case OMP_SIMD:
7887 case CILK_SIMD:
7888 case OMP_DISTRIBUTE:
7889 ret = gimplify_omp_for (expr_p, pre_p);
7890 break;
7892 case OMP_SECTIONS:
7893 case OMP_SINGLE:
7894 case OMP_TARGET:
7895 case OMP_TARGET_DATA:
7896 case OMP_TEAMS:
7897 gimplify_omp_workshare (expr_p, pre_p);
7898 ret = GS_ALL_DONE;
7899 break;
7901 case OMP_TARGET_UPDATE:
7902 gimplify_omp_target_update (expr_p, pre_p);
7903 ret = GS_ALL_DONE;
7904 break;
7906 case OMP_SECTION:
7907 case OMP_MASTER:
7908 case OMP_TASKGROUP:
7909 case OMP_ORDERED:
7910 case OMP_CRITICAL:
7912 gimple_seq body = NULL;
7913 gimple g;
7915 gimplify_and_add (OMP_BODY (*expr_p), &body);
7916 switch (TREE_CODE (*expr_p))
7918 case OMP_SECTION:
7919 g = gimple_build_omp_section (body);
7920 break;
7921 case OMP_MASTER:
7922 g = gimple_build_omp_master (body);
7923 break;
7924 case OMP_TASKGROUP:
7926 gimple_seq cleanup = NULL;
7927 tree fn
7928 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
7929 g = gimple_build_call (fn, 0);
7930 gimple_seq_add_stmt (&cleanup, g);
7931 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7932 body = NULL;
7933 gimple_seq_add_stmt (&body, g);
7934 g = gimple_build_omp_taskgroup (body);
7936 break;
7937 case OMP_ORDERED:
7938 g = gimple_build_omp_ordered (body);
7939 break;
7940 case OMP_CRITICAL:
7941 g = gimple_build_omp_critical (body,
7942 OMP_CRITICAL_NAME (*expr_p));
7943 break;
7944 default:
7945 gcc_unreachable ();
7947 gimplify_seq_add_stmt (pre_p, g);
7948 ret = GS_ALL_DONE;
7949 break;
7952 case OMP_ATOMIC:
7953 case OMP_ATOMIC_READ:
7954 case OMP_ATOMIC_CAPTURE_OLD:
7955 case OMP_ATOMIC_CAPTURE_NEW:
7956 ret = gimplify_omp_atomic (expr_p, pre_p);
7957 break;
7959 case TRANSACTION_EXPR:
7960 ret = gimplify_transaction (expr_p, pre_p);
7961 break;
7963 case TRUTH_AND_EXPR:
7964 case TRUTH_OR_EXPR:
7965 case TRUTH_XOR_EXPR:
7967 tree orig_type = TREE_TYPE (*expr_p);
7968 tree new_type, xop0, xop1;
7969 *expr_p = gimple_boolify (*expr_p);
7970 new_type = TREE_TYPE (*expr_p);
7971 if (!useless_type_conversion_p (orig_type, new_type))
7973 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
7974 ret = GS_OK;
7975 break;
7978 /* Boolified binary truth expressions are semantically equivalent
7979 to bitwise binary expressions. Canonicalize them to the
7980 bitwise variant. */
7981 switch (TREE_CODE (*expr_p))
7983 case TRUTH_AND_EXPR:
7984 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7985 break;
7986 case TRUTH_OR_EXPR:
7987 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7988 break;
7989 case TRUTH_XOR_EXPR:
7990 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7991 break;
7992 default:
7993 break;
7995 /* Now make sure that operands have compatible type to
7996 expression's new_type. */
7997 xop0 = TREE_OPERAND (*expr_p, 0);
7998 xop1 = TREE_OPERAND (*expr_p, 1);
7999 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8000 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8001 new_type,
8002 xop0);
8003 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8004 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8005 new_type,
8006 xop1);
8007 /* Continue classified as tcc_binary. */
8008 goto expr_2;
8011 case FMA_EXPR:
8012 case VEC_COND_EXPR:
8013 case VEC_PERM_EXPR:
8014 /* Classified as tcc_expression. */
8015 goto expr_3;
8017 case POINTER_PLUS_EXPR:
8019 enum gimplify_status r0, r1;
8020 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8021 post_p, is_gimple_val, fb_rvalue);
8022 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8023 post_p, is_gimple_val, fb_rvalue);
8024 recalculate_side_effects (*expr_p);
8025 ret = MIN (r0, r1);
8026 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
8027 after gimplifying operands - this is similar to how
8028 it would be folding all gimplified stmts on creation
8029 to have them canonicalized, which is what we eventually
8030 should do anyway. */
8031 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
8032 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
8034 *expr_p = build_fold_addr_expr_with_type_loc
8035 (input_location,
8036 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
8037 TREE_OPERAND (*expr_p, 0),
8038 fold_convert (ptr_type_node,
8039 TREE_OPERAND (*expr_p, 1))),
8040 TREE_TYPE (*expr_p));
8041 ret = MIN (ret, GS_OK);
8043 break;
8046 case CILK_SYNC_STMT:
8048 if (!fn_contains_cilk_spawn_p (cfun))
8050 error_at (EXPR_LOCATION (*expr_p),
8051 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8052 ret = GS_ERROR;
8054 else
8056 gimplify_cilk_sync (expr_p, pre_p);
8057 ret = GS_ALL_DONE;
8059 break;
8062 default:
8063 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
8065 case tcc_comparison:
8066 /* Handle comparison of objects of non scalar mode aggregates
8067 with a call to memcmp. It would be nice to only have to do
8068 this for variable-sized objects, but then we'd have to allow
8069 the same nest of reference nodes we allow for MODIFY_EXPR and
8070 that's too complex.
8072 Compare scalar mode aggregates as scalar mode values. Using
8073 memcmp for them would be very inefficient at best, and is
8074 plain wrong if bitfields are involved. */
8076 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
8078 /* Vector comparisons need no boolification. */
8079 if (TREE_CODE (type) == VECTOR_TYPE)
8080 goto expr_2;
8081 else if (!AGGREGATE_TYPE_P (type))
8083 tree org_type = TREE_TYPE (*expr_p);
8084 *expr_p = gimple_boolify (*expr_p);
8085 if (!useless_type_conversion_p (org_type,
8086 TREE_TYPE (*expr_p)))
8088 *expr_p = fold_convert_loc (input_location,
8089 org_type, *expr_p);
8090 ret = GS_OK;
8092 else
8093 goto expr_2;
8095 else if (TYPE_MODE (type) != BLKmode)
8096 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8097 else
8098 ret = gimplify_variable_sized_compare (expr_p);
8100 break;
8103 /* If *EXPR_P does not need to be special-cased, handle it
8104 according to its class. */
8105 case tcc_unary:
8106 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8107 post_p, is_gimple_val, fb_rvalue);
8108 break;
8110 case tcc_binary:
8111 expr_2:
8113 enum gimplify_status r0, r1;
8115 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8116 post_p, is_gimple_val, fb_rvalue);
8117 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8118 post_p, is_gimple_val, fb_rvalue);
8120 ret = MIN (r0, r1);
8121 break;
8124 expr_3:
8126 enum gimplify_status r0, r1, r2;
8128 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8129 post_p, is_gimple_val, fb_rvalue);
8130 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8131 post_p, is_gimple_val, fb_rvalue);
8132 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8133 post_p, is_gimple_val, fb_rvalue);
8135 ret = MIN (MIN (r0, r1), r2);
8136 break;
8139 case tcc_declaration:
8140 case tcc_constant:
8141 ret = GS_ALL_DONE;
8142 goto dont_recalculate;
8144 default:
8145 gcc_unreachable ();
8148 recalculate_side_effects (*expr_p);
8150 dont_recalculate:
8151 break;
8154 gcc_assert (*expr_p || ret != GS_OK);
8156 while (ret == GS_OK);
8158 /* If we encountered an error_mark somewhere nested inside, either
8159 stub out the statement or propagate the error back out. */
8160 if (ret == GS_ERROR)
8162 if (is_statement)
8163 *expr_p = NULL;
8164 goto out;
8167 /* This was only valid as a return value from the langhook, which
8168 we handled. Make sure it doesn't escape from any other context. */
8169 gcc_assert (ret != GS_UNHANDLED);
8171 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8173 /* We aren't looking for a value, and we don't have a valid
8174 statement. If it doesn't have side-effects, throw it away. */
8175 if (!TREE_SIDE_EFFECTS (*expr_p))
8176 *expr_p = NULL;
8177 else if (!TREE_THIS_VOLATILE (*expr_p))
8179 /* This is probably a _REF that contains something nested that
8180 has side effects. Recurse through the operands to find it. */
8181 enum tree_code code = TREE_CODE (*expr_p);
8183 switch (code)
8185 case COMPONENT_REF:
8186 case REALPART_EXPR:
8187 case IMAGPART_EXPR:
8188 case VIEW_CONVERT_EXPR:
8189 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8190 gimple_test_f, fallback);
8191 break;
8193 case ARRAY_REF:
8194 case ARRAY_RANGE_REF:
8195 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8196 gimple_test_f, fallback);
8197 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8198 gimple_test_f, fallback);
8199 break;
8201 default:
8202 /* Anything else with side-effects must be converted to
8203 a valid statement before we get here. */
8204 gcc_unreachable ();
8207 *expr_p = NULL;
8209 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8210 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8212 /* Historically, the compiler has treated a bare reference
8213 to a non-BLKmode volatile lvalue as forcing a load. */
8214 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8216 /* Normally, we do not want to create a temporary for a
8217 TREE_ADDRESSABLE type because such a type should not be
8218 copied by bitwise-assignment. However, we make an
8219 exception here, as all we are doing here is ensuring that
8220 we read the bytes that make up the type. We use
8221 create_tmp_var_raw because create_tmp_var will abort when
8222 given a TREE_ADDRESSABLE type. */
8223 tree tmp = create_tmp_var_raw (type, "vol");
8224 gimple_add_tmp_var (tmp);
8225 gimplify_assign (tmp, *expr_p, pre_p);
8226 *expr_p = NULL;
8228 else
8229 /* We can't do anything useful with a volatile reference to
8230 an incomplete type, so just throw it away. Likewise for
8231 a BLKmode type, since any implicit inner load should
8232 already have been turned into an explicit one by the
8233 gimplification process. */
8234 *expr_p = NULL;
8237 /* If we are gimplifying at the statement level, we're done. Tack
8238 everything together and return. */
8239 if (fallback == fb_none || is_statement)
8241 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8242 it out for GC to reclaim it. */
8243 *expr_p = NULL_TREE;
8245 if (!gimple_seq_empty_p (internal_pre)
8246 || !gimple_seq_empty_p (internal_post))
8248 gimplify_seq_add_seq (&internal_pre, internal_post);
8249 gimplify_seq_add_seq (pre_p, internal_pre);
8252 /* The result of gimplifying *EXPR_P is going to be the last few
8253 statements in *PRE_P and *POST_P. Add location information
8254 to all the statements that were added by the gimplification
8255 helpers. */
8256 if (!gimple_seq_empty_p (*pre_p))
8257 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8259 if (!gimple_seq_empty_p (*post_p))
8260 annotate_all_with_location_after (*post_p, post_last_gsi,
8261 input_location);
8263 goto out;
8266 #ifdef ENABLE_GIMPLE_CHECKING
8267 if (*expr_p)
8269 enum tree_code code = TREE_CODE (*expr_p);
8270 /* These expressions should already be in gimple IR form. */
8271 gcc_assert (code != MODIFY_EXPR
8272 && code != ASM_EXPR
8273 && code != BIND_EXPR
8274 && code != CATCH_EXPR
8275 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8276 && code != EH_FILTER_EXPR
8277 && code != GOTO_EXPR
8278 && code != LABEL_EXPR
8279 && code != LOOP_EXPR
8280 && code != SWITCH_EXPR
8281 && code != TRY_FINALLY_EXPR
8282 && code != OMP_CRITICAL
8283 && code != OMP_FOR
8284 && code != OMP_MASTER
8285 && code != OMP_TASKGROUP
8286 && code != OMP_ORDERED
8287 && code != OMP_PARALLEL
8288 && code != OMP_SECTIONS
8289 && code != OMP_SECTION
8290 && code != OMP_SINGLE);
8292 #endif
8294 /* Otherwise we're gimplifying a subexpression, so the resulting
8295 value is interesting. If it's a valid operand that matches
8296 GIMPLE_TEST_F, we're done. Unless we are handling some
8297 post-effects internally; if that's the case, we need to copy into
8298 a temporary before adding the post-effects to POST_P. */
8299 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
8300 goto out;
8302 /* Otherwise, we need to create a new temporary for the gimplified
8303 expression. */
8305 /* We can't return an lvalue if we have an internal postqueue. The
8306 object the lvalue refers to would (probably) be modified by the
8307 postqueue; we need to copy the value out first, which means an
8308 rvalue. */
8309 if ((fallback & fb_lvalue)
8310 && gimple_seq_empty_p (internal_post)
8311 && is_gimple_addressable (*expr_p))
8313 /* An lvalue will do. Take the address of the expression, store it
8314 in a temporary, and replace the expression with an INDIRECT_REF of
8315 that temporary. */
8316 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8317 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8318 *expr_p = build_simple_mem_ref (tmp);
8320 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8322 /* An rvalue will do. Assign the gimplified expression into a
8323 new temporary TMP and replace the original expression with
8324 TMP. First, make sure that the expression has a type so that
8325 it can be assigned into a temporary. */
8326 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
8327 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
8329 else
8331 #ifdef ENABLE_GIMPLE_CHECKING
8332 if (!(fallback & fb_mayfail))
8334 fprintf (stderr, "gimplification failed:\n");
8335 print_generic_expr (stderr, *expr_p, 0);
8336 debug_tree (*expr_p);
8337 internal_error ("gimplification failed");
8339 #endif
8340 gcc_assert (fallback & fb_mayfail);
8342 /* If this is an asm statement, and the user asked for the
8343 impossible, don't die. Fail and let gimplify_asm_expr
8344 issue an error. */
8345 ret = GS_ERROR;
8346 goto out;
8349 /* Make sure the temporary matches our predicate. */
8350 gcc_assert ((*gimple_test_f) (*expr_p));
8352 if (!gimple_seq_empty_p (internal_post))
8354 annotate_all_with_location (internal_post, input_location);
8355 gimplify_seq_add_seq (pre_p, internal_post);
8358 out:
8359 input_location = saved_location;
8360 return ret;
8363 /* Look through TYPE for variable-sized objects and gimplify each such
8364 size that we find. Add to LIST_P any statements generated. */
8366 void
8367 gimplify_type_sizes (tree type, gimple_seq *list_p)
8369 tree field, t;
8371 if (type == NULL || type == error_mark_node)
8372 return;
8374 /* We first do the main variant, then copy into any other variants. */
8375 type = TYPE_MAIN_VARIANT (type);
8377 /* Avoid infinite recursion. */
8378 if (TYPE_SIZES_GIMPLIFIED (type))
8379 return;
8381 TYPE_SIZES_GIMPLIFIED (type) = 1;
8383 switch (TREE_CODE (type))
8385 case INTEGER_TYPE:
8386 case ENUMERAL_TYPE:
8387 case BOOLEAN_TYPE:
8388 case REAL_TYPE:
8389 case FIXED_POINT_TYPE:
8390 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8391 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
8393 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8395 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8396 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
8398 break;
8400 case ARRAY_TYPE:
8401 /* These types may not have declarations, so handle them here. */
8402 gimplify_type_sizes (TREE_TYPE (type), list_p);
8403 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
8404 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
8405 with assigned stack slots, for -O1+ -g they should be tracked
8406 by VTA. */
8407 if (!(TYPE_NAME (type)
8408 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8409 && DECL_IGNORED_P (TYPE_NAME (type)))
8410 && TYPE_DOMAIN (type)
8411 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8413 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8414 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8415 DECL_IGNORED_P (t) = 0;
8416 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8417 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8418 DECL_IGNORED_P (t) = 0;
8420 break;
8422 case RECORD_TYPE:
8423 case UNION_TYPE:
8424 case QUAL_UNION_TYPE:
8425 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8426 if (TREE_CODE (field) == FIELD_DECL)
8428 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
8429 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8430 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8431 gimplify_type_sizes (TREE_TYPE (field), list_p);
8433 break;
8435 case POINTER_TYPE:
8436 case REFERENCE_TYPE:
8437 /* We used to recurse on the pointed-to type here, which turned out to
8438 be incorrect because its definition might refer to variables not
8439 yet initialized at this point if a forward declaration is involved.
8441 It was actually useful for anonymous pointed-to types to ensure
8442 that the sizes evaluation dominates every possible later use of the
8443 values. Restricting to such types here would be safe since there
8444 is no possible forward declaration around, but would introduce an
8445 undesirable middle-end semantic to anonymity. We then defer to
8446 front-ends the responsibility of ensuring that the sizes are
8447 evaluated both early and late enough, e.g. by attaching artificial
8448 type declarations to the tree. */
8449 break;
8451 default:
8452 break;
8455 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8456 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
8458 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8460 TYPE_SIZE (t) = TYPE_SIZE (type);
8461 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8462 TYPE_SIZES_GIMPLIFIED (t) = 1;
8466 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8467 a size or position, has had all of its SAVE_EXPRs evaluated.
8468 We add any required statements to *STMT_P. */
8470 void
8471 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
8473 tree expr = *expr_p;
8475 /* We don't do anything if the value isn't there, is constant, or contains
8476 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
8477 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
8478 will want to replace it with a new variable, but that will cause problems
8479 if this type is from outside the function. It's OK to have that here. */
8480 if (is_gimple_sizepos (expr))
8481 return;
8483 *expr_p = unshare_expr (expr);
8485 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
8488 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8489 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8490 is true, also gimplify the parameters. */
8492 gimple
8493 gimplify_body (tree fndecl, bool do_parms)
8495 location_t saved_location = input_location;
8496 gimple_seq parm_stmts, seq;
8497 gimple outer_bind;
8498 struct cgraph_node *cgn;
8500 timevar_push (TV_TREE_GIMPLIFY);
8502 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8503 gimplification. */
8504 default_rtl_profile ();
8506 gcc_assert (gimplify_ctxp == NULL);
8507 push_gimplify_context ();
8509 if (flag_openmp)
8511 gcc_assert (gimplify_omp_ctxp == NULL);
8512 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
8513 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
8516 /* Unshare most shared trees in the body and in that of any nested functions.
8517 It would seem we don't have to do this for nested functions because
8518 they are supposed to be output and then the outer function gimplified
8519 first, but the g++ front end doesn't always do it that way. */
8520 unshare_body (fndecl);
8521 unvisit_body (fndecl);
8523 cgn = cgraph_get_node (fndecl);
8524 if (cgn && cgn->origin)
8525 nonlocal_vlas = pointer_set_create ();
8527 /* Make sure input_location isn't set to something weird. */
8528 input_location = DECL_SOURCE_LOCATION (fndecl);
8530 /* Resolve callee-copies. This has to be done before processing
8531 the body so that DECL_VALUE_EXPR gets processed correctly. */
8532 parm_stmts = do_parms ? gimplify_parameters () : NULL;
8534 /* Gimplify the function's body. */
8535 seq = NULL;
8536 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
8537 outer_bind = gimple_seq_first_stmt (seq);
8538 if (!outer_bind)
8540 outer_bind = gimple_build_nop ();
8541 gimplify_seq_add_stmt (&seq, outer_bind);
8544 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8545 not the case, wrap everything in a GIMPLE_BIND to make it so. */
8546 if (gimple_code (outer_bind) == GIMPLE_BIND
8547 && gimple_seq_first (seq) == gimple_seq_last (seq))
8549 else
8550 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8552 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8554 /* If we had callee-copies statements, insert them at the beginning
8555 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
8556 if (!gimple_seq_empty_p (parm_stmts))
8558 tree parm;
8560 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8561 gimple_bind_set_body (outer_bind, parm_stmts);
8563 for (parm = DECL_ARGUMENTS (current_function_decl);
8564 parm; parm = DECL_CHAIN (parm))
8565 if (DECL_HAS_VALUE_EXPR_P (parm))
8567 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8568 DECL_IGNORED_P (parm) = 0;
8572 if (nonlocal_vlas)
8574 pointer_set_destroy (nonlocal_vlas);
8575 nonlocal_vlas = NULL;
8578 if ((flag_openmp || flag_openmp_simd) && gimplify_omp_ctxp)
8580 delete_omp_context (gimplify_omp_ctxp);
8581 gimplify_omp_ctxp = NULL;
8584 pop_gimplify_context (outer_bind);
8585 gcc_assert (gimplify_ctxp == NULL);
8587 #ifdef ENABLE_CHECKING
8588 if (!seen_error ())
8589 verify_gimple_in_seq (gimple_bind_body (outer_bind));
8590 #endif
8592 timevar_pop (TV_TREE_GIMPLIFY);
8593 input_location = saved_location;
8595 return outer_bind;
8598 typedef char *char_p; /* For DEF_VEC_P. */
8600 /* Return whether we should exclude FNDECL from instrumentation. */
8602 static bool
8603 flag_instrument_functions_exclude_p (tree fndecl)
8605 vec<char_p> *v;
8607 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8608 if (v && v->length () > 0)
8610 const char *name;
8611 int i;
8612 char *s;
8614 name = lang_hooks.decl_printable_name (fndecl, 0);
8615 FOR_EACH_VEC_ELT (*v, i, s)
8616 if (strstr (name, s) != NULL)
8617 return true;
8620 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8621 if (v && v->length () > 0)
8623 const char *name;
8624 int i;
8625 char *s;
8627 name = DECL_SOURCE_FILE (fndecl);
8628 FOR_EACH_VEC_ELT (*v, i, s)
8629 if (strstr (name, s) != NULL)
8630 return true;
8633 return false;
8636 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
8637 node for the function we want to gimplify.
8639 Return the sequence of GIMPLE statements corresponding to the body
8640 of FNDECL. */
8642 void
8643 gimplify_function_tree (tree fndecl)
8645 tree parm, ret;
8646 gimple_seq seq;
8647 gimple bind;
8649 gcc_assert (!gimple_body (fndecl));
8651 if (DECL_STRUCT_FUNCTION (fndecl))
8652 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8653 else
8654 push_struct_function (fndecl);
8656 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
8658 /* Preliminarily mark non-addressed complex variables as eligible
8659 for promotion to gimple registers. We'll transform their uses
8660 as we find them. */
8661 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8662 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
8663 && !TREE_THIS_VOLATILE (parm)
8664 && !needs_to_live_in_memory (parm))
8665 DECL_GIMPLE_REG_P (parm) = 1;
8668 ret = DECL_RESULT (fndecl);
8669 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
8670 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
8671 && !needs_to_live_in_memory (ret))
8672 DECL_GIMPLE_REG_P (ret) = 1;
8674 bind = gimplify_body (fndecl, true);
8676 /* The tree body of the function is no longer needed, replace it
8677 with the new GIMPLE body. */
8678 seq = NULL;
8679 gimple_seq_add_stmt (&seq, bind);
8680 gimple_set_body (fndecl, seq);
8682 /* If we're instrumenting function entry/exit, then prepend the call to
8683 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8684 catch the exit hook. */
8685 /* ??? Add some way to ignore exceptions for this TFE. */
8686 if (flag_instrument_function_entry_exit
8687 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8688 && !flag_instrument_functions_exclude_p (fndecl))
8690 tree x;
8691 gimple new_bind;
8692 gimple tf;
8693 gimple_seq cleanup = NULL, body = NULL;
8694 tree tmp_var;
8695 gimple call;
8697 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8698 call = gimple_build_call (x, 1, integer_zero_node);
8699 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8700 gimple_call_set_lhs (call, tmp_var);
8701 gimplify_seq_add_stmt (&cleanup, call);
8702 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
8703 call = gimple_build_call (x, 2,
8704 build_fold_addr_expr (current_function_decl),
8705 tmp_var);
8706 gimplify_seq_add_stmt (&cleanup, call);
8707 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
8709 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8710 call = gimple_build_call (x, 1, integer_zero_node);
8711 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8712 gimple_call_set_lhs (call, tmp_var);
8713 gimplify_seq_add_stmt (&body, call);
8714 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
8715 call = gimple_build_call (x, 2,
8716 build_fold_addr_expr (current_function_decl),
8717 tmp_var);
8718 gimplify_seq_add_stmt (&body, call);
8719 gimplify_seq_add_stmt (&body, tf);
8720 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
8721 /* Clear the block for BIND, since it is no longer directly inside
8722 the function, but within a try block. */
8723 gimple_bind_set_block (bind, NULL);
8725 /* Replace the current function body with the body
8726 wrapped in the try/finally TF. */
8727 seq = NULL;
8728 gimple_seq_add_stmt (&seq, new_bind);
8729 gimple_set_body (fndecl, seq);
8732 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8733 cfun->curr_properties = PROP_gimple_any;
8735 pop_cfun ();
8738 /* Return a dummy expression of type TYPE in order to keep going after an
8739 error. */
8741 static tree
8742 dummy_object (tree type)
8744 tree t = build_int_cst (build_pointer_type (type), 0);
8745 return build2 (MEM_REF, type, t, t);
8748 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
8749 builtin function, but a very special sort of operator. */
8751 enum gimplify_status
8752 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
8754 tree promoted_type, have_va_type;
8755 tree valist = TREE_OPERAND (*expr_p, 0);
8756 tree type = TREE_TYPE (*expr_p);
8757 tree t;
8758 location_t loc = EXPR_LOCATION (*expr_p);
8760 /* Verify that valist is of the proper type. */
8761 have_va_type = TREE_TYPE (valist);
8762 if (have_va_type == error_mark_node)
8763 return GS_ERROR;
8764 have_va_type = targetm.canonical_va_list_type (have_va_type);
8766 if (have_va_type == NULL_TREE)
8768 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
8769 return GS_ERROR;
8772 /* Generate a diagnostic for requesting data of a type that cannot
8773 be passed through `...' due to type promotion at the call site. */
8774 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
8775 != type)
8777 static bool gave_help;
8778 bool warned;
8780 /* Unfortunately, this is merely undefined, rather than a constraint
8781 violation, so we cannot make this an error. If this call is never
8782 executed, the program is still strictly conforming. */
8783 warned = warning_at (loc, 0,
8784 "%qT is promoted to %qT when passed through %<...%>",
8785 type, promoted_type);
8786 if (!gave_help && warned)
8788 gave_help = true;
8789 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
8790 promoted_type, type);
8793 /* We can, however, treat "undefined" any way we please.
8794 Call abort to encourage the user to fix the program. */
8795 if (warned)
8796 inform (loc, "if this code is reached, the program will abort");
8797 /* Before the abort, allow the evaluation of the va_list
8798 expression to exit or longjmp. */
8799 gimplify_and_add (valist, pre_p);
8800 t = build_call_expr_loc (loc,
8801 builtin_decl_implicit (BUILT_IN_TRAP), 0);
8802 gimplify_and_add (t, pre_p);
8804 /* This is dead code, but go ahead and finish so that the
8805 mode of the result comes out right. */
8806 *expr_p = dummy_object (type);
8807 return GS_ALL_DONE;
8809 else
8811 /* Make it easier for the backends by protecting the valist argument
8812 from multiple evaluations. */
8813 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
8815 /* For this case, the backends will be expecting a pointer to
8816 TREE_TYPE (abi), but it's possible we've
8817 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
8818 So fix it. */
8819 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8821 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
8822 valist = fold_convert_loc (loc, p1,
8823 build_fold_addr_expr_loc (loc, valist));
8826 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
8828 else
8829 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
8831 if (!targetm.gimplify_va_arg_expr)
8832 /* FIXME: Once most targets are converted we should merely
8833 assert this is non-null. */
8834 return GS_ALL_DONE;
8836 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
8837 return GS_OK;
8841 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
8843 DST/SRC are the destination and source respectively. You can pass
8844 ungimplified trees in DST or SRC, in which case they will be
8845 converted to a gimple operand if necessary.
8847 This function returns the newly created GIMPLE_ASSIGN tuple. */
8849 gimple
8850 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
8852 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8853 gimplify_and_add (t, seq_p);
8854 ggc_free (t);
8855 return gimple_seq_last_stmt (*seq_p);
8858 inline hashval_t
8859 gimplify_hasher::hash (const value_type *p)
8861 tree t = p->val;
8862 return iterative_hash_expr (t, 0);
8865 inline bool
8866 gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
8868 tree t1 = p1->val;
8869 tree t2 = p2->val;
8870 enum tree_code code = TREE_CODE (t1);
8872 if (TREE_CODE (t2) != code
8873 || TREE_TYPE (t1) != TREE_TYPE (t2))
8874 return false;
8876 if (!operand_equal_p (t1, t2, 0))
8877 return false;
8879 #ifdef ENABLE_CHECKING
8880 /* Only allow them to compare equal if they also hash equal; otherwise
8881 results are nondeterminate, and we fail bootstrap comparison. */
8882 gcc_assert (hash (p1) == hash (p2));
8883 #endif
8885 return true;