Merge from trunk:
[official-gcc.git] / main / gcc / gimplify.c
blob8b95717650642f0979cc16df7479601f5e735b0b
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2014 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tree.h"
27 #include "expr.h"
28 #include "hash-set.h"
29 #include "pointer-set.h"
30 #include "hash-table.h"
31 #include "basic-block.h"
32 #include "tree-ssa-alias.h"
33 #include "internal-fn.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "gimple-expr.h"
37 #include "is-a.h"
38 #include "gimple.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "stringpool.h"
42 #include "calls.h"
43 #include "varasm.h"
44 #include "stor-layout.h"
45 #include "stmt.h"
46 #include "print-tree.h"
47 #include "tree-iterator.h"
48 #include "tree-inline.h"
49 #include "tree-pretty-print.h"
50 #include "langhooks.h"
51 #include "bitmap.h"
52 #include "gimple-ssa.h"
53 #include "cgraph.h"
54 #include "tree-cfg.h"
55 #include "tree-ssanames.h"
56 #include "tree-ssa.h"
57 #include "diagnostic-core.h"
58 #include "target.h"
59 #include "splay-tree.h"
60 #include "omp-low.h"
61 #include "gimple-low.h"
62 #include "cilk.h"
64 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
65 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
66 #include "builtins.h"
/* Flags recorded per decl in an OpenMP context's splay tree, describing
   how the variable is referenced/shared inside the region.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,
  GOVD_MAP_TO_ONLY = 8192,

  /* Mask of the flags that represent a data-sharing class.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of OpenMP region currently being gimplified.  Values are bit
   flags so combinations (e.g. combined parallel) can be tested.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_TARGET_DATA = 16,
  ORT_TARGET = 32
};
103 /* Gimplify hashtable helper. */
105 struct gimplify_hasher : typed_free_remove <elt_t>
107 typedef elt_t value_type;
108 typedef elt_t compare_type;
109 static inline hashval_t hash (const value_type *);
110 static inline bool equal (const value_type *, const compare_type *);
113 struct gimplify_ctx
115 struct gimplify_ctx *prev_context;
117 vec<gimple> bind_expr_stack;
118 tree temps;
119 gimple_seq conditional_cleanups;
120 tree exit_label;
121 tree return_temp;
123 vec<tree> case_labels;
124 /* The formal temporary table. Should this be persistent? */
125 hash_table<gimplify_hasher> *temp_htab;
127 int conditions;
128 bool save_stack;
129 bool into_ssa;
130 bool allow_rhs_cond_expr;
131 bool in_cleanup_point_expr;
134 struct gimplify_omp_ctx
136 struct gimplify_omp_ctx *outer_context;
137 splay_tree variables;
138 hash_set<tree> *privatized_types;
139 location_t location;
140 enum omp_clause_default_kind default_kind;
141 enum omp_region_type region_type;
142 bool combined_loop;
143 bool distribute;
146 static struct gimplify_ctx *gimplify_ctxp;
147 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
149 /* Forward declaration. */
150 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
152 /* Shorter alias name for the above function for use in gimplify.c
153 only. */
155 static inline void
156 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
158 gimple_seq_add_stmt_without_update (seq_p, gs);
161 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
162 NULL, a new sequence is allocated. This function is
163 similar to gimple_seq_add_seq, but does not scan the operands.
164 During gimplification, we need to manipulate statement sequences
165 before the def/use vectors have been constructed. */
167 static void
168 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
170 gimple_stmt_iterator si;
172 if (src == NULL)
173 return;
175 si = gsi_last (*dst_p);
176 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
180 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
181 and popping gimplify contexts. */
183 static struct gimplify_ctx *ctx_pool = NULL;
185 /* Return a gimplify context struct from the pool. */
187 static inline struct gimplify_ctx *
188 ctx_alloc (void)
190 struct gimplify_ctx * c = ctx_pool;
192 if (c)
193 ctx_pool = c->prev_context;
194 else
195 c = XNEW (struct gimplify_ctx);
197 memset (c, '\0', sizeof (*c));
198 return c;
201 /* Put gimplify context C back into the pool. */
203 static inline void
204 ctx_free (struct gimplify_ctx *c)
206 c->prev_context = ctx_pool;
207 ctx_pool = c;
210 /* Free allocated ctx stack memory. */
212 void
213 free_gimplify_stack (void)
215 struct gimplify_ctx *c;
217 while ((c = ctx_pool))
219 ctx_pool = c->prev_context;
220 free (c);
225 /* Set up a context for the gimplifier. */
227 void
228 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
230 struct gimplify_ctx *c = ctx_alloc ();
232 c->prev_context = gimplify_ctxp;
233 gimplify_ctxp = c;
234 gimplify_ctxp->into_ssa = in_ssa;
235 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
238 /* Tear down a context for the gimplifier. If BODY is non-null, then
239 put the temporaries into the outer BIND_EXPR. Otherwise, put them
240 in the local_decls.
242 BODY is not a sequence, but the first tuple in a sequence. */
244 void
245 pop_gimplify_context (gimple body)
247 struct gimplify_ctx *c = gimplify_ctxp;
249 gcc_assert (c
250 && (!c->bind_expr_stack.exists ()
251 || c->bind_expr_stack.is_empty ()));
252 c->bind_expr_stack.release ();
253 gimplify_ctxp = c->prev_context;
255 if (body)
256 declare_vars (c->temps, body, false);
257 else
258 record_vars (c->temps);
260 delete c->temp_htab;
261 c->temp_htab = NULL;
262 ctx_free (c);
265 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
267 static void
268 gimple_push_bind_expr (gimple gimple_bind)
270 gimplify_ctxp->bind_expr_stack.reserve (8);
271 gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
274 /* Pop the first element off the stack of bindings. */
276 static void
277 gimple_pop_bind_expr (void)
279 gimplify_ctxp->bind_expr_stack.pop ();
282 /* Return the first element of the stack of bindings. */
284 gimple
285 gimple_current_bind_expr (void)
287 return gimplify_ctxp->bind_expr_stack.last ();
290 /* Return the stack of bindings created during gimplification. */
292 vec<gimple>
293 gimple_bind_expr_stack (void)
295 return gimplify_ctxp->bind_expr_stack;
298 /* Return true iff there is a COND_EXPR between us and the innermost
299 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
301 static bool
302 gimple_conditional_context (void)
304 return gimplify_ctxp->conditions > 0;
307 /* Note that we've entered a COND_EXPR. */
309 static void
310 gimple_push_condition (void)
312 #ifdef ENABLE_GIMPLE_CHECKING
313 if (gimplify_ctxp->conditions == 0)
314 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
315 #endif
316 ++(gimplify_ctxp->conditions);
319 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
320 now, add any conditional cleanups we've seen to the prequeue. */
322 static void
323 gimple_pop_condition (gimple_seq *pre_p)
325 int conds = --(gimplify_ctxp->conditions);
327 gcc_assert (conds >= 0);
328 if (conds == 0)
330 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
331 gimplify_ctxp->conditional_cleanups = NULL;
335 /* A stable comparison routine for use with splay trees and DECLs. */
337 static int
338 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
340 tree a = (tree) xa;
341 tree b = (tree) xb;
343 return DECL_UID (a) - DECL_UID (b);
346 /* Create a new omp construct that deals with variable remapping. */
348 static struct gimplify_omp_ctx *
349 new_omp_context (enum omp_region_type region_type)
351 struct gimplify_omp_ctx *c;
353 c = XCNEW (struct gimplify_omp_ctx);
354 c->outer_context = gimplify_omp_ctxp;
355 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
356 c->privatized_types = new hash_set<tree>;
357 c->location = input_location;
358 c->region_type = region_type;
359 if ((region_type & ORT_TASK) == 0)
360 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
361 else
362 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
364 return c;
367 /* Destroy an omp construct that deals with variable remapping. */
369 static void
370 delete_omp_context (struct gimplify_omp_ctx *c)
372 splay_tree_delete (c->variables);
373 delete c->privatized_types;
374 XDELETE (c);
377 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
378 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
380 /* Both gimplify the statement T and append it to *SEQ_P. This function
381 behaves exactly as gimplify_stmt, but you don't have to pass T as a
382 reference. */
384 void
385 gimplify_and_add (tree t, gimple_seq *seq_p)
387 gimplify_stmt (&t, seq_p);
390 /* Gimplify statement T into sequence *SEQ_P, and return the first
391 tuple in the sequence of generated tuples for this statement.
392 Return NULL if gimplifying T produced no tuples. */
394 static gimple
395 gimplify_and_return_first (tree t, gimple_seq *seq_p)
397 gimple_stmt_iterator last = gsi_last (*seq_p);
399 gimplify_and_add (t, seq_p);
401 if (!gsi_end_p (last))
403 gsi_next (&last);
404 return gsi_stmt (last);
406 else
407 return gimple_seq_first_stmt (*seq_p);
410 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
411 LHS, or for a call argument. */
413 static bool
414 is_gimple_mem_rhs (tree t)
416 /* If we're dealing with a renamable type, either source or dest must be
417 a renamed variable. */
418 if (is_gimple_reg_type (TREE_TYPE (t)))
419 return is_gimple_val (t);
420 else
421 return is_gimple_val (t) || is_gimple_lvalue (t);
424 /* Return true if T is a CALL_EXPR or an expression that can be
425 assigned to a temporary. Note that this predicate should only be
426 used during gimplification. See the rationale for this in
427 gimplify_modify_expr. */
429 static bool
430 is_gimple_reg_rhs_or_call (tree t)
432 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
433 || TREE_CODE (t) == CALL_EXPR);
436 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
437 this predicate should only be used during gimplification. See the
438 rationale for this in gimplify_modify_expr. */
440 static bool
441 is_gimple_mem_rhs_or_call (tree t)
443 /* If we're dealing with a renamable type, either source or dest must be
444 a renamed variable. */
445 if (is_gimple_reg_type (TREE_TYPE (t)))
446 return is_gimple_val (t);
447 else
448 return (is_gimple_val (t) || is_gimple_lvalue (t)
449 || TREE_CODE (t) == CALL_EXPR);
452 /* Create a temporary with a name derived from VAL. Subroutine of
453 lookup_tmp_var; nobody else should call this function. */
455 static inline tree
456 create_tmp_from_val (tree val)
458 /* Drop all qualifiers and address-space information from the value type. */
459 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
460 tree var = create_tmp_var (type, get_name (val));
461 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
462 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
463 DECL_GIMPLE_REG_P (var) = 1;
464 return var;
467 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
468 an existing expression temporary. */
470 static tree
471 lookup_tmp_var (tree val, bool is_formal)
473 tree ret;
475 /* If not optimizing, never really reuse a temporary. local-alloc
476 won't allocate any variable that is used in more than one basic
477 block, which means it will go into memory, causing much extra
478 work in reload and final and poorer code generation, outweighing
479 the extra memory allocation here. */
480 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
481 ret = create_tmp_from_val (val);
482 else
484 elt_t elt, *elt_p;
485 elt_t **slot;
487 elt.val = val;
488 if (!gimplify_ctxp->temp_htab)
489 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
490 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
491 if (*slot == NULL)
493 elt_p = XNEW (elt_t);
494 elt_p->val = val;
495 elt_p->temp = ret = create_tmp_from_val (val);
496 *slot = elt_p;
498 else
500 elt_p = *slot;
501 ret = elt_p->temp;
505 return ret;
508 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
510 static tree
511 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
512 bool is_formal)
514 tree t, mod;
516 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
517 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
518 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
519 fb_rvalue);
521 if (gimplify_ctxp->into_ssa
522 && is_gimple_reg_type (TREE_TYPE (val)))
523 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
524 else
525 t = lookup_tmp_var (val, is_formal);
527 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
529 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
531 /* gimplify_modify_expr might want to reduce this further. */
532 gimplify_and_add (mod, pre_p);
533 ggc_free (mod);
535 return t;
538 /* Return a formal temporary variable initialized with VAL. PRE_P is as
539 in gimplify_expr. Only use this function if:
541 1) The value of the unfactored expression represented by VAL will not
542 change between the initialization and use of the temporary, and
543 2) The temporary will not be otherwise modified.
545 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
546 and #2 means it is inappropriate for && temps.
548 For other cases, use get_initialized_tmp_var instead. */
550 tree
551 get_formal_tmp_var (tree val, gimple_seq *pre_p)
553 return internal_get_tmp_var (val, pre_p, NULL, true);
556 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
557 are as in gimplify_expr. */
559 tree
560 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
562 return internal_get_tmp_var (val, pre_p, post_p, false);
565 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
566 generate debug info for them; otherwise don't. */
568 void
569 declare_vars (tree vars, gimple scope, bool debug_info)
571 tree last = vars;
572 if (last)
574 tree temps, block;
576 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
578 temps = nreverse (last);
580 block = gimple_bind_block (scope);
581 gcc_assert (!block || TREE_CODE (block) == BLOCK);
582 if (!block || !debug_info)
584 DECL_CHAIN (last) = gimple_bind_vars (scope);
585 gimple_bind_set_vars (scope, temps);
587 else
589 /* We need to attach the nodes both to the BIND_EXPR and to its
590 associated BLOCK for debugging purposes. The key point here
591 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
592 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
593 if (BLOCK_VARS (block))
594 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
595 else
597 gimple_bind_set_vars (scope,
598 chainon (gimple_bind_vars (scope), temps));
599 BLOCK_VARS (block) = temps;
605 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
606 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
607 no such upper bound can be obtained. */
609 static void
610 force_constant_size (tree var)
612 /* The only attempt we make is by querying the maximum size of objects
613 of the variable's type. */
615 HOST_WIDE_INT max_size;
617 gcc_assert (TREE_CODE (var) == VAR_DECL);
619 max_size = max_int_size_in_bytes (TREE_TYPE (var));
621 gcc_assert (max_size >= 0);
623 DECL_SIZE_UNIT (var)
624 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
625 DECL_SIZE (var)
626 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
629 /* Push the temporary variable TMP into the current binding. */
631 void
632 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
634 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
636 /* Later processing assumes that the object size is constant, which might
637 not be true at this point. Force the use of a constant upper bound in
638 this case. */
639 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
640 force_constant_size (tmp);
642 DECL_CONTEXT (tmp) = fn->decl;
643 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
645 record_vars_into (tmp, fn->decl);
648 /* Push the temporary variable TMP into the current binding. */
650 void
651 gimple_add_tmp_var (tree tmp)
653 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
655 /* Later processing assumes that the object size is constant, which might
656 not be true at this point. Force the use of a constant upper bound in
657 this case. */
658 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
659 force_constant_size (tmp);
661 DECL_CONTEXT (tmp) = current_function_decl;
662 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
664 if (gimplify_ctxp)
666 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
667 gimplify_ctxp->temps = tmp;
669 /* Mark temporaries local within the nearest enclosing parallel. */
670 if (gimplify_omp_ctxp)
672 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
673 while (ctx
674 && (ctx->region_type == ORT_WORKSHARE
675 || ctx->region_type == ORT_SIMD))
676 ctx = ctx->outer_context;
677 if (ctx)
678 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
681 else if (cfun)
682 record_vars (tmp);
683 else
685 gimple_seq body_seq;
687 /* This case is for nested functions. We need to expose the locals
688 they create. */
689 body_seq = gimple_body (current_function_decl);
690 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
696 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
697 nodes that are referenced more than once in GENERIC functions. This is
698 necessary because gimplification (translation into GIMPLE) is performed
699 by modifying tree nodes in-place, so gimplication of a shared node in a
700 first context could generate an invalid GIMPLE form in a second context.
702 This is achieved with a simple mark/copy/unmark algorithm that walks the
703 GENERIC representation top-down, marks nodes with TREE_VISITED the first
704 time it encounters them, duplicates them if they already have TREE_VISITED
705 set, and finally removes the TREE_VISITED marks it has set.
707 The algorithm works only at the function level, i.e. it generates a GENERIC
708 representation of a function with no nodes shared within the function when
709 passed a GENERIC function (except for nodes that are allowed to be shared).
711 At the global level, it is also necessary to unshare tree nodes that are
712 referenced in more than one function, for the same aforementioned reason.
713 This requires some cooperation from the front-end. There are 2 strategies:
715 1. Manual unsharing. The front-end needs to call unshare_expr on every
716 expression that might end up being shared across functions.
718 2. Deep unsharing. This is an extension of regular unsharing. Instead
719 of calling unshare_expr on expressions that might be shared across
720 functions, the front-end pre-marks them with TREE_VISITED. This will
721 ensure that they are unshared on the first reference within functions
722 when the regular unsharing algorithm runs. The counterpart is that
723 this algorithm must look deeper than for manual unsharing, which is
724 specified by LANG_HOOKS_DEEP_UNSHARING.
726 If there are only few specific cases of node sharing across functions, it is
727 probably easier for a front-end to unshare the expressions manually. On the
728 contrary, if the expressions generated at the global level are as widespread
729 as expressions generated within functions, deep unsharing is very likely the
730 way to go. */
732 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
733 These nodes model computations that must be done once. If we were to
734 unshare something like SAVE_EXPR(i++), the gimplification process would
735 create wrong code. However, if DATA is non-null, it must hold a pointer
736 set that is used to unshare the subtrees of these nodes. */
738 static tree
739 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
741 tree t = *tp;
742 enum tree_code code = TREE_CODE (t);
744 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
745 copy their subtrees if we can make sure to do it only once. */
746 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
748 if (data && !((hash_set<tree> *)data)->add (t))
750 else
751 *walk_subtrees = 0;
754 /* Stop at types, decls, constants like copy_tree_r. */
755 else if (TREE_CODE_CLASS (code) == tcc_type
756 || TREE_CODE_CLASS (code) == tcc_declaration
757 || TREE_CODE_CLASS (code) == tcc_constant
758 /* We can't do anything sensible with a BLOCK used as an
759 expression, but we also can't just die when we see it
760 because of non-expression uses. So we avert our eyes
761 and cross our fingers. Silly Java. */
762 || code == BLOCK)
763 *walk_subtrees = 0;
765 /* Cope with the statement expression extension. */
766 else if (code == STATEMENT_LIST)
769 /* Leave the bulk of the work to copy_tree_r itself. */
770 else
771 copy_tree_r (tp, walk_subtrees, NULL);
773 return NULL_TREE;
776 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
777 If *TP has been visited already, then *TP is deeply copied by calling
778 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
780 static tree
781 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
783 tree t = *tp;
784 enum tree_code code = TREE_CODE (t);
786 /* Skip types, decls, and constants. But we do want to look at their
787 types and the bounds of types. Mark them as visited so we properly
788 unmark their subtrees on the unmark pass. If we've already seen them,
789 don't look down further. */
790 if (TREE_CODE_CLASS (code) == tcc_type
791 || TREE_CODE_CLASS (code) == tcc_declaration
792 || TREE_CODE_CLASS (code) == tcc_constant)
794 if (TREE_VISITED (t))
795 *walk_subtrees = 0;
796 else
797 TREE_VISITED (t) = 1;
800 /* If this node has been visited already, unshare it and don't look
801 any deeper. */
802 else if (TREE_VISITED (t))
804 walk_tree (tp, mostly_copy_tree_r, data, NULL);
805 *walk_subtrees = 0;
808 /* Otherwise, mark the node as visited and keep looking. */
809 else
810 TREE_VISITED (t) = 1;
812 return NULL_TREE;
815 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
816 copy_if_shared_r callback unmodified. */
818 static inline void
819 copy_if_shared (tree *tp, void *data)
821 walk_tree (tp, copy_if_shared_r, data, NULL);
824 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
825 any nested functions. */
827 static void
828 unshare_body (tree fndecl)
830 struct cgraph_node *cgn = cgraph_node::get (fndecl);
831 /* If the language requires deep unsharing, we need a pointer set to make
832 sure we don't repeatedly unshare subtrees of unshareable nodes. */
833 hash_set<tree> *visited
834 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
836 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
837 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
838 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
840 delete visited;
842 if (cgn)
843 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
844 unshare_body (cgn->decl);
847 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
848 Subtrees are walked until the first unvisited node is encountered. */
850 static tree
851 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
853 tree t = *tp;
855 /* If this node has been visited, unmark it and keep looking. */
856 if (TREE_VISITED (t))
857 TREE_VISITED (t) = 0;
859 /* Otherwise, don't look any deeper. */
860 else
861 *walk_subtrees = 0;
863 return NULL_TREE;
866 /* Unmark the visited trees rooted at *TP. */
868 static inline void
869 unmark_visited (tree *tp)
871 walk_tree (tp, unmark_visited_r, NULL, NULL);
874 /* Likewise, but mark all trees as not visited. */
876 static void
877 unvisit_body (tree fndecl)
879 struct cgraph_node *cgn = cgraph_node::get (fndecl);
881 unmark_visited (&DECL_SAVED_TREE (fndecl));
882 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
883 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
885 if (cgn)
886 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
887 unvisit_body (cgn->decl);
890 /* Unconditionally make an unshared copy of EXPR. This is used when using
891 stored expressions which span multiple functions, such as BINFO_VTABLE,
892 as the normal unsharing process can't tell that they're shared. */
894 tree
895 unshare_expr (tree expr)
897 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
898 return expr;
901 /* Worker for unshare_expr_without_location. */
903 static tree
904 prune_expr_location (tree *tp, int *walk_subtrees, void *)
906 if (EXPR_P (*tp))
907 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
908 else
909 *walk_subtrees = 0;
910 return NULL_TREE;
913 /* Similar to unshare_expr but also prune all expression locations
914 from EXPR. */
916 tree
917 unshare_expr_without_location (tree expr)
919 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
920 if (EXPR_P (expr))
921 walk_tree (&expr, prune_expr_location, NULL, NULL);
922 return expr;
925 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
926 contain statements and have a value. Assign its value to a temporary
927 and give it void_type_node. Return the temporary, or NULL_TREE if
928 WRAPPER was already void. */
930 tree
931 voidify_wrapper_expr (tree wrapper, tree temp)
933 tree type = TREE_TYPE (wrapper);
934 if (type && !VOID_TYPE_P (type))
936 tree *p;
938 /* Set p to point to the body of the wrapper. Loop until we find
939 something that isn't a wrapper. */
940 for (p = &wrapper; p && *p; )
942 switch (TREE_CODE (*p))
944 case BIND_EXPR:
945 TREE_SIDE_EFFECTS (*p) = 1;
946 TREE_TYPE (*p) = void_type_node;
947 /* For a BIND_EXPR, the body is operand 1. */
948 p = &BIND_EXPR_BODY (*p);
949 break;
951 case CLEANUP_POINT_EXPR:
952 case TRY_FINALLY_EXPR:
953 case TRY_CATCH_EXPR:
954 TREE_SIDE_EFFECTS (*p) = 1;
955 TREE_TYPE (*p) = void_type_node;
956 p = &TREE_OPERAND (*p, 0);
957 break;
959 case STATEMENT_LIST:
961 tree_stmt_iterator i = tsi_last (*p);
962 TREE_SIDE_EFFECTS (*p) = 1;
963 TREE_TYPE (*p) = void_type_node;
964 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
966 break;
968 case COMPOUND_EXPR:
969 /* Advance to the last statement. Set all container types to
970 void. */
971 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
973 TREE_SIDE_EFFECTS (*p) = 1;
974 TREE_TYPE (*p) = void_type_node;
976 break;
978 case TRANSACTION_EXPR:
979 TREE_SIDE_EFFECTS (*p) = 1;
980 TREE_TYPE (*p) = void_type_node;
981 p = &TRANSACTION_EXPR_BODY (*p);
982 break;
984 default:
985 /* Assume that any tree upon which voidify_wrapper_expr is
986 directly called is a wrapper, and that its body is op0. */
987 if (p == &wrapper)
989 TREE_SIDE_EFFECTS (*p) = 1;
990 TREE_TYPE (*p) = void_type_node;
991 p = &TREE_OPERAND (*p, 0);
992 break;
994 goto out;
998 out:
999 if (p == NULL || IS_EMPTY_STMT (*p))
1000 temp = NULL_TREE;
1001 else if (temp)
1003 /* The wrapper is on the RHS of an assignment that we're pushing
1004 down. */
1005 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1006 || TREE_CODE (temp) == MODIFY_EXPR);
1007 TREE_OPERAND (temp, 1) = *p;
1008 *p = temp;
1010 else
1012 temp = create_tmp_var (type, "retval");
1013 *p = build2 (INIT_EXPR, type, temp, *p);
1016 return temp;
1019 return NULL_TREE;
1022 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1023 a temporary through which they communicate. */
1025 static void
1026 build_stack_save_restore (gimple *save, gimple *restore)
1028 tree tmp_var;
1030 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1031 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1032 gimple_call_set_lhs (*save, tmp_var);
1034 *restore
1035 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1036 1, tmp_var);
1039 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1041 static enum gimplify_status
1042 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1044 tree bind_expr = *expr_p;
1045 bool old_save_stack = gimplify_ctxp->save_stack;
1046 tree t;
1047 gimple gimple_bind;
1048 gimple_seq body, cleanup;
1049 gimple stack_save;
1050 location_t start_locus = 0, end_locus = 0;
1052 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1054 /* Mark variables seen in this bind expr. */
1055 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1057 if (TREE_CODE (t) == VAR_DECL)
1059 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1061 /* Mark variable as local. */
1062 if (ctx && !DECL_EXTERNAL (t)
1063 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1064 || splay_tree_lookup (ctx->variables,
1065 (splay_tree_key) t) == NULL))
1067 if (ctx->region_type == ORT_SIMD
1068 && TREE_ADDRESSABLE (t)
1069 && !TREE_STATIC (t))
1070 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1071 else
1072 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1075 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1077 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1078 cfun->has_local_explicit_reg_vars = true;
1081 /* Preliminarily mark non-addressed complex variables as eligible
1082 for promotion to gimple registers. We'll transform their uses
1083 as we find them. */
1084 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1085 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1086 && !TREE_THIS_VOLATILE (t)
1087 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1088 && !needs_to_live_in_memory (t))
1089 DECL_GIMPLE_REG_P (t) = 1;
1092 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1093 BIND_EXPR_BLOCK (bind_expr));
1094 gimple_push_bind_expr (gimple_bind);
1096 gimplify_ctxp->save_stack = false;
1098 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1099 body = NULL;
1100 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1101 gimple_bind_set_body (gimple_bind, body);
1103 /* Source location wise, the cleanup code (stack_restore and clobbers)
1104 belongs to the end of the block, so propagate what we have. The
1105 stack_save operation belongs to the beginning of block, which we can
1106 infer from the bind_expr directly if the block has no explicit
1107 assignment. */
1108 if (BIND_EXPR_BLOCK (bind_expr))
1110 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1111 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1113 if (start_locus == 0)
1114 start_locus = EXPR_LOCATION (bind_expr);
1116 cleanup = NULL;
1117 stack_save = NULL;
1118 if (gimplify_ctxp->save_stack)
1120 gimple stack_restore;
1122 /* Save stack on entry and restore it on exit. Add a try_finally
1123 block to achieve this. */
1124 build_stack_save_restore (&stack_save, &stack_restore);
1126 gimple_set_location (stack_save, start_locus);
1127 gimple_set_location (stack_restore, end_locus);
1129 gimplify_seq_add_stmt (&cleanup, stack_restore);
1132 /* Add clobbers for all variables that go out of scope. */
1133 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1135 if (TREE_CODE (t) == VAR_DECL
1136 && !is_global_var (t)
1137 && DECL_CONTEXT (t) == current_function_decl
1138 && !DECL_HARD_REGISTER (t)
1139 && !TREE_THIS_VOLATILE (t)
1140 && !DECL_HAS_VALUE_EXPR_P (t)
1141 /* Only care for variables that have to be in memory. Others
1142 will be rewritten into SSA names, hence moved to the top-level. */
1143 && !is_gimple_reg (t)
1144 && flag_stack_reuse != SR_NONE)
1146 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1147 gimple clobber_stmt;
1148 TREE_THIS_VOLATILE (clobber) = 1;
1149 clobber_stmt = gimple_build_assign (t, clobber);
1150 gimple_set_location (clobber_stmt, end_locus);
1151 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1155 if (cleanup)
1157 gimple gs;
1158 gimple_seq new_body;
1160 new_body = NULL;
1161 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1162 GIMPLE_TRY_FINALLY);
1164 if (stack_save)
1165 gimplify_seq_add_stmt (&new_body, stack_save);
1166 gimplify_seq_add_stmt (&new_body, gs);
1167 gimple_bind_set_body (gimple_bind, new_body);
1170 gimplify_ctxp->save_stack = old_save_stack;
1171 gimple_pop_bind_expr ();
1173 gimplify_seq_add_stmt (pre_p, gimple_bind);
1175 if (temp)
1177 *expr_p = temp;
1178 return GS_OK;
1181 *expr_p = NULL_TREE;
1182 return GS_ALL_DONE;
1185 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1186 GIMPLE value, it is assigned to a new temporary and the statement is
1187 re-written to return the temporary.
1189 PRE_P points to the sequence where side effects that must happen before
1190 STMT should be stored. */
1192 static enum gimplify_status
1193 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1195 gimple ret;
1196 tree ret_expr = TREE_OPERAND (stmt, 0);
1197 tree result_decl, result;
1199 if (ret_expr == error_mark_node)
1200 return GS_ERROR;
1202 /* Implicit _Cilk_sync must be inserted right before any return statement
1203 if there is a _Cilk_spawn in the function. If the user has provided a
1204 _Cilk_sync, the optimizer should remove this duplicate one. */
1205 if (fn_contains_cilk_spawn_p (cfun))
1207 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1208 gimplify_and_add (impl_sync, pre_p);
1211 if (!ret_expr
1212 || TREE_CODE (ret_expr) == RESULT_DECL
1213 || ret_expr == error_mark_node)
1215 gimple ret = gimple_build_return (ret_expr);
1216 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1217 gimplify_seq_add_stmt (pre_p, ret);
1218 return GS_ALL_DONE;
1221 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1222 result_decl = NULL_TREE;
1223 else
1225 result_decl = TREE_OPERAND (ret_expr, 0);
1227 /* See through a return by reference. */
1228 if (TREE_CODE (result_decl) == INDIRECT_REF)
1229 result_decl = TREE_OPERAND (result_decl, 0);
1231 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1232 || TREE_CODE (ret_expr) == INIT_EXPR)
1233 && TREE_CODE (result_decl) == RESULT_DECL);
1236 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1237 Recall that aggregate_value_p is FALSE for any aggregate type that is
1238 returned in registers. If we're returning values in registers, then
1239 we don't want to extend the lifetime of the RESULT_DECL, particularly
1240 across another call. In addition, for those aggregates for which
1241 hard_function_value generates a PARALLEL, we'll die during normal
1242 expansion of structure assignments; there's special code in expand_return
1243 to handle this case that does not exist in expand_expr. */
1244 if (!result_decl)
1245 result = NULL_TREE;
1246 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1248 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1250 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1251 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1252 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1253 should be effectively allocated by the caller, i.e. all calls to
1254 this function must be subject to the Return Slot Optimization. */
1255 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1256 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1258 result = result_decl;
1260 else if (gimplify_ctxp->return_temp)
1261 result = gimplify_ctxp->return_temp;
1262 else
1264 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1266 /* ??? With complex control flow (usually involving abnormal edges),
1267 we can wind up warning about an uninitialized value for this. Due
1268 to how this variable is constructed and initialized, this is never
1269 true. Give up and never warn. */
1270 TREE_NO_WARNING (result) = 1;
1272 gimplify_ctxp->return_temp = result;
1275 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1276 Then gimplify the whole thing. */
1277 if (result != result_decl)
1278 TREE_OPERAND (ret_expr, 0) = result;
1280 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1282 ret = gimple_build_return (result);
1283 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1284 gimplify_seq_add_stmt (pre_p, ret);
1286 return GS_ALL_DONE;
1289 /* Gimplify a variable-length array DECL. */
static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The pointer always targets the alloca'd storage created below, so the
     dereference can never trap.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Allocate the backing storage with __builtin_alloca_with_align so the
     declared alignment of DECL is honored.  */
  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
1333 /* A helper function to be called via walk_tree. Mark all labels under *TP
1334 as being forced. To be called for DECL_INITIAL of static variables. */
1336 static tree
1337 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1339 if (TYPE_P (*tp))
1340 *walk_subtrees = 0;
1341 if (TREE_CODE (*tp) == LABEL_DECL)
1342 FORCED_LABEL (*tp) = 1;
1344 return NULL_TREE;
1347 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1348 and initialization explicit. */
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; allocation/initialization code
     is emitted into SEQ_P instead.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Variable-sized decls, and large decls under generic stack checking,
	 get deferred (alloca-style) allocation.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR statement;
		 the DECL no longer owns it, so it can be freed afterwards.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1411 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1412 and replacing the LOOP_EXPR with goto, but if the loop contains an
1413 EXIT_EXPR, we need to append a label for it to jump to. */
1415 static enum gimplify_status
1416 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1418 tree saved_label = gimplify_ctxp->exit_label;
1419 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1421 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1423 gimplify_ctxp->exit_label = NULL_TREE;
1425 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1427 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1429 if (gimplify_ctxp->exit_label)
1430 gimplify_seq_add_stmt (pre_p,
1431 gimple_build_label (gimplify_ctxp->exit_label));
1433 gimplify_ctxp->exit_label = saved_label;
1435 *expr_p = NULL;
1436 return GS_ALL_DONE;
1439 /* Gimplify a statement list onto a sequence. These may be created either
1440 by an enlightened front-end, or by shortcut_cond_expr. */
1442 static enum gimplify_status
1443 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1445 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1447 tree_stmt_iterator i = tsi_start (*expr_p);
1449 while (!tsi_end_p (i))
1451 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1452 tsi_delink (&i);
1455 if (temp)
1457 *expr_p = temp;
1458 return GS_OK;
1461 return GS_ALL_DONE;
1465 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1466 branch to. */
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* The controlling expression must become a GIMPLE value first.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      /* A GIMPLE_SWITCH always has a default; synthesize an empty one that
	 falls past the body if the source had none.  */
      if (!default_case)
	{
	  gimple new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
					   default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1529 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1531 static enum gimplify_status
1532 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1534 struct gimplify_ctx *ctxp;
1535 gimple gimple_label;
1537 /* Invalid OpenMP programs can play Duff's Device type games with
1538 #pragma omp parallel. At least in the C front end, we don't
1539 detect such invalid branches until after gimplification. */
1540 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1541 if (ctxp->case_labels.exists ())
1542 break;
1544 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1545 ctxp->case_labels.safe_push (*expr_p);
1546 gimplify_seq_add_stmt (pre_p, gimple_label);
1548 return GS_ALL_DONE;
1551 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1552 if necessary. */
1554 tree
1555 build_and_jump (tree *label_p)
1557 if (label_p == NULL)
1558 /* If there's nowhere to jump, just fall through. */
1559 return NULL_TREE;
1561 if (*label_p == NULL_TREE)
1563 tree label = create_artificial_label (UNKNOWN_LOCATION);
1564 *label_p = label;
1567 return build1 (GOTO_EXPR, void_type_node, *label_p);
1570 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1571 This also involves building a label to jump to and communicating it to
1572 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1574 static enum gimplify_status
1575 gimplify_exit_expr (tree *expr_p)
1577 tree cond = TREE_OPERAND (*expr_p, 0);
1578 tree expr;
1580 expr = build_and_jump (&gimplify_ctxp->exit_label);
1581 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1582 *expr_p = expr;
1584 return GS_OK;
1587 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1588 different from its canonical type, wrap the whole thing inside a
1589 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1590 type.
1592 The canonical type of a COMPONENT_REF is the type of the field being
1593 referenced--unless the field is a bit-field which can be read directly
1594 in a smaller mode, in which case the canonical type is the
1595 sign-appropriate type corresponding to that mode. */
static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral fields (which may be bit-fields) let get_unwidened pick
     the narrowest type the field can be read in; otherwise the canonical
     type is simply the field's declared type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1638 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1639 to foo, embed that change in the ADDR_EXPR by converting
1640 T array[U];
1641 (T *)&array
1643 &array[L]
1644 where L is the lower bound. For simplicity, only do this for constant
1645 lower bound.
1646 The constraint is that the type of &array[L] is trivially convertible
1647 to T *. */
static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast:
     &array[L], where L is the constant lower bound of the domain.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
1692 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1693 underneath as appropriate. */
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* *EXPR_P may have been replaced, so ask the caller to re-examine it.  */
  return GS_OK;
}
1735 /* Nonlocal VLAs seen in the current function. */
1736 static hash_set<tree> *nonlocal_vlas;
1738 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1739 static tree nonlocal_vla_vars;
1741 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1742 DECL_VALUE_EXPR, and it's worth re-examining things. */
static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip enclosing workshare/simd OpenMP regions; the debug copy
	     is only made when not inside such a region.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  /* hash_set::add reports whether DECL was already present, so each
	     nonlocal VLA gets at most one debug copy.  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1808 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:        /* a function call */
      /* Start from the node's own volatility, then OR in the flag from
	 every operand.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
1861 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1862 node *EXPR_P.
1864 compound_lval
1865 : min_lval '[' val ']'
1866 | min_lval '.' ID
1867 | compound_lval '[' val ']'
1868 | compound_lval '.' ID
1870 This is not part of the original SIMPLE definition, which separates
1871 array and member references, but it seems reasonable to handle them
1872 together. Also, this way we don't run into problems with union
1873 aliasing; gcc requires that for accesses through a union to alias, the
1874 union reference must be explicit, which was not always the case when we
1875 were splitting up array and member refs.
1877 PRE_P points to the sequence where side effects that must happen before
1878 *EXPR_P should be stored.
1880 POST_P points to the sequence where side effects that must happen after
1881 *EXPR_P should be stored. */
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
2064 /* Gimplify the self modifying expression pointed to by EXPR_P
2065 (++, --, +=, -=).
2067 PRE_P points to the list where side effects that must happen before
2068 *EXPR_P should be stored.
2070 POST_P points to the list where side effects that must happen after
2071 *EXPR_P should be stored.
2073 WANT_VALUE is nonzero iff we want to use the value of this expression
2074 in another expression.
2076 ARITH_TYPE is the type the computation should be performed in. */
enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the pre-increment value into a temporary; that snapshot
	 becomes the value of the whole expression.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* There is no POINTER_MINUS_EXPR; express subtraction by adding the
	 negated offset.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2162 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2164 static void
2165 maybe_with_size_expr (tree *expr_p)
2167 tree expr = *expr_p;
2168 tree type = TREE_TYPE (expr);
2169 tree size;
2171 /* If we've already wrapped this or the type is error_mark_node, we can't do
2172 anything. */
2173 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2174 || type == error_mark_node)
2175 return;
2177 /* If the size isn't known or is a constant, we have nothing to do. */
2178 size = TYPE_SIZE_UNIT (type);
2179 if (!size || TREE_CODE (size) == INTEGER_CST)
2180 return;
2182 /* Otherwise, make a WITH_SIZE_EXPR. */
2183 size = unshare_expr (size);
2184 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2185 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2188 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2189 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2190 the CALL_EXPR. */
2192 enum gimplify_status
2193 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2195 bool (*test) (tree);
2196 fallback_t fb;
2198 /* In general, we allow lvalues for function arguments to avoid
2199 extra overhead of copying large aggregates out of even larger
2200 aggregates into temporaries only to copy the temporaries to
2201 the argument list. Make optimizers happy by pulling out to
2202 temporaries those types that fit in registers. */
2203 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2204 test = is_gimple_val, fb = fb_rvalue;
2205 else
2207 test = is_gimple_lvalue, fb = fb_either;
2208 /* Also strip a TARGET_EXPR that would force an extra copy. */
2209 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2211 tree init = TARGET_EXPR_INITIAL (*arg_p);
2212 if (init
2213 && !VOID_TYPE_P (TREE_TYPE (init)))
2214 *arg_p = init;
2218 /* If this is a variable sized type, we must remember the size. */
2219 maybe_with_size_expr (arg_p);
2221 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2222 /* Make sure arguments have the same location as the function call
2223 itself. */
2224 protected_set_expr_location (*arg_p, call_location);
2226 /* There is a sequence point before a function call. Side effects in
2227 the argument list must occur before the actual call. So, when
2228 gimplifying arguments, force gimplify_expr to use an internal
2229 post queue which is then appended to the end of PRE_P. */
2230 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2233 /* Don't fold STMT inside ORT_TARGET, because it can break code by adding decl
2234 references that weren't in the source. We'll do it during omplower pass
2235 instead. */
2237 static bool
2238 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2240 struct gimplify_omp_ctx *ctx;
2241 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2242 if (ctx->region_type == ORT_TARGET)
2243 return false;
2244 return fold_stmt (gsi);
2247 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2248 WANT_VALUE is true if the result of the call is desired. */
2250 static enum gimplify_status
2251 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2253 tree fndecl, parms, p, fnptrtype;
2254 enum gimplify_status ret;
2255 int i, nargs;
2256 gimple call;
2257 bool builtin_va_start_p = false;
2258 location_t loc = EXPR_LOCATION (*expr_p);
2260 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2262 /* For reliable diagnostics during inlining, it is necessary that
2263 every call_expr be annotated with file and line. */
2264 if (! EXPR_HAS_LOCATION (*expr_p))
2265 SET_EXPR_LOCATION (*expr_p, input_location);
2267 /* Gimplify internal functions created in the FEs. */
2268 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
2270 nargs = call_expr_nargs (*expr_p);
2271 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2272 auto_vec<tree> vargs (nargs);
2274 for (i = 0; i < nargs; i++)
2276 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2277 EXPR_LOCATION (*expr_p));
2278 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
/* NOTE(review): this inner 'call' shadows the function-scope 'call'
   declared above; harmless here because the outer one is only used in
   the !want_value path at the end, but worth keeping in mind.  */
2280 gimple call = gimple_build_call_internal_vec (ifn, vargs);
2281 gimplify_seq_add_stmt (pre_p, call);
2282 return GS_ALL_DONE;
2285 /* This may be a call to a builtin function.
2287 Builtin function calls may be transformed into different
2288 (and more efficient) builtin function calls under certain
2289 circumstances. Unfortunately, gimplification can muck things
2290 up enough that the builtin expanders are not aware that certain
2291 transformations are still valid.
2293 So we attempt transformation/gimplification of the call before
2294 we gimplify the CALL_EXPR. At this time we do not manage to
2295 transform all calls in the same manner as the expanders do, but
2296 we do transform most of them. */
2297 fndecl = get_callee_fndecl (*expr_p);
2298 if (fndecl
2299 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2300 switch (DECL_FUNCTION_CODE (fndecl))
2302 case BUILT_IN_VA_START:
2304 builtin_va_start_p = TRUE;
2305 if (call_expr_nargs (*expr_p) < 2)
2307 error ("too few arguments to function %<va_start%>");
2308 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2309 return GS_OK;
2312 if (fold_builtin_next_arg (*expr_p, true))
2314 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2315 return GS_OK;
2317 break;
/* __builtin_LINE/FILE/FUNCTION fold to constants right here.  */
2319 case BUILT_IN_LINE:
2321 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2322 *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2323 return GS_OK;
2325 case BUILT_IN_FILE:
2327 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2328 *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2329 return GS_OK;
2331 case BUILT_IN_FUNCTION:
2333 const char *function;
2334 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2335 *expr_p = build_string_literal (strlen (function) + 1, function);
2336 return GS_OK;
2338 default:
2341 if (fndecl && DECL_BUILT_IN (fndecl))
2343 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2344 if (new_tree && new_tree != *expr_p)
2346 /* There was a transformation of this call which computes the
2347 same value, but in a more efficient way. Return and try
2348 again. */
2349 *expr_p = new_tree;
2350 return GS_OK;
2354 /* Remember the original function pointer type. */
2355 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2357 /* There is a sequence point before the call, so any side effects in
2358 the calling expression must occur before the actual call. Force
2359 gimplify_expr to use an internal post queue. */
/* RET holds the accumulated status for the remainder of this
   function; argument errors below downgrade it to GS_ERROR.  */
2360 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2361 is_gimple_call_addr, fb_rvalue);
2363 nargs = call_expr_nargs (*expr_p);
2365 /* Get argument types for verification. */
2366 fndecl = get_callee_fndecl (*expr_p);
2367 parms = NULL_TREE;
2368 if (fndecl)
2369 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2370 else
2371 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
2373 if (fndecl && DECL_ARGUMENTS (fndecl))
2374 p = DECL_ARGUMENTS (fndecl);
2375 else if (parms)
2376 p = parms;
2377 else
2378 p = NULL_TREE;
/* This loop only advances P in step with the arguments; afterwards
   P == NULL with I < NARGS means there are unnamed trailing args.  */
2379 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2382 /* If the last argument is __builtin_va_arg_pack () and it is not
2383 passed as a named argument, decrease the number of CALL_EXPR
2384 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2385 if (!p
2386 && i < nargs
2387 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2389 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2390 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2392 if (last_arg_fndecl
2393 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2394 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2395 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2397 tree call = *expr_p;
/* Rebuild the CALL_EXPR without the trailing va_arg_pack argument.  */
2399 --nargs;
2400 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2401 CALL_EXPR_FN (call),
2402 nargs, CALL_EXPR_ARGP (call));
2404 /* Copy all CALL_EXPR flags, location and block, except
2405 CALL_EXPR_VA_ARG_PACK flag. */
2406 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2407 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2408 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2409 = CALL_EXPR_RETURN_SLOT_OPT (call);
2410 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2411 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2413 /* Set CALL_EXPR_VA_ARG_PACK. */
2414 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2418 /* Finally, gimplify the function arguments. */
2419 if (nargs > 0)
/* PUSH_ARGS_REVERSED picks the gimplification order to match the
   target's argument-pushing order.  */
2421 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2422 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2423 PUSH_ARGS_REVERSED ? i-- : i++)
2425 enum gimplify_status t;
2427 /* Avoid gimplifying the second argument to va_start, which needs to
2428 be the plain PARM_DECL. */
2429 if ((i != 1) || !builtin_va_start_p)
2431 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2432 EXPR_LOCATION (*expr_p));
2434 if (t == GS_ERROR)
2435 ret = GS_ERROR;
2440 /* Verify the function result. */
2441 if (want_value && fndecl
2442 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2444 error_at (loc, "using result of function returning %<void%>");
2445 ret = GS_ERROR;
2448 /* Try this again in case gimplification exposed something. */
2449 if (ret != GS_ERROR)
2451 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2453 if (new_tree && new_tree != *expr_p)
2455 /* There was a transformation of this call which computes the
2456 same value, but in a more efficient way. Return and try
2457 again. */
2458 *expr_p = new_tree;
2459 return GS_OK;
2462 else
2464 *expr_p = error_mark_node;
2465 return GS_ERROR;
2468 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2469 decl. This allows us to eliminate redundant or useless
2470 calls to "const" functions. */
2471 if (TREE_CODE (*expr_p) == CALL_EXPR)
2473 int flags = call_expr_flags (*expr_p);
2474 if (flags & (ECF_CONST | ECF_PURE)
2475 /* An infinite loop is considered a side effect. */
2476 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2477 TREE_SIDE_EFFECTS (*expr_p) = 0;
2480 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2481 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2482 form and delegate the creation of a GIMPLE_CALL to
2483 gimplify_modify_expr. This is always possible because when
2484 WANT_VALUE is true, the caller wants the result of this call into
2485 a temporary, which means that we will emit an INIT_EXPR in
2486 internal_get_tmp_var which will then be handled by
2487 gimplify_modify_expr. */
2488 if (!want_value)
2490 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2491 have to do is replicate it as a GIMPLE_CALL tuple. */
2492 gimple_stmt_iterator gsi;
2493 call = gimple_build_call_from_tree (*expr_p)
2494 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2495 notice_special_calls (call);
2496 gimplify_seq_add_stmt (pre_p, call);
2497 gsi = gsi_last (*pre_p);
2498 maybe_fold_stmt (&gsi);
2499 *expr_p = NULL_TREE;
2501 else
2502 /* Remember the original function type. */
2503 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2504 CALL_EXPR_FN (*expr_p));
2506 return ret;
2509 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2510 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2512 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2513 condition is true or false, respectively. If null, we should generate
2514 our own to skip over the evaluation of this specific expression.
2516 LOCUS is the source location of the COND_EXPR.
2518 This function is the tree equivalent of do_jump.
2520 shortcut_cond_r should only be called by shortcut_cond_expr. */
2522 static tree
2523 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2524 location_t locus)
/* LOCAL_LABEL receives a fall-through label created on demand for the
   short-circuit cases; it is emitted at the end of this expansion.  */
2526 tree local_label = NULL_TREE;
2527 tree t, expr = NULL;
2529 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2530 retain the shortcut semantics. Just insert the gotos here;
2531 shortcut_cond_expr will append the real blocks later. */
2532 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2534 location_t new_locus;
2536 /* Turn if (a && b) into
2538 if (a); else goto no;
2539 if (b) goto yes; else goto no;
2540 (no:) */
2542 if (false_label_p == NULL)
2543 false_label_p = &local_label;
2545 /* Keep the original source location on the first 'if'. */
2546 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2547 append_to_statement_list (t, &expr);
2549 /* Set the source location of the && on the second 'if'. */
2550 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2551 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2552 new_locus);
2553 append_to_statement_list (t, &expr);
2555 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2557 location_t new_locus;
2559 /* Turn if (a || b) into
2561 if (a) goto yes;
2562 if (b) goto yes; else goto no;
2563 (yes:) */
2565 if (true_label_p == NULL)
2566 true_label_p = &local_label;
2568 /* Keep the original source location on the first 'if'. */
2569 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2570 append_to_statement_list (t, &expr);
2572 /* Set the source location of the || on the second 'if'. */
2573 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2574 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2575 new_locus);
2576 append_to_statement_list (t, &expr);
2578 else if (TREE_CODE (pred) == COND_EXPR
2579 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2580 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2582 location_t new_locus;
2584 /* As long as we're messing with gotos, turn if (a ? b : c) into
2585 if (a)
2586 if (b) goto yes; else goto no;
2587 else
2588 if (c) goto yes; else goto no;
2590 Don't do this if one of the arms has void type, which can happen
2591 in C++ when the arm is throw. */
2593 /* Keep the original source location on the first 'if'. Set the source
2594 location of the ? on the second 'if'. */
2595 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2596 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2597 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2598 false_label_p, locus),
2599 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2600 false_label_p, new_locus));
2602 else
/* Base case: a simple predicate becomes a two-way branch whose arms
   are plain gotos; build_and_jump allocates labels through the
   label-pointer protocol as needed.  */
2604 expr = build3 (COND_EXPR, void_type_node, pred,
2605 build_and_jump (true_label_p),
2606 build_and_jump (false_label_p));
2607 SET_EXPR_LOCATION (expr, locus);
/* If a fall-through label was created above, it binds right here, at
   the end of this sub-expansion.  */
2610 if (local_label)
2612 t = build1 (LABEL_EXPR, void_type_node, local_label);
2613 append_to_statement_list (t, &expr);
2616 return expr;
2619 /* Given a conditional expression EXPR with short-circuit boolean
2620 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2621 predicate apart into the equivalent sequence of conditionals. */
2623 static tree
2624 shortcut_cond_expr (tree expr)
2626 tree pred = TREE_OPERAND (expr, 0);
2627 tree then_ = TREE_OPERAND (expr, 1);
2628 tree else_ = TREE_OPERAND (expr, 2);
2629 tree true_label, false_label, end_label, t;
2630 tree *true_label_p;
2631 tree *false_label_p;
2632 bool emit_end, emit_false, jump_over_else;
/* *_se track whether the corresponding arm has real side effects and
   therefore needs its own code block (and possibly a label).  */
2633 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2634 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2636 /* First do simple transformations. */
2637 if (!else_se)
2639 /* If there is no 'else', turn
2640 if (a && b) then c
2641 into
2642 if (a) if (b) then c. */
2643 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2645 /* Keep the original source location on the first 'if'. */
2646 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2647 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2648 /* Set the source location of the && on the second 'if'. */
2649 if (EXPR_HAS_LOCATION (pred))
2650 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2651 then_ = shortcut_cond_expr (expr);
2652 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2653 pred = TREE_OPERAND (pred, 0);
2654 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2655 SET_EXPR_LOCATION (expr, locus);
2659 if (!then_se)
2661 /* If there is no 'then', turn
2662 if (a || b); else d
2663 into
2664 if (a); else if (b); else d. */
2665 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2667 /* Keep the original source location on the first 'if'. */
2668 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2669 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2670 /* Set the source location of the || on the second 'if'. */
2671 if (EXPR_HAS_LOCATION (pred))
2672 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2673 else_ = shortcut_cond_expr (expr);
2674 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2675 pred = TREE_OPERAND (pred, 0);
2676 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2677 SET_EXPR_LOCATION (expr, locus);
2681 /* If we're done, great. */
2682 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2683 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2684 return expr;
2686 /* Otherwise we need to mess with gotos. Change
2687 if (a) c; else d;
2689 if (a); else goto no;
2690 c; goto end;
2691 no: d; end:
2692 and recursively gimplify the condition. */
2694 true_label = false_label = end_label = NULL_TREE;
2696 /* If our arms just jump somewhere, hijack those labels so we don't
2697 generate jumps to jumps. */
2699 if (then_
2700 && TREE_CODE (then_) == GOTO_EXPR
2701 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2703 true_label = GOTO_DESTINATION (then_);
2704 then_ = NULL;
2705 then_se = false;
2708 if (else_
2709 && TREE_CODE (else_) == GOTO_EXPR
2710 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2712 false_label = GOTO_DESTINATION (else_);
2713 else_ = NULL;
2714 else_se = false;
2717 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2718 if (true_label)
2719 true_label_p = &true_label;
2720 else
2721 true_label_p = NULL;
2723 /* The 'else' branch also needs a label if it contains interesting code. */
2724 if (false_label || else_se)
2725 false_label_p = &false_label;
2726 else
2727 false_label_p = NULL;
2729 /* If there was nothing else in our arms, just forward the label(s). */
2730 if (!then_se && !else_se)
2731 return shortcut_cond_r (pred, true_label_p, false_label_p,
2732 EXPR_LOC_OR_LOC (expr, input_location));
2734 /* If our last subexpression already has a terminal label, reuse it. */
2735 if (else_se)
2736 t = expr_last (else_);
2737 else if (then_se)
2738 t = expr_last (then_);
2739 else
2740 t = NULL;
2741 if (t && TREE_CODE (t) == LABEL_EXPR)
2742 end_label = LABEL_EXPR_LABEL (t);
2744 /* If we don't care about jumping to the 'else' branch, jump to the end
2745 if the condition is false. */
2746 if (!false_label_p)
2747 false_label_p = &end_label;
2749 /* We only want to emit these labels if we aren't hijacking them. */
2750 emit_end = (end_label == NULL_TREE);
2751 emit_false = (false_label == NULL_TREE);
2753 /* We only emit the jump over the else clause if we have to--if the
2754 then clause may fall through. Otherwise we can wind up with a
2755 useless jump and a useless label at the end of gimplified code,
2756 which will cause us to think that this conditional as a whole
2757 falls through even if it doesn't. If we then inline a function
2758 which ends with such a condition, that can cause us to issue an
2759 inappropriate warning about control reaching the end of a
2760 non-void function. */
2761 jump_over_else = block_may_fallthru (then_);
2763 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2764 EXPR_LOC_OR_LOC (expr, input_location));
/* Stitch together: condition jumps, then-arm, optional jump over the
   else-arm, the else label and arm, and finally the end label.  */
2766 expr = NULL;
2767 append_to_statement_list (pred, &expr);
2769 append_to_statement_list (then_, &expr);
2770 if (else_se)
2772 if (jump_over_else)
2774 tree last = expr_last (expr);
2775 t = build_and_jump (&end_label);
2776 if (EXPR_HAS_LOCATION (last))
2777 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2778 append_to_statement_list (t, &expr);
2780 if (emit_false)
2782 t = build1 (LABEL_EXPR, void_type_node, false_label);
2783 append_to_statement_list (t, &expr);
2785 append_to_statement_list (else_, &expr);
2787 if (emit_end && end_label)
2789 t = build1 (LABEL_EXPR, void_type_node, end_label);
2790 append_to_statement_list (t, &expr);
2793 return expr;
2796 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2798 tree
2799 gimple_boolify (tree expr)
/* Recursively gives EXPR boolean type where it computes a truth value,
   or converts it to boolean_type_node otherwise.  */
2801 tree type = TREE_TYPE (expr);
2802 location_t loc = EXPR_LOCATION (expr);
2804 if (TREE_CODE (expr) == NE_EXPR
2805 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2806 && integer_zerop (TREE_OPERAND (expr, 1)))
2808 tree call = TREE_OPERAND (expr, 0);
2809 tree fn = get_callee_fndecl (call);
2811 /* For __builtin_expect ((long) (x), y) recurse into x as well
2812 if x is truth_value_p. */
2813 if (fn
2814 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2815 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2816 && call_expr_nargs (call) == 2)
2818 tree arg = CALL_EXPR_ARG (call, 0)
2819 if (arg)
/* Strip a widening NOP around the truth value first.  */
2821 if (TREE_CODE (arg) == NOP_EXPR
2822 && TREE_TYPE (arg) == TREE_TYPE (call))
2823 arg = TREE_OPERAND (arg, 0);
2824 if (truth_value_p (TREE_CODE (arg)))
2826 arg = gimple_boolify (arg);
2827 CALL_EXPR_ARG (call, 0)
2828 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2834 switch (TREE_CODE (expr))
2836 case TRUTH_AND_EXPR:
2837 case TRUTH_OR_EXPR:
2838 case TRUTH_XOR_EXPR:
2839 case TRUTH_ANDIF_EXPR:
2840 case TRUTH_ORIF_EXPR:
2841 /* Also boolify the arguments of truth exprs. */
2842 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2843 /* FALLTHRU */
2845 case TRUTH_NOT_EXPR:
2846 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2848 /* These expressions always produce boolean results. */
2849 if (TREE_CODE (type) != BOOLEAN_TYPE)
2850 TREE_TYPE (expr) = boolean_type_node;
2851 return expr;
2853 case ANNOTATE_EXPR:
2854 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
2856 case annot_expr_ivdep_kind:
2857 case annot_expr_no_vector_kind:
2858 case annot_expr_vector_kind:
/* Loop annotations wrap the condition; boolify what they wrap.  */
2859 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2860 if (TREE_CODE (type) != BOOLEAN_TYPE)
2861 TREE_TYPE (expr) = boolean_type_node;
2862 return expr;
2863 default:
2864 gcc_unreachable ();
2867 default:
2868 if (COMPARISON_CLASS_P (expr))
2870 /* These expressions always produce boolean results. */
2871 if (TREE_CODE (type) != BOOLEAN_TYPE)
2872 TREE_TYPE (expr) = boolean_type_node;
2873 return expr;
2875 /* Other expressions that get here must have boolean values, but
2876 might need to be converted to the appropriate mode. */
2877 if (TREE_CODE (type) == BOOLEAN_TYPE)
2878 return expr;
2879 return fold_convert_loc (loc, boolean_type_node, expr);
2883 /* Given a conditional expression *EXPR_P without side effects, gimplify
2884 its operands. New statements are inserted to PRE_P. */
2886 static enum gimplify_status
2887 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2889 tree expr = *expr_p, cond;
2890 enum gimplify_status ret, tret;
2891 enum tree_code code;
2893 cond = gimple_boolify (COND_EXPR_COND (expr));
2895 /* We need to handle && and || specially, as their gimplification
2896 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2897 code = TREE_CODE (cond);
2898 if (code == TRUTH_ANDIF_EXPR)
2899 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2900 else if (code == TRUTH_ORIF_EXPR)
2901 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2902 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2903 COND_EXPR_COND (*expr_p) = cond;
2905 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2906 is_gimple_val, fb_rvalue);
2907 ret = MIN (ret, tret);
2908 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2909 is_gimple_val, fb_rvalue);
2911 return MIN (ret, tret);
2914 /* Return true if evaluating EXPR could trap.
2915 EXPR is GENERIC, while tree_could_trap_p can be called
2916 only on GIMPLE. */
2918 static bool
2919 generic_expr_could_trap_p (tree expr)
2921 unsigned i, n;
2923 if (!expr || is_gimple_val (expr))
2924 return false;
2926 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2927 return true;
2929 n = TREE_OPERAND_LENGTH (expr);
2930 for (i = 0; i < n; i++)
2931 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2932 return true;
2934 return false;
2937 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2938 into
2940 if (p) if (p)
2941 t1 = a; a;
2942 else or else
2943 t1 = b; b;
2946 The second form is used when *EXPR_P is of type void.
2948 PRE_P points to the list where side effects that must happen before
2949 *EXPR_P should be stored. */
2951 static enum gimplify_status
2952 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2954 tree expr = *expr_p;
2955 tree type = TREE_TYPE (expr);
2956 location_t loc = EXPR_LOCATION (expr);
2957 tree tmp, arm1, arm2;
2958 enum gimplify_status ret;
2959 tree label_true, label_false, label_cont;
2960 bool have_then_clause_p, have_else_clause_p;
2961 gimple gimple_cond;
2962 enum tree_code pred_code;
2963 gimple_seq seq = NULL;
2965 /* If this COND_EXPR has a value, copy the values into a temporary within
2966 the arms. */
2967 if (!VOID_TYPE_P (type))
2969 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
2970 tree result;
2972 /* If either an rvalue is ok or we do not require an lvalue, create the
2973 temporary. But we cannot do that if the type is addressable. */
2974 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
2975 && !TREE_ADDRESSABLE (type))
2977 if (gimplify_ctxp->allow_rhs_cond_expr
2978 /* If either branch has side effects or could trap, it can't be
2979 evaluated unconditionally. */
2980 && !TREE_SIDE_EFFECTS (then_)
2981 && !generic_expr_could_trap_p (then_)
2982 && !TREE_SIDE_EFFECTS (else_)
2983 && !generic_expr_could_trap_p (else_)
2984 return gimplify_pure_cond_expr (expr_p, pre_p);
2986 tmp = create_tmp_var (type, "iftmp");
2987 result = tmp;
2990 /* Otherwise, only create and copy references to the values. */
2991 else
/* An lvalue is required (or the type is addressable): take the
   address of each arm and make the result a dereference of the
   pointer temporary.  */
2993 type = build_pointer_type (type);
2995 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2996 then_ = build_fold_addr_expr_loc (loc, then_);
2998 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2999 else_ = build_fold_addr_expr_loc (loc, else_);
3001 expr
3002 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3004 tmp = create_tmp_var (type, "iftmp");
3005 result = build_simple_mem_ref_loc (loc, tmp);
3008 /* Build the new then clause, `tmp = then_;'. But don't build the
3009 assignment if the value is void; in C++ it can be if it's a throw. */
3010 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3011 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3013 /* Similarly, build the new else clause, `tmp = else_;'. */
3014 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3015 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3017 TREE_TYPE (expr) = void_type_node;
3018 recalculate_side_effects (expr);
3020 /* Move the COND_EXPR to the prequeue. */
3021 gimplify_stmt (&expr, pre_p);
3023 *expr_p = result;
3024 return GS_ALL_DONE;
3027 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3028 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3029 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3030 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3032 /* Make sure the condition has BOOLEAN_TYPE. */
3033 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3035 /* Break apart && and || conditions. */
3036 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3037 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3039 expr = shortcut_cond_expr (expr);
3041 if (expr != *expr_p)
3043 *expr_p = expr;
3045 /* We can't rely on gimplify_expr to re-gimplify the expanded
3046 form properly, as cleanups might cause the target labels to be
3047 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3048 set up a conditional context. */
3049 gimple_push_condition ();
3050 gimplify_stmt (expr_p, &seq);
3051 gimple_pop_condition (pre_p);
3052 gimple_seq_add_seq (pre_p, seq);
3054 return GS_ALL_DONE;
3058 /* Now do the normal gimplification. */
3060 /* Gimplify condition. */
3061 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3062 fb_rvalue);
3063 if (ret == GS_ERROR)
3064 return GS_ERROR;
3065 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3067 gimple_push_condition ();
/* If an arm is already a plain goto to a local label, reuse that label
   as the branch target instead of introducing an artificial one.  */
3069 have_then_clause_p = have_else_clause_p = false;
3070 if (TREE_OPERAND (expr, 1) != NULL
3071 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3072 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3073 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3074 == current_function_decl)
3075 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3076 have different locations, otherwise we end up with incorrect
3077 location information on the branches. */
3078 && (optimize
3079 || !EXPR_HAS_LOCATION (expr)
3080 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3081 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3083 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3084 have_then_clause_p = true;
3086 else
3087 label_true = create_artificial_label (UNKNOWN_LOCATION);
3088 if (TREE_OPERAND (expr, 2) != NULL
3089 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3090 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3091 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3092 == current_function_decl)
3093 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3094 have different locations, otherwise we end up with incorrect
3095 location information on the branches. */
3096 && (optimize
3097 || !EXPR_HAS_LOCATION (expr)
3098 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3099 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3101 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3102 have_else_clause_p = true;
3104 else
3105 label_false = create_artificial_label (UNKNOWN_LOCATION);
/* Decompose the boolean condition and emit the low-level GIMPLE_COND
   branching to the two labels chosen above.  */
3107 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3108 &arm2);
3110 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3111 label_false);
3113 gimplify_seq_add_stmt (&seq, gimple_cond);
3114 label_cont = NULL_TREE;
3115 if (!have_then_clause_p)
3117 /* For if (...) {} else { code; } put label_true after
3118 the else block. */
3119 if (TREE_OPERAND (expr, 1) == NULL_TREE
3120 && !have_else_clause_p
3121 && TREE_OPERAND (expr, 2) != NULL_TREE)
3122 label_cont = label_true;
3123 else
3125 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3126 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3127 /* For if (...) { code; } else {} or
3128 if (...) { code; } else goto label; or
3129 if (...) { code; return; } else { ... }
3130 label_cont isn't needed. */
3131 if (!have_else_clause_p
3132 && TREE_OPERAND (expr, 2) != NULL_TREE
3133 && gimple_seq_may_fallthru (seq))
3135 gimple g;
3136 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3138 g = gimple_build_goto (label_cont);
3140 /* GIMPLE_COND's are very low level; they have embedded
3141 gotos. This particular embedded goto should not be marked
3142 with the location of the original COND_EXPR, as it would
3143 correspond to the COND_EXPR's condition, not the ELSE or the
3144 THEN arms. To avoid marking it with the wrong location, flag
3145 it as "no location". */
3146 gimple_set_do_not_emit_location (g);
3148 gimplify_seq_add_stmt (&seq, g);
3152 if (!have_else_clause_p)
3154 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3155 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3157 if (label_cont)
3158 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3160 gimple_pop_condition (pre_p);
3161 gimple_seq_add_seq (pre_p, seq);
3163 if (ret == GS_ERROR)
3164 ; /* Do nothing. */
3165 else if (have_then_clause_p || have_else_clause_p)
3166 ret = GS_ALL_DONE;
3167 else
3169 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3170 expr = TREE_OPERAND (expr, 0);
3171 gimplify_stmt (&expr, pre_p);
3174 *expr_p = NULL;
3175 return ret;
3178 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3179 to be marked addressable.
3181 We cannot rely on such an expression being directly markable if a temporary
3182 has been created by the gimplification. In this case, we create another
3183 temporary and initialize it with a copy, which will become a store after we
3184 mark it addressable. This can happen if the front-end passed us something
3185 that it could not mark addressable yet, like a Fortran pass-by-reference
3186 parameter (int) floatvar. */
3188 static void
3189 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3191 while (handled_component_p (*expr_p))
3192 expr_p = &TREE_OPERAND (*expr_p, 0);
3193 if (is_gimple_reg (*expr_p))
3195 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3196 DECL_GIMPLE_REG_P (var) = 0;
3197 *expr_p = var;
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.

   SIZE is the number of bytes to copy.  If WANT_VALUE is true, the
   MODIFY_EXPR's value is needed by the caller, so the replacement
   expression must yield the copied object; memcpy returns the
   destination pointer, which we capture and dereference for that.
   Returns GS_ALL_DONE in all cases.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gimple gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memcpy returns TO's address; dereference it to expose the
         stored object as the expression's value.  */
      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list, i.e. zero-initialization
   of SIZE bytes.  If WANT_VALUE is true, the zeroed object itself is
   the resulting value (memset returns the destination pointer, which
   we dereference).  Returns GS_ALL_DONE in all cases.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gimple gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Context passed through walk_tree to gimplify_init_ctor_preeval_1,
   describing the lhs so constructor elements can be checked for
   overlap against it.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
/* walk_tree callback checking one constructor subtree *TP against the
   lhs described by XDATA (a gimplify_init_ctor_preeval_data).  Returns
   the offending tree on potential overlap, NULL otherwise.  Clears
   *WALK_SUBTREES below types and decls, which cannot hide further
   references.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Any pointer-typed parameter whose pointed-to type conflicts with
	 the lhs alias set could let the callee reach the lhs.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   Pre-evaluation statements go to PRE_P / POST_P.  On gimplification
   error the element is replaced by NULL (later skipped by
   gimplify_init_ctor_eval).  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type, NULL);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3497 /* Return true if FDECL is accessing a field that is zero sized. */
3499 static bool
3500 zero_sized_field_decl (const_tree fdecl)
3502 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3503 && integer_zerop (DECL_SIZE (fdecl)))
3504 return true;
3505 return false;
3508 /* Return true if TYPE is zero sized. */
3510 static bool
3511 zero_sized_type (const_tree type)
3513 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3514 && integer_zerop (TYPE_SIZE (type)))
3515 return true;
3516 return false;
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first, in which case zero-valued elements can be skipped.
   Statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  /* Non-array aggregates index their elements by FIELD_DECL.  */
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3608 /* Return the appropriate RHS predicate for this LHS. */
3610 gimple_predicate
3611 rhs_predicate_for (tree lhs)
3613 if (is_gimple_reg (lhs))
3614 return is_gimple_reg_rhs_or_call;
3615 else
3616 return is_gimple_mem_rhs_or_call;
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   GIMPLE_TEST_F and FALLBACK describe what the caller can accept; when
   no lvalue is needed and the literal's initializer already satisfies
   the predicate, the initializer is substituted directly.  Returns
   GS_OK.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
3675 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3676 return a new CONSTRUCTOR if something changed. */
3678 static tree
3679 optimize_compound_literals_in_ctor (tree orig_ctor)
3681 tree ctor = orig_ctor;
3682 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3683 unsigned int idx, num = vec_safe_length (elts);
3685 for (idx = 0; idx < num; idx++)
3687 tree value = (*elts)[idx].value;
3688 tree newval = value;
3689 if (TREE_CODE (value) == CONSTRUCTOR)
3690 newval = optimize_compound_literals_in_ctor (value);
3691 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3693 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3694 tree decl = DECL_EXPR_DECL (decl_s);
3695 tree init = DECL_INITIAL (decl);
3697 if (!TREE_ADDRESSABLE (value)
3698 && !TREE_ADDRESSABLE (decl)
3699 && init
3700 && TREE_CODE (init) == CONSTRUCTOR)
3701 newval = optimize_compound_literals_in_ctor (init);
3703 if (newval == value)
3704 continue;
3706 if (ctor == orig_ctor)
3708 ctor = copy_node (orig_ctor);
3709 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3710 elts = CONSTRUCTOR_ELTS (ctor);
3712 (*elts)[idx].value = newval;
3714 return ctor;
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    /* Don't reduce an initializer constant even if we can't
	       make a VECTOR_CST.  It won't do anything for us, and it'll
	       prevent us from representing it as a single constant.  */
	    if (initializer_constant_valid_p (ctor, type))
	      break;

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gimple init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Thin wrapper: kept as a distinct name so rhs-only folding call
     sites are easy to audit; delegates to the generic folder.  */
  return gimple_fold_indirect_ref (t);
}
4091 /* Subroutine of gimplify_modify_expr to do simplifications of
4092 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4093 something changes. */
/* EXPR_P points at the whole MODIFY_EXPR/INIT_EXPR; FROM_P and TO_P point
   at its RHS and LHS operands respectively (see gimplify_modify_expr).
   Generated statements go to PRE_P/POST_P.  Returns GS_UNHANDLED when no
   simplification applied.  */
4095 static enum gimplify_status
4096 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4097 gimple_seq *pre_p, gimple_seq *post_p,
4098 bool want_value)
4100 enum gimplify_status ret = GS_UNHANDLED;
4101 bool changed;
/* Body of a do ... while (changed) loop: keep re-examining the
   (possibly rewritten) RHS until nothing more simplifies.  */
4105 changed = false;
4106 switch (TREE_CODE (*from_p))
4108 case VAR_DECL:
4109 /* If we're assigning from a read-only variable initialized with
4110 a constructor, do the direct assignment from the constructor,
4111 but only if neither source nor target are volatile since this
4112 latter assignment might end up being done on a per-field basis. */
4113 if (DECL_INITIAL (*from_p)
4114 && TREE_READONLY (*from_p)
4115 && !TREE_THIS_VOLATILE (*from_p)
4116 && !TREE_THIS_VOLATILE (*to_p)
4117 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4119 tree old_from = *from_p;
4120 enum gimplify_status subret;
4122 /* Move the constructor into the RHS. */
4123 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4125 /* Let's see if gimplify_init_constructor will need to put
4126 it in memory. */
4127 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4128 false, true);
4129 if (subret == GS_ERROR)
4131 /* If so, revert the change. */
4132 *from_p = old_from;
4134 else
4136 ret = GS_OK;
4137 changed = true;
4140 break;
4141 case INDIRECT_REF:
4143 /* If we have code like
4145 *(const A*)(A*)&x
4147 where the type of "x" is a (possibly cv-qualified variant
4148 of "A"), treat the entire expression as identical to "x".
4149 This kind of code arises in C++ when an object is bound
4150 to a const reference, and if "x" is a TARGET_EXPR we want
4151 to take advantage of the optimization below. */
4152 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4153 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4154 if (t)
4156 if (TREE_THIS_VOLATILE (t) != volatile_p)
4158 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4159 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4160 build_fold_addr_expr (t));
4161 if (REFERENCE_CLASS_P (t))
4162 TREE_THIS_VOLATILE (t) = volatile_p;
4164 *from_p = t;
4165 ret = GS_OK;
4166 changed = true;
4168 break;
4171 case TARGET_EXPR:
4173 /* If we are initializing something from a TARGET_EXPR, strip the
4174 TARGET_EXPR and initialize it directly, if possible. This can't
4175 be done if the initializer is void, since that implies that the
4176 temporary is set in some non-trivial way.
4178 ??? What about code that pulls out the temp and uses it
4179 elsewhere? I think that such code never uses the TARGET_EXPR as
4180 an initializer. If I'm wrong, we'll die because the temp won't
4181 have any RTL. In that case, I guess we'll need to replace
4182 references somehow. */
4183 tree init = TARGET_EXPR_INITIAL (*from_p);
4185 if (init
4186 && !VOID_TYPE_P (TREE_TYPE (init)))
4188 *from_p = init;
4189 ret = GS_OK;
4190 changed = true;
4193 break;
4195 case COMPOUND_EXPR:
4196 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4197 caught. */
4198 gimplify_compound_expr (from_p, pre_p, true);
4199 ret = GS_OK;
4200 changed = true;
4201 break;
4203 case CONSTRUCTOR:
4204 /* If we already made some changes, let the front end have a
4205 crack at this before we break it down. */
4206 if (ret != GS_UNHANDLED)
4207 break;
4208 /* If we're initializing from a CONSTRUCTOR, break this into
4209 individual MODIFY_EXPRs. */
4210 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4211 false);
4213 case COND_EXPR:
4214 /* If we're assigning to a non-register type, push the assignment
4215 down into the branches. This is mandatory for ADDRESSABLE types,
4216 since we cannot generate temporaries for such, but it saves a
4217 copy in other cases as well. */
4218 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4220 /* This code should mirror the code in gimplify_cond_expr. */
4221 enum tree_code code = TREE_CODE (*expr_p);
4222 tree cond = *from_p;
4223 tree result = *to_p;
4225 ret = gimplify_expr (&result, pre_p, post_p,
4226 is_gimple_lvalue, fb_lvalue);
4227 if (ret != GS_ERROR)
4228 ret = GS_OK;
4230 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4231 TREE_OPERAND (cond, 1)
4232 = build2 (code, void_type_node, result,
4233 TREE_OPERAND (cond, 1));
4234 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4235 TREE_OPERAND (cond, 2)
4236 = build2 (code, void_type_node, unshare_expr (result),
4237 TREE_OPERAND (cond, 2));
4239 TREE_TYPE (cond) = void_type_node;
4240 recalculate_side_effects (cond);
4242 if (want_value)
4244 gimplify_and_add (cond, pre_p);
4245 *expr_p = unshare_expr (result);
4247 else
4248 *expr_p = cond;
4249 return ret;
4251 break;
4253 case CALL_EXPR:
4254 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4255 return slot so that we don't generate a temporary. */
4256 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4257 && aggregate_value_p (*from_p, *from_p))
4259 bool use_target;
/* Decide whether *to_p itself may serve as the call's return slot.  */
4261 if (!(rhs_predicate_for (*to_p))(*from_p))
4262 /* If we need a temporary, *to_p isn't accurate. */
4263 use_target = false;
4264 /* It's OK to use the return slot directly unless it's an NRV. */
4265 else if (TREE_CODE (*to_p) == RESULT_DECL
4266 && DECL_NAME (*to_p) == NULL_TREE
4267 && needs_to_live_in_memory (*to_p))
4268 use_target = true;
4269 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4270 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4271 /* Don't force regs into memory. */
4272 use_target = false;
4273 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4274 /* It's OK to use the target directly if it's being
4275 initialized. */
4276 use_target = true;
4277 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4278 /* Always use the target and thus RSO for variable-sized types.
4279 GIMPLE cannot deal with a variable-sized assignment
4280 embedded in a call statement. */
4281 use_target = true;
4282 else if (TREE_CODE (*to_p) != SSA_NAME
4283 && (!is_gimple_variable (*to_p)
4284 || needs_to_live_in_memory (*to_p)))
4285 /* Don't use the original target if it's already addressable;
4286 if its address escapes, and the called function uses the
4287 NRV optimization, a conforming program could see *to_p
4288 change before the called function returns; see c++/19317.
4289 When optimizing, the return_slot pass marks more functions
4290 as safe after we have escape info. */
4291 use_target = false;
4292 else
4293 use_target = true;
4295 if (use_target)
4297 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4298 mark_addressable (*to_p);
4301 break;
4303 case WITH_SIZE_EXPR:
4304 /* Likewise for calls that return an aggregate of non-constant size,
4305 since we would not be able to generate a temporary at all. */
4306 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4308 *from_p = TREE_OPERAND (*from_p, 0);
4309 /* We don't change ret in this case because the
4310 WITH_SIZE_EXPR might have been added in
4311 gimplify_modify_expr, so returning GS_OK would lead to an
4312 infinite loop. */
4313 changed = true;
4315 break;
4317 /* If we're initializing from a container, push the initialization
4318 inside it. */
4319 case CLEANUP_POINT_EXPR:
4320 case BIND_EXPR:
4321 case STATEMENT_LIST:
4323 tree wrap = *from_p;
4324 tree t;
4326 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4327 fb_lvalue);
4328 if (ret != GS_ERROR)
4329 ret = GS_OK;
4331 t = voidify_wrapper_expr (wrap, *expr_p);
4332 gcc_assert (t == *expr_p);
4334 if (want_value)
4336 gimplify_and_add (wrap, pre_p);
4337 *expr_p = unshare_expr (*to_p);
4339 else
4340 *expr_p = wrap;
4341 return GS_OK;
4344 case COMPOUND_LITERAL_EXPR:
4346 tree complit = TREE_OPERAND (*expr_p, 1);
4347 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4348 tree decl = DECL_EXPR_DECL (decl_s);
4349 tree init = DECL_INITIAL (decl);
4351 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4352 into struct T x = { 0, 1, 2 } if the address of the
4353 compound literal has never been taken. */
4354 if (!TREE_ADDRESSABLE (complit)
4355 && !TREE_ADDRESSABLE (decl)
4356 && init)
4358 *expr_p = copy_node (*expr_p);
4359 TREE_OPERAND (*expr_p, 1) = init;
4360 return GS_OK;
4364 default:
4365 break;
4368 while (changed);
4370 return ret;
4374 /* Return true if T looks like a valid GIMPLE statement. */
4376 static bool
4377 is_gimple_stmt (tree t)
4379 const enum tree_code code = TREE_CODE (t);
4381 switch (code)
4383 case NOP_EXPR:
4384 /* The only valid NOP_EXPR is the empty statement. */
4385 return IS_EMPTY_STMT (t);
4387 case BIND_EXPR:
4388 case COND_EXPR:
4389 /* These are only valid if they're void. */
4390 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4392 case SWITCH_EXPR:
4393 case GOTO_EXPR:
4394 case RETURN_EXPR:
4395 case LABEL_EXPR:
4396 case CASE_LABEL_EXPR:
4397 case TRY_CATCH_EXPR:
4398 case TRY_FINALLY_EXPR:
4399 case EH_FILTER_EXPR:
4400 case CATCH_EXPR:
4401 case ASM_EXPR:
4402 case STATEMENT_LIST:
4403 case OMP_PARALLEL:
4404 case OMP_FOR:
4405 case OMP_SIMD:
4406 case CILK_SIMD:
4407 case OMP_DISTRIBUTE:
4408 case OMP_SECTIONS:
4409 case OMP_SECTION:
4410 case OMP_SINGLE:
4411 case OMP_MASTER:
4412 case OMP_TASKGROUP:
4413 case OMP_ORDERED:
4414 case OMP_CRITICAL:
4415 case OMP_TASK:
4416 /* These are always void. */
4417 return true;
4419 case CALL_EXPR:
4420 case MODIFY_EXPR:
4421 case PREDICT_EXPR:
4422 /* These are valid regardless of their type. */
4423 return true;
4425 default:
4426 return false;
4431 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4432 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4433 DECL_GIMPLE_REG_P set.
4435 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4436 other, unmodified part of the complex object just before the total store.
4437 As a consequence, if the object is still uninitialized, an undefined value
4438 will be loaded into a register, which may result in a spurious exception
4439 if the register is floating-point and the value happens to be a signaling
4440 NaN for example. Then the fully-fledged complex operations lowering pass
4441 followed by a DCE pass are necessary in order to fix things up. */
4443 static enum gimplify_status
4444 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4445 bool want_value)
4447 enum tree_code code, ocode;
4448 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4450 lhs = TREE_OPERAND (*expr_p, 0);
4451 rhs = TREE_OPERAND (*expr_p, 1);
4452 code = TREE_CODE (lhs);
4453 lhs = TREE_OPERAND (lhs, 0);
4455 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4456 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4457 TREE_NO_WARNING (other) = 1;
4458 other = get_formal_tmp_var (other, pre_p);
4460 realpart = code == REALPART_EXPR ? rhs : other;
4461 imagpart = code == REALPART_EXPR ? other : rhs;
4463 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4464 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4465 else
4466 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4468 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4469 *expr_p = (want_value) ? rhs : NULL_TREE;
4471 return GS_ALL_DONE;
4474 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4476 modify_expr
4477 : varname '=' rhs
4478 | '*' ID '=' rhs
4480 PRE_P points to the list where side effects that must happen before
4481 *EXPR_P should be stored.
4483 POST_P points to the list where side effects that must happen after
4484 *EXPR_P should be stored.
4486 WANT_VALUE is nonzero iff we want to use the value of this expression
4487 in another expression. */
4489 static enum gimplify_status
4490 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4491 bool want_value)
4493 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4494 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4495 enum gimplify_status ret = GS_UNHANDLED;
4496 gimple assign;
4497 location_t loc = EXPR_LOCATION (*expr_p);
4498 gimple_stmt_iterator gsi;
4500 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4501 || TREE_CODE (*expr_p) == INIT_EXPR);
4503 /* Trying to simplify a clobber using normal logic doesn't work,
4504 so handle it here. */
4505 if (TREE_CLOBBER_P (*from_p))
4507 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4508 if (ret == GS_ERROR)
4509 return ret;
4510 gcc_assert (!want_value
4511 && (TREE_CODE (*to_p) == VAR_DECL
4512 || TREE_CODE (*to_p) == MEM_REF));
4513 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4514 *expr_p = NULL;
4515 return GS_ALL_DONE;
4518 /* Insert pointer conversions required by the middle-end that are not
4519 required by the frontend. This fixes middle-end type checking for
4520 for example gcc.dg/redecl-6.c. */
4521 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4523 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4524 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4525 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4528 /* See if any simplifications can be done based on what the RHS is. */
4529 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4530 want_value);
4531 if (ret != GS_UNHANDLED)
4532 return ret;
4534 /* For zero sized types only gimplify the left hand side and right hand
4535 side as statements and throw away the assignment. Do this after
4536 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4537 types properly. */
4538 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4540 gimplify_stmt (from_p, pre_p);
4541 gimplify_stmt (to_p, pre_p);
4542 *expr_p = NULL_TREE;
4543 return GS_ALL_DONE;
4546 /* If the value being copied is of variable width, compute the length
4547 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4548 before gimplifying any of the operands so that we can resolve any
4549 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4550 the size of the expression to be copied, not of the destination, so
4551 that is what we must do here. */
4552 maybe_with_size_expr (from_p);
4554 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4555 if (ret == GS_ERROR)
4556 return ret;
4558 /* As a special case, we have to temporarily allow for assignments
4559 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4560 a toplevel statement, when gimplifying the GENERIC expression
4561 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4562 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4564 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4565 prevent gimplify_expr from trying to create a new temporary for
4566 foo's LHS, we tell it that it should only gimplify until it
4567 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4568 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4569 and all we need to do here is set 'a' to be its LHS. */
4570 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4571 fb_rvalue);
4572 if (ret == GS_ERROR)
4573 return ret;
4575 /* Now see if the above changed *from_p to something we handle specially. */
4576 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4577 want_value);
4578 if (ret != GS_UNHANDLED)
4579 return ret;
4581 /* If we've got a variable sized assignment between two lvalues (i.e. does
4582 not involve a call), then we can make things a bit more straightforward
4583 by converting the assignment to memcpy or memset. */
4584 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4586 tree from = TREE_OPERAND (*from_p, 0);
4587 tree size = TREE_OPERAND (*from_p, 1);
4589 if (TREE_CODE (from) == CONSTRUCTOR)
4590 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4592 if (is_gimple_addressable (from))
4594 *from_p = from;
4595 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4596 pre_p);
4600 /* Transform partial stores to non-addressable complex variables into
4601 total stores. This allows us to use real instead of virtual operands
4602 for these variables, which improves optimization. */
4603 if ((TREE_CODE (*to_p) == REALPART_EXPR
4604 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4605 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4606 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4608 /* Try to alleviate the effects of the gimplification creating artificial
4609 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4610 if (!gimplify_ctxp->into_ssa
4611 && TREE_CODE (*from_p) == VAR_DECL
4612 && DECL_IGNORED_P (*from_p)
4613 && DECL_P (*to_p)
4614 && !DECL_IGNORED_P (*to_p))
4616 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4617 DECL_NAME (*from_p)
4618 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4619 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4620 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
/* If the caller wants the value and the LHS is volatile, evaluate the
   RHS into a temporary first so that the value returned below does not
   re-read the volatile location.  */
4623 if (want_value && TREE_THIS_VOLATILE (*to_p))
4624 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4626 if (TREE_CODE (*from_p) == CALL_EXPR)
4628 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4629 instead of a GIMPLE_ASSIGN. */
4630 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4631 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4632 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4633 tree fndecl = get_callee_fndecl (*from_p);
4634 if (fndecl
4635 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4636 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4637 && call_expr_nargs (*from_p) == 3)
4638 assign = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4639 CALL_EXPR_ARG (*from_p, 0),
4640 CALL_EXPR_ARG (*from_p, 1),
4641 CALL_EXPR_ARG (*from_p, 2));
4642 else
4644 assign = gimple_build_call_from_tree (*from_p);
4645 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4647 notice_special_calls (assign);
4648 if (!gimple_call_noreturn_p (assign))
4649 gimple_call_set_lhs (assign, *to_p);
4651 else
4653 assign = gimple_build_assign (*to_p, *from_p);
4654 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4657 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4659 /* We should have got an SSA name from the start. */
4660 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
/* Emit the statement and give statement folding a chance at it.  */
4663 gimplify_seq_add_stmt (pre_p, assign);
4664 gsi = gsi_last (*pre_p);
4665 maybe_fold_stmt (&gsi);
4667 if (want_value)
4669 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4670 return GS_OK;
4672 else
4673 *expr_p = NULL;
4675 return GS_ALL_DONE;
4678 /* Gimplify a comparison between two variable-sized objects. Do this
4679 with a call to BUILT_IN_MEMCMP. */
4681 static enum gimplify_status
4682 gimplify_variable_sized_compare (tree *expr_p)
4684 location_t loc = EXPR_LOCATION (*expr_p);
4685 tree op0 = TREE_OPERAND (*expr_p, 0);
4686 tree op1 = TREE_OPERAND (*expr_p, 1);
4687 tree t, arg, dest, src, expr;
4689 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4690 arg = unshare_expr (arg);
4691 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4692 src = build_fold_addr_expr_loc (loc, op1);
4693 dest = build_fold_addr_expr_loc (loc, op0);
4694 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4695 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4697 expr
4698 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4699 SET_EXPR_LOCATION (expr, loc);
4700 *expr_p = expr;
4702 return GS_OK;
4705 /* Gimplify a comparison between two aggregate objects of integral scalar
4706 mode as a comparison between the bitwise equivalent scalar values. */
4708 static enum gimplify_status
4709 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4711 location_t loc = EXPR_LOCATION (*expr_p);
4712 tree op0 = TREE_OPERAND (*expr_p, 0);
4713 tree op1 = TREE_OPERAND (*expr_p, 1);
4715 tree type = TREE_TYPE (op0);
4716 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4718 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4719 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4721 *expr_p
4722 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4724 return GS_OK;
4727 /* Gimplify an expression sequence. This function gimplifies each
4728 expression and rewrites the original expression with the last
4729 expression of the sequence in GIMPLE form.
4731 PRE_P points to the list where the side effects for all the
4732 expressions in the sequence will be emitted.
4734 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4736 static enum gimplify_status
4737 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4739 tree t = *expr_p;
4743 tree *sub_p = &TREE_OPERAND (t, 0);
4745 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4746 gimplify_compound_expr (sub_p, pre_p, false);
4747 else
4748 gimplify_stmt (sub_p, pre_p);
4750 t = TREE_OPERAND (t, 1);
4752 while (TREE_CODE (t) == COMPOUND_EXPR);
4754 *expr_p = t;
4755 if (want_value)
4756 return GS_OK;
4757 else
4759 gimplify_stmt (expr_p, pre_p);
4760 return GS_ALL_DONE;
4764 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4765 gimplify. After gimplification, EXPR_P will point to a new temporary
4766 that holds the original value of the SAVE_EXPR node.
4768 PRE_P points to the list where side effects that must happen before
4769 *EXPR_P should be stored. */
4771 static enum gimplify_status
4772 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4774 enum gimplify_status ret = GS_ALL_DONE;
4775 tree val;
4777 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4778 val = TREE_OPERAND (*expr_p, 0);
4780 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4781 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4783 /* The operand may be a void-valued expression such as SAVE_EXPRs
4784 generated by the Java frontend for class initialization. It is
4785 being executed only for its side-effects. */
4786 if (TREE_TYPE (val) == void_type_node)
4788 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4789 is_gimple_stmt, fb_none);
4790 val = NULL;
4792 else
4793 val = get_initialized_tmp_var (val, pre_p, post_p);
4795 TREE_OPERAND (*expr_p, 0) = val;
4796 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4799 *expr_p = val;
4801 return ret;
4804 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4806 unary_expr
4807 : ...
4808 | '&' varname
4811 PRE_P points to the list where side effects that must happen before
4812 *EXPR_P should be stored.
4814 POST_P points to the list where side effects that must happen after
4815 *EXPR_P should be stored. */
4817 static enum gimplify_status
4818 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4820 tree expr = *expr_p;
4821 tree op0 = TREE_OPERAND (expr, 0);
4822 enum gimplify_status ret;
4823 location_t loc = EXPR_LOCATION (*expr_p);
/* Dispatch on what we are taking the address of.  */
4825 switch (TREE_CODE (op0))
4827 case INDIRECT_REF:
4828 do_indirect_ref:
4829 /* Check if we are dealing with an expression of the form '&*ptr'.
4830 While the front end folds away '&*ptr' into 'ptr', these
4831 expressions may be generated internally by the compiler (e.g.,
4832 builtins like __builtin_va_end). */
4833 /* Caution: the silent array decomposition semantics we allow for
4834 ADDR_EXPR means we can't always discard the pair. */
4835 /* Gimplification of the ADDR_EXPR operand may drop
4836 cv-qualification conversions, so make sure we add them if
4837 needed. */
4839 tree op00 = TREE_OPERAND (op0, 0);
4840 tree t_expr = TREE_TYPE (expr);
4841 tree t_op00 = TREE_TYPE (op00);
4843 if (!useless_type_conversion_p (t_expr, t_op00))
4844 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4845 *expr_p = op00;
4846 ret = GS_OK;
4848 break;
4850 case VIEW_CONVERT_EXPR:
4851 /* Take the address of our operand and then convert it to the type of
4852 this ADDR_EXPR.
4854 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4855 all clear. The impact of this transformation is even less clear. */
4857 /* If the operand is a useless conversion, look through it. Doing so
4858 guarantees that the ADDR_EXPR and its operand will remain of the
4859 same type. */
4860 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4861 op0 = TREE_OPERAND (op0, 0);
4863 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4864 build_fold_addr_expr_loc (loc,
4865 TREE_OPERAND (op0, 0)));
4866 ret = GS_OK;
4867 break;
4869 default:
4870 /* We use fb_either here because the C frontend sometimes takes
4871 the address of a call that returns a struct; see
4872 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4873 the implied temporary explicit. */
4875 /* Make the operand addressable. */
4876 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4877 is_gimple_addressable, fb_either);
4878 if (ret == GS_ERROR)
4879 break;
4881 /* Then mark it. Beware that it may not be possible to do so directly
4882 if a temporary has been created by the gimplification. */
4883 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4885 op0 = TREE_OPERAND (expr, 0);
4887 /* For various reasons, the gimplification of the expression
4888 may have made a new INDIRECT_REF. */
4889 if (TREE_CODE (op0) == INDIRECT_REF)
4890 goto do_indirect_ref;
4892 mark_addressable (TREE_OPERAND (expr, 0));
4894 /* Fix to PR/41163 (r151122) broke LIPO. Calls to builtin functions
4895 were 'canonicized' in profile-use pass, but not in profile-gen. */
4896 if (!flag_dyn_ipa)
4898 /* The FEs may end up building ADDR_EXPRs early on a decl with
4899 an incomplete type. Re-build ADDR_EXPRs in canonical form
4900 here. */
4901 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4902 *expr_p = build_fold_addr_expr (op0);
4905 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4906 recompute_tree_invariant_for_addr_expr (*expr_p);
4908 /* Fix to PR/41163 (r151122) broke LIPO. Calls to builtin functions
4909 were 'canonicized' in profile-use pass, but not in profile-gen. */
4910 if (!flag_dyn_ipa)
4912 /* If we re-built the ADDR_EXPR add a conversion to the original type
4913 if required. */
4914 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4915 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4918 break;
4921 return ret;
4924 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4925 value; output operands should be a gimple lvalue. */
4927 static enum gimplify_status
4928 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4930 tree expr;
4931 int noutputs;
4932 const char **oconstraints;
4933 int i;
4934 tree link;
4935 const char *constraint;
4936 bool allows_mem, allows_reg, is_inout;
4937 enum gimplify_status ret, tret;
4938 gimple stmt;
4939 vec<tree, va_gc> *inputs;
4940 vec<tree, va_gc> *outputs;
4941 vec<tree, va_gc> *clobbers;
4942 vec<tree, va_gc> *labels;
4943 tree link_next;
4945 expr = *expr_p;
4946 noutputs = list_length (ASM_OUTPUTS (expr));
4947 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4949 inputs = NULL;
4950 outputs = NULL;
4951 clobbers = NULL;
4952 labels = NULL;
4954 ret = GS_ALL_DONE;
4955 link_next = NULL_TREE;
/* First pass: gimplify each output operand and record its constraint;
   an in/out ("+") operand is split into a pure output plus a matching
   input appended to ASM_INPUTS.  */
4956 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4958 bool ok;
4959 size_t constraint_len;
4961 link_next = TREE_CHAIN (link);
4963 oconstraints[i]
4964 = constraint
4965 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4966 constraint_len = strlen (constraint);
4967 if (constraint_len == 0)
4968 continue;
4970 ok = parse_output_constraint (&constraint, i, 0, 0,
4971 &allows_mem, &allows_reg, &is_inout);
4972 if (!ok)
4974 ret = GS_ERROR;
4975 is_inout = false;
4978 if (!allows_reg && allows_mem)
4979 mark_addressable (TREE_VALUE (link));
4981 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4982 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4983 fb_lvalue | fb_mayfail);
4984 if (tret == GS_ERROR)
4986 error ("invalid lvalue in asm output %d", i);
4987 ret = tret;
4990 vec_safe_push (outputs, link);
4991 TREE_CHAIN (link) = NULL_TREE;
4993 if (is_inout)
4995 /* An input/output operand. To give the optimizers more
4996 flexibility, split it into separate input and output
4997 operands. */
4998 tree input;
4999 char buf[10];
5001 /* Turn the in/out constraint into an output constraint. */
5002 char *p = xstrdup (constraint);
5003 p[0] = '=';
5004 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5006 /* And add a matching input constraint. */
5007 if (allows_reg)
5009 sprintf (buf, "%d", i);
5011 /* If there are multiple alternatives in the constraint,
5012 handle each of them individually. Those that allow register
5013 will be replaced with operand number, the others will stay
5014 unchanged. */
5015 if (strchr (p, ',') != NULL)
5017 size_t len = 0, buflen = strlen (buf);
5018 char *beg, *end, *str, *dst;
5020 for (beg = p + 1;;)
5022 end = strchr (beg, ',');
5023 if (end == NULL)
5024 end = strchr (beg, '\0');
5025 if ((size_t) (end - beg) < buflen)
5026 len += buflen + 1;
5027 else
5028 len += end - beg + 1;
5029 if (*end)
5030 beg = end + 1;
5031 else
5032 break;
5035 str = (char *) alloca (len);
5036 for (beg = p + 1, dst = str;;)
5038 const char *tem;
5039 bool mem_p, reg_p, inout_p;
5041 end = strchr (beg, ',');
5042 if (end)
5043 *end = '\0';
5044 beg[-1] = '=';
5045 tem = beg - 1;
5046 parse_output_constraint (&tem, i, 0, 0,
5047 &mem_p, &reg_p, &inout_p);
5048 if (dst != str)
5049 *dst++ = ',';
5050 if (reg_p)
5052 memcpy (dst, buf, buflen);
5053 dst += buflen;
5055 else
5057 if (end)
5058 len = end - beg;
5059 else
5060 len = strlen (beg);
5061 memcpy (dst, beg, len);
5062 dst += len;
5064 if (end)
5065 beg = end + 1;
5066 else
5067 break;
5069 *dst = '\0';
5070 input = build_string (dst - str, str);
5072 else
5073 input = build_string (strlen (buf), buf);
5075 else
5076 input = build_string (constraint_len - 1, constraint + 1);
5078 free (p);
5080 input = build_tree_list (build_tree_list (NULL_TREE, input),
5081 unshare_expr (TREE_VALUE (link)));
5082 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5086 link_next = NULL_TREE;
/* Second pass: gimplify the input operands, including any matching
   inputs appended by the in/out splitting above.  */
5087 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5089 link_next = TREE_CHAIN (link);
5090 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5091 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5092 oconstraints, &allows_mem, &allows_reg);
5094 /* If we can't make copies, we can only accept memory. */
5095 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5097 if (allows_mem)
5098 allows_reg = 0;
5099 else
5101 error ("impossible constraint in %<asm%>");
5102 error ("non-memory input %d must stay in memory", i);
5103 return GS_ERROR;
5107 /* If the operand is a memory input, it should be an lvalue. */
5108 if (!allows_reg && allows_mem)
5110 tree inputv = TREE_VALUE (link);
5111 STRIP_NOPS (inputv);
5112 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5113 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5114 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5115 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5116 TREE_VALUE (link) = error_mark_node;
5117 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5118 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5119 mark_addressable (TREE_VALUE (link));
5120 if (tret == GS_ERROR)
5122 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5123 input_location = EXPR_LOCATION (TREE_VALUE (link));
5124 error ("memory input %d is not directly addressable", i);
5125 ret = tret;
5128 else
5130 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5131 is_gimple_asm_val, fb_rvalue);
5132 if (tret == GS_ERROR)
5133 ret = tret;
5136 TREE_CHAIN (link) = NULL_TREE;
5137 vec_safe_push (inputs, link);
5140 link_next = NULL_TREE;
/* Collect the clobbers, then any goto labels, into their vectors.  */
5141 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5143 link_next = TREE_CHAIN (link);
5144 TREE_CHAIN (link) = NULL_TREE;
5145 vec_safe_push (clobbers, link);
5148 link_next = NULL_TREE;
5149 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5151 link_next = TREE_CHAIN (link);
5152 TREE_CHAIN (link) = NULL_TREE;
5153 vec_safe_push (labels, link);
5156 /* Do not add ASMs with errors to the gimple IL stream. */
5157 if (ret != GS_ERROR)
5159 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5160 inputs, outputs, clobbers, labels);
5162 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5163 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5165 gimplify_seq_add_stmt (pre_p, stmt);
5168 return ret;
5171 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5172 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5173 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5174 return to this function.
5176 FIXME should we complexify the prequeue handling instead? Or use flags
5177 for all the cleanups and let the optimizer tighten them up? The current
5178 code seems pretty fragile; it will break on a cleanup within any
5179 non-conditional nesting. But any such nesting would be broken, anyway;
5180 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5181 and continues out of it. We can do that at the RTL level, though, so
5182 having an optimizer to tighten up try/finally regions would be a Good
5183 Thing. */
5185 static enum gimplify_status
5186 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5188 gimple_stmt_iterator iter;
5189 gimple_seq body_sequence = NULL;
5191 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5193 /* We only care about the number of conditions between the innermost
5194 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5195 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5196 int old_conds = gimplify_ctxp->conditions;
5197 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5198 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5199 gimplify_ctxp->conditions = 0;
5200 gimplify_ctxp->conditional_cleanups = NULL;
5201 gimplify_ctxp->in_cleanup_point_expr = true;
5203 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5205 gimplify_ctxp->conditions = old_conds;
5206 gimplify_ctxp->conditional_cleanups = old_cleanups;
5207 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5209 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5211 gimple wce = gsi_stmt (iter);
5213 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5215 if (gsi_one_before_end_p (iter))
5217 /* Note that gsi_insert_seq_before and gsi_remove do not
5218 scan operands, unlike some other sequence mutators. */
5219 if (!gimple_wce_cleanup_eh_only (wce))
5220 gsi_insert_seq_before_without_update (&iter,
5221 gimple_wce_cleanup (wce),
5222 GSI_SAME_STMT);
5223 gsi_remove (&iter, true);
5224 break;
5226 else
5228 gimple_statement_try *gtry;
5229 gimple_seq seq;
5230 enum gimple_try_flags kind;
5232 if (gimple_wce_cleanup_eh_only (wce))
5233 kind = GIMPLE_TRY_CATCH;
5234 else
5235 kind = GIMPLE_TRY_FINALLY;
5236 seq = gsi_split_seq_after (iter);
5238 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5239 /* Do not use gsi_replace here, as it may scan operands.
5240 We want to do a simple structural modification only. */
5241 gsi_set_stmt (&iter, gtry);
5242 iter = gsi_start (gtry->eval);
5245 else
5246 gsi_next (&iter);
5249 gimplify_seq_add_seq (pre_p, body_sequence);
5250 if (temp)
5252 *expr_p = temp;
5253 return GS_OK;
5255 else
5257 *expr_p = NULL;
5258 return GS_ALL_DONE;
5262 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5263 is the cleanup action required. EH_ONLY is true if the cleanup should
5264 only be executed if an exception is thrown, not on normal exit. */
5266 static void
5267 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5269 gimple wce;
5270 gimple_seq cleanup_stmts = NULL;
5272 /* Errors can result in improperly nested cleanups. Which results in
5273 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5274 if (seen_error ())
5275 return;
5277 if (gimple_conditional_context ())
5279 /* If we're in a conditional context, this is more complex. We only
5280 want to run the cleanup if we actually ran the initialization that
5281 necessitates it, but we want to run it after the end of the
5282 conditional context. So we wrap the try/finally around the
5283 condition and use a flag to determine whether or not to actually
5284 run the destructor. Thus
5286 test ? f(A()) : 0
5288 becomes (approximately)
5290 flag = 0;
5291 try {
5292 if (test) { A::A(temp); flag = 1; val = f(temp); }
5293 else { val = 0; }
5294 } finally {
5295 if (flag) A::~A(temp);
5299 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5300 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5301 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5303 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5304 gimplify_stmt (&cleanup, &cleanup_stmts);
5305 wce = gimple_build_wce (cleanup_stmts);
5307 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5308 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5309 gimplify_seq_add_stmt (pre_p, ftrue);
5311 /* Because of this manipulation, and the EH edges that jump
5312 threading cannot redirect, the temporary (VAR) will appear
5313 to be used uninitialized. Don't warn. */
5314 TREE_NO_WARNING (var) = 1;
5316 else
5318 gimplify_stmt (&cleanup, &cleanup_stmts);
5319 wce = gimple_build_wce (cleanup_stmts);
5320 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5321 gimplify_seq_add_stmt (pre_p, wce);
5325 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5327 static enum gimplify_status
5328 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5330 tree targ = *expr_p;
5331 tree temp = TARGET_EXPR_SLOT (targ);
5332 tree init = TARGET_EXPR_INITIAL (targ);
5333 enum gimplify_status ret;
5335 if (init)
5337 tree cleanup = NULL_TREE;
5339 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5340 to the temps list. Handle also variable length TARGET_EXPRs. */
5341 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5343 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5344 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5345 gimplify_vla_decl (temp, pre_p);
5347 else
5348 gimple_add_tmp_var (temp);
5350 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5351 expression is supposed to initialize the slot. */
5352 if (VOID_TYPE_P (TREE_TYPE (init)))
5353 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5354 else
5356 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5357 init = init_expr;
5358 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5359 init = NULL;
5360 ggc_free (init_expr);
5362 if (ret == GS_ERROR)
5364 /* PR c++/28266 Make sure this is expanded only once. */
5365 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5366 return GS_ERROR;
5368 if (init)
5369 gimplify_and_add (init, pre_p);
5371 /* If needed, push the cleanup for the temp. */
5372 if (TARGET_EXPR_CLEANUP (targ))
5374 if (CLEANUP_EH_ONLY (targ))
5375 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5376 CLEANUP_EH_ONLY (targ), pre_p);
5377 else
5378 cleanup = TARGET_EXPR_CLEANUP (targ);
5381 /* Add a clobber for the temporary going out of scope, like
5382 gimplify_bind_expr. */
5383 if (gimplify_ctxp->in_cleanup_point_expr
5384 && needs_to_live_in_memory (temp)
5385 && flag_stack_reuse == SR_ALL)
5387 tree clobber = build_constructor (TREE_TYPE (temp),
5388 NULL);
5389 TREE_THIS_VOLATILE (clobber) = true;
5390 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5391 if (cleanup)
5392 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5393 clobber);
5394 else
5395 cleanup = clobber;
5398 if (cleanup)
5399 gimple_push_cleanup (temp, cleanup, false, pre_p);
5401 /* Only expand this once. */
5402 TREE_OPERAND (targ, 3) = init;
5403 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5405 else
5406 /* We should have expanded this before. */
5407 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5409 *expr_p = temp;
5410 return GS_OK;
5413 /* Gimplification of expression trees. */
5415 /* Gimplify an expression which appears at statement context. The
5416 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5417 NULL, a new sequence is allocated.
5419 Return true if we actually added a statement to the queue. */
5421 bool
5422 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5424 gimple_seq_node last;
5426 last = gimple_seq_last (*seq_p);
5427 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5428 return last != gimple_seq_last (*seq_p);
5431 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5432 to CTX. If entries already exist, force them to be some flavor of private.
5433 If there is no enclosing parallel, do nothing. */
5435 void
5436 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5438 splay_tree_node n;
5440 if (decl == NULL || !DECL_P (decl))
5441 return;
5445 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5446 if (n != NULL)
5448 if (n->value & GOVD_SHARED)
5449 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5450 else if (n->value & GOVD_MAP)
5451 n->value |= GOVD_MAP_TO_ONLY;
5452 else
5453 return;
5455 else if (ctx->region_type == ORT_TARGET)
5456 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5457 else if (ctx->region_type != ORT_WORKSHARE
5458 && ctx->region_type != ORT_SIMD
5459 && ctx->region_type != ORT_TARGET_DATA)
5460 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5462 ctx = ctx->outer_context;
5464 while (ctx);
5467 /* Similarly for each of the type sizes of TYPE. */
5469 static void
5470 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5472 if (type == NULL || type == error_mark_node)
5473 return;
5474 type = TYPE_MAIN_VARIANT (type);
5476 if (ctx->privatized_types->add (type))
5477 return;
5479 switch (TREE_CODE (type))
5481 case INTEGER_TYPE:
5482 case ENUMERAL_TYPE:
5483 case BOOLEAN_TYPE:
5484 case REAL_TYPE:
5485 case FIXED_POINT_TYPE:
5486 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5487 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5488 break;
5490 case ARRAY_TYPE:
5491 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5492 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5493 break;
5495 case RECORD_TYPE:
5496 case UNION_TYPE:
5497 case QUAL_UNION_TYPE:
5499 tree field;
5500 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5501 if (TREE_CODE (field) == FIELD_DECL)
5503 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5504 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5507 break;
5509 case POINTER_TYPE:
5510 case REFERENCE_TYPE:
5511 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5512 break;
5514 default:
5515 break;
5518 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5519 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5520 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5523 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5525 static void
5526 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5528 splay_tree_node n;
5529 unsigned int nflags;
5530 tree t;
5532 if (error_operand_p (decl))
5533 return;
5535 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5536 there are constructors involved somewhere. */
5537 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5538 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5539 flags |= GOVD_SEEN;
5541 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5542 if (n != NULL && n->value != GOVD_ALIGNED)
5544 /* We shouldn't be re-adding the decl with the same data
5545 sharing class. */
5546 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5547 /* The only combination of data sharing classes we should see is
5548 FIRSTPRIVATE and LASTPRIVATE. */
5549 nflags = n->value | flags;
5550 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5551 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5552 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5553 n->value = nflags;
5554 return;
5557 /* When adding a variable-sized variable, we have to handle all sorts
5558 of additional bits of data: the pointer replacement variable, and
5559 the parameters of the type. */
5560 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5562 /* Add the pointer replacement variable as PRIVATE if the variable
5563 replacement is private, else FIRSTPRIVATE since we'll need the
5564 address of the original variable either for SHARED, or for the
5565 copy into or out of the context. */
5566 if (!(flags & GOVD_LOCAL))
5568 nflags = flags & GOVD_MAP
5569 ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
5570 : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5571 nflags |= flags & GOVD_SEEN;
5572 t = DECL_VALUE_EXPR (decl);
5573 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5574 t = TREE_OPERAND (t, 0);
5575 gcc_assert (DECL_P (t));
5576 omp_add_variable (ctx, t, nflags);
5579 /* Add all of the variable and type parameters (which should have
5580 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5581 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5582 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5583 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5585 /* The variable-sized variable itself is never SHARED, only some form
5586 of PRIVATE. The sharing would take place via the pointer variable
5587 which we remapped above. */
5588 if (flags & GOVD_SHARED)
5589 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5590 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5592 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5593 alloca statement we generate for the variable, so make sure it
5594 is available. This isn't automatically needed for the SHARED
5595 case, since we won't be allocating local storage then.
5596 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5597 in this case omp_notice_variable will be called later
5598 on when it is gimplified. */
5599 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5600 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5601 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5603 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5604 && lang_hooks.decls.omp_privatize_by_reference (decl))
5606 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5608 /* Similar to the direct variable sized case above, we'll need the
5609 size of references being privatized. */
5610 if ((flags & GOVD_SHARED) == 0)
5612 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5613 if (TREE_CODE (t) != INTEGER_CST)
5614 omp_notice_variable (ctx, t, true);
5618 if (n != NULL)
5619 n->value |= flags;
5620 else
5621 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5624 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5625 This just prints out diagnostics about threadprivate variable uses
5626 in untied tasks. If DECL2 is non-NULL, prevent this warning
5627 on that variable. */
5629 static bool
5630 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5631 tree decl2)
5633 splay_tree_node n;
5634 struct gimplify_omp_ctx *octx;
5636 for (octx = ctx; octx; octx = octx->outer_context)
5637 if (octx->region_type == ORT_TARGET)
5639 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5640 if (n == NULL)
5642 error ("threadprivate variable %qE used in target region",
5643 DECL_NAME (decl));
5644 error_at (octx->location, "enclosing target region");
5645 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5647 if (decl2)
5648 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5651 if (ctx->region_type != ORT_UNTIED_TASK)
5652 return false;
5653 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5654 if (n == NULL)
5656 error ("threadprivate variable %qE used in untied task",
5657 DECL_NAME (decl));
5658 error_at (ctx->location, "enclosing task");
5659 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5661 if (decl2)
5662 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5663 return false;
5666 /* Record the fact that DECL was used within the OpenMP context CTX.
5667 IN_CODE is true when real code uses DECL, and false when we should
5668 merely emit default(none) errors. Return true if DECL is going to
5669 be remapped and thus DECL shouldn't be gimplified into its
5670 DECL_VALUE_EXPR (if any). */
5672 static bool
5673 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5675 splay_tree_node n;
5676 unsigned flags = in_code ? GOVD_SEEN : 0;
5677 bool ret = false, shared;
5679 if (error_operand_p (decl))
5680 return false;
5682 /* Threadprivate variables are predetermined. */
5683 if (is_global_var (decl))
5685 if (DECL_THREAD_LOCAL_P (decl))
5686 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5688 if (DECL_HAS_VALUE_EXPR_P (decl))
5690 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5692 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5693 return omp_notice_threadprivate_variable (ctx, decl, value);
5697 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5698 if (ctx->region_type == ORT_TARGET)
5700 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5701 if (n == NULL)
5703 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5705 error ("%qD referenced in target region does not have "
5706 "a mappable type", decl);
5707 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5709 else
5710 omp_add_variable (ctx, decl, GOVD_MAP | flags);
5712 else
5714 /* If nothing changed, there's nothing left to do. */
5715 if ((n->value & flags) == flags)
5716 return ret;
5717 n->value |= flags;
5719 goto do_outer;
5722 if (n == NULL)
5724 enum omp_clause_default_kind default_kind, kind;
5725 struct gimplify_omp_ctx *octx;
5727 if (ctx->region_type == ORT_WORKSHARE
5728 || ctx->region_type == ORT_SIMD
5729 || ctx->region_type == ORT_TARGET_DATA)
5730 goto do_outer;
5732 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5733 remapped firstprivate instead of shared. To some extent this is
5734 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5735 default_kind = ctx->default_kind;
5736 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5737 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5738 default_kind = kind;
5740 switch (default_kind)
5742 case OMP_CLAUSE_DEFAULT_NONE:
5743 if ((ctx->region_type & ORT_PARALLEL) != 0)
5745 error ("%qE not specified in enclosing parallel",
5746 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5747 error_at (ctx->location, "enclosing parallel");
5749 else if ((ctx->region_type & ORT_TASK) != 0)
5751 error ("%qE not specified in enclosing task",
5752 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5753 error_at (ctx->location, "enclosing task");
5755 else if (ctx->region_type == ORT_TEAMS)
5757 error ("%qE not specified in enclosing teams construct",
5758 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5759 error_at (ctx->location, "enclosing teams construct");
5761 else
5762 gcc_unreachable ();
5763 /* FALLTHRU */
5764 case OMP_CLAUSE_DEFAULT_SHARED:
5765 flags |= GOVD_SHARED;
5766 break;
5767 case OMP_CLAUSE_DEFAULT_PRIVATE:
5768 flags |= GOVD_PRIVATE;
5769 break;
5770 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5771 flags |= GOVD_FIRSTPRIVATE;
5772 break;
5773 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5774 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5775 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5776 if (ctx->outer_context)
5777 omp_notice_variable (ctx->outer_context, decl, in_code);
5778 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5780 splay_tree_node n2;
5782 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5783 continue;
5784 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5785 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5787 flags |= GOVD_FIRSTPRIVATE;
5788 break;
5790 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5791 break;
5793 if (flags & GOVD_FIRSTPRIVATE)
5794 break;
5795 if (octx == NULL
5796 && (TREE_CODE (decl) == PARM_DECL
5797 || (!is_global_var (decl)
5798 && DECL_CONTEXT (decl) == current_function_decl)))
5800 flags |= GOVD_FIRSTPRIVATE;
5801 break;
5803 flags |= GOVD_SHARED;
5804 break;
5805 default:
5806 gcc_unreachable ();
5809 if ((flags & GOVD_PRIVATE)
5810 && lang_hooks.decls.omp_private_outer_ref (decl))
5811 flags |= GOVD_PRIVATE_OUTER_REF;
5813 omp_add_variable (ctx, decl, flags);
5815 shared = (flags & GOVD_SHARED) != 0;
5816 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5817 goto do_outer;
5820 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5821 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5822 && DECL_SIZE (decl)
5823 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5825 splay_tree_node n2;
5826 tree t = DECL_VALUE_EXPR (decl);
5827 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5828 t = TREE_OPERAND (t, 0);
5829 gcc_assert (DECL_P (t));
5830 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5831 n2->value |= GOVD_SEEN;
5834 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5835 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5837 /* If nothing changed, there's nothing left to do. */
5838 if ((n->value & flags) == flags)
5839 return ret;
5840 flags |= n->value;
5841 n->value = flags;
5843 do_outer:
5844 /* If the variable is private in the current context, then we don't
5845 need to propagate anything to an outer context. */
5846 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5847 return ret;
5848 if (ctx->outer_context
5849 && omp_notice_variable (ctx->outer_context, decl, in_code))
5850 return true;
5851 return ret;
5854 /* Verify that DECL is private within CTX. If there's specific information
5855 to the contrary in the innermost scope, generate an error. */
5857 static bool
5858 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
5860 splay_tree_node n;
5862 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5863 if (n != NULL)
5865 if (n->value & GOVD_SHARED)
5867 if (ctx == gimplify_omp_ctxp)
5869 if (simd)
5870 error ("iteration variable %qE is predetermined linear",
5871 DECL_NAME (decl));
5872 else
5873 error ("iteration variable %qE should be private",
5874 DECL_NAME (decl));
5875 n->value = GOVD_PRIVATE;
5876 return true;
5878 else
5879 return false;
5881 else if ((n->value & GOVD_EXPLICIT) != 0
5882 && (ctx == gimplify_omp_ctxp
5883 || (ctx->region_type == ORT_COMBINED_PARALLEL
5884 && gimplify_omp_ctxp->outer_context == ctx)))
5886 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5887 error ("iteration variable %qE should not be firstprivate",
5888 DECL_NAME (decl));
5889 else if ((n->value & GOVD_REDUCTION) != 0)
5890 error ("iteration variable %qE should not be reduction",
5891 DECL_NAME (decl));
5892 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
5893 error ("iteration variable %qE should not be lastprivate",
5894 DECL_NAME (decl));
5895 else if (simd && (n->value & GOVD_PRIVATE) != 0)
5896 error ("iteration variable %qE should not be private",
5897 DECL_NAME (decl));
5898 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
5899 error ("iteration variable %qE is predetermined linear",
5900 DECL_NAME (decl));
5902 return (ctx == gimplify_omp_ctxp
5903 || (ctx->region_type == ORT_COMBINED_PARALLEL
5904 && gimplify_omp_ctxp->outer_context == ctx));
5907 if (ctx->region_type != ORT_WORKSHARE
5908 && ctx->region_type != ORT_SIMD)
5909 return false;
5910 else if (ctx->outer_context)
5911 return omp_is_private (ctx->outer_context, decl, simd);
5912 return false;
5915 /* Return true if DECL is private within a parallel region
5916 that binds to the current construct's context or in parallel
5917 region's REDUCTION clause. */
5919 static bool
5920 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
5922 splay_tree_node n;
5926 ctx = ctx->outer_context;
5927 if (ctx == NULL)
5928 return !(is_global_var (decl)
5929 /* References might be private, but might be shared too,
5930 when checking for copyprivate, assume they might be
5931 private, otherwise assume they might be shared. */
5932 || (!copyprivate
5933 && lang_hooks.decls.omp_privatize_by_reference (decl)));
5935 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
5936 continue;
5938 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5939 if (n != NULL)
5940 return (n->value & GOVD_SHARED) == 0;
5942 while (ctx->region_type == ORT_WORKSHARE
5943 || ctx->region_type == ORT_SIMD);
5944 return false;
5947 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5948 and previous omp contexts. */
5950 static void
5951 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5952 enum omp_region_type region_type)
5954 struct gimplify_omp_ctx *ctx, *outer_ctx;
5955 tree c;
5957 ctx = new_omp_context (region_type);
5958 outer_ctx = ctx->outer_context;
5960 while ((c = *list_p) != NULL)
5962 bool remove = false;
5963 bool notice_outer = true;
5964 const char *check_non_private = NULL;
5965 unsigned int flags;
5966 tree decl;
5968 switch (OMP_CLAUSE_CODE (c))
5970 case OMP_CLAUSE_PRIVATE:
5971 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5972 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5974 flags |= GOVD_PRIVATE_OUTER_REF;
5975 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5977 else
5978 notice_outer = false;
5979 goto do_add;
5980 case OMP_CLAUSE_SHARED:
5981 flags = GOVD_SHARED | GOVD_EXPLICIT;
5982 goto do_add;
5983 case OMP_CLAUSE_FIRSTPRIVATE:
5984 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5985 check_non_private = "firstprivate";
5986 goto do_add;
5987 case OMP_CLAUSE_LASTPRIVATE:
5988 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5989 check_non_private = "lastprivate";
5990 goto do_add;
5991 case OMP_CLAUSE_REDUCTION:
5992 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5993 check_non_private = "reduction";
5994 goto do_add;
5995 case OMP_CLAUSE_LINEAR:
5996 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
5997 is_gimple_val, fb_rvalue) == GS_ERROR)
5999 remove = true;
6000 break;
6002 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6003 goto do_add;
6005 case OMP_CLAUSE_MAP:
6006 decl = OMP_CLAUSE_DECL (c);
6007 if (error_operand_p (decl))
6009 remove = true;
6010 break;
6012 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6013 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6014 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6015 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6016 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6018 remove = true;
6019 break;
6021 if (!DECL_P (decl))
6023 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6024 NULL, is_gimple_lvalue, fb_lvalue)
6025 == GS_ERROR)
6027 remove = true;
6028 break;
6030 break;
6032 flags = GOVD_MAP | GOVD_EXPLICIT;
6033 goto do_add;
6035 case OMP_CLAUSE_DEPEND:
6036 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
6038 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
6039 NULL, is_gimple_val, fb_rvalue);
6040 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
6042 if (error_operand_p (OMP_CLAUSE_DECL (c)))
6044 remove = true;
6045 break;
6047 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
6048 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
6049 is_gimple_val, fb_rvalue) == GS_ERROR)
6051 remove = true;
6052 break;
6054 break;
6056 case OMP_CLAUSE_TO:
6057 case OMP_CLAUSE_FROM:
6058 decl = OMP_CLAUSE_DECL (c);
6059 if (error_operand_p (decl))
6061 remove = true;
6062 break;
6064 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6065 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6066 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6067 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6068 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6070 remove = true;
6071 break;
6073 if (!DECL_P (decl))
6075 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6076 NULL, is_gimple_lvalue, fb_lvalue)
6077 == GS_ERROR)
6079 remove = true;
6080 break;
6082 break;
6084 goto do_notice;
6086 do_add:
6087 decl = OMP_CLAUSE_DECL (c);
6088 if (error_operand_p (decl))
6090 remove = true;
6091 break;
6093 omp_add_variable (ctx, decl, flags);
6094 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6095 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6097 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
6098 GOVD_LOCAL | GOVD_SEEN);
6099 gimplify_omp_ctxp = ctx;
6100 push_gimplify_context ();
6102 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6103 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6105 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6106 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6107 pop_gimplify_context
6108 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
6109 push_gimplify_context ();
6110 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6111 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6112 pop_gimplify_context
6113 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6114 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6115 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6117 gimplify_omp_ctxp = outer_ctx;
6119 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6120 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6122 gimplify_omp_ctxp = ctx;
6123 push_gimplify_context ();
6124 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6126 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6127 NULL, NULL);
6128 TREE_SIDE_EFFECTS (bind) = 1;
6129 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6130 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6132 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6133 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6134 pop_gimplify_context
6135 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6136 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6138 gimplify_omp_ctxp = outer_ctx;
6140 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6141 && OMP_CLAUSE_LINEAR_STMT (c))
6143 gimplify_omp_ctxp = ctx;
6144 push_gimplify_context ();
6145 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
6147 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6148 NULL, NULL);
6149 TREE_SIDE_EFFECTS (bind) = 1;
6150 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
6151 OMP_CLAUSE_LINEAR_STMT (c) = bind;
6153 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
6154 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6155 pop_gimplify_context
6156 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
6157 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
6159 gimplify_omp_ctxp = outer_ctx;
6161 if (notice_outer)
6162 goto do_notice;
6163 break;
6165 case OMP_CLAUSE_COPYIN:
6166 case OMP_CLAUSE_COPYPRIVATE:
6167 decl = OMP_CLAUSE_DECL (c);
6168 if (error_operand_p (decl))
6170 remove = true;
6171 break;
6173 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
6174 && !remove
6175 && !omp_check_private (ctx, decl, true))
6177 remove = true;
6178 if (is_global_var (decl))
6180 if (DECL_THREAD_LOCAL_P (decl))
6181 remove = false;
6182 else if (DECL_HAS_VALUE_EXPR_P (decl))
6184 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6186 if (value
6187 && DECL_P (value)
6188 && DECL_THREAD_LOCAL_P (value))
6189 remove = false;
6192 if (remove)
6193 error_at (OMP_CLAUSE_LOCATION (c),
6194 "copyprivate variable %qE is not threadprivate"
6195 " or private in outer context", DECL_NAME (decl));
6197 do_notice:
6198 if (outer_ctx)
6199 omp_notice_variable (outer_ctx, decl, true);
6200 if (check_non_private
6201 && region_type == ORT_WORKSHARE
6202 && omp_check_private (ctx, decl, false))
6204 error ("%s variable %qE is private in outer context",
6205 check_non_private, DECL_NAME (decl));
6206 remove = true;
6208 break;
6210 case OMP_CLAUSE_FINAL:
6211 case OMP_CLAUSE_IF:
6212 OMP_CLAUSE_OPERAND (c, 0)
6213 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6214 /* Fall through. */
6216 case OMP_CLAUSE_SCHEDULE:
6217 case OMP_CLAUSE_NUM_THREADS:
6218 case OMP_CLAUSE_NUM_TEAMS:
6219 case OMP_CLAUSE_THREAD_LIMIT:
6220 case OMP_CLAUSE_DIST_SCHEDULE:
6221 case OMP_CLAUSE_DEVICE:
6222 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6223 is_gimple_val, fb_rvalue) == GS_ERROR)
6224 remove = true;
6225 break;
6227 case OMP_CLAUSE_NOWAIT:
6228 case OMP_CLAUSE_ORDERED:
6229 case OMP_CLAUSE_UNTIED:
6230 case OMP_CLAUSE_COLLAPSE:
6231 case OMP_CLAUSE_MERGEABLE:
6232 case OMP_CLAUSE_PROC_BIND:
6233 case OMP_CLAUSE_SAFELEN:
6234 break;
6236 case OMP_CLAUSE_ALIGNED:
6237 decl = OMP_CLAUSE_DECL (c);
6238 if (error_operand_p (decl))
6240 remove = true;
6241 break;
6243 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
6244 is_gimple_val, fb_rvalue) == GS_ERROR)
6246 remove = true;
6247 break;
6249 if (!is_global_var (decl)
6250 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6251 omp_add_variable (ctx, decl, GOVD_ALIGNED);
6252 break;
6254 case OMP_CLAUSE_DEFAULT:
6255 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6256 break;
6258 default:
6259 gcc_unreachable ();
6262 if (remove)
6263 *list_p = OMP_CLAUSE_CHAIN (c);
6264 else
6265 list_p = &OMP_CLAUSE_CHAIN (c);
6268 gimplify_omp_ctxp = ctx;
/* Bundle of state threaded through splay_tree_foreach's single void *
   argument into gimplify_adjust_omp_clauses_1.  */
struct gimplify_adjust_omp_clauses_data
{
  /* Head of the clause chain to which implicit clauses get prepended.  */
  tree *list_p;
  /* Sequence receiving any statements emitted while finishing clauses.  */
  gimple_seq *pre_p;
};
6277 /* For all variables that were not actually used within the context,
6278 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Variables with an explicit clause, or ones local to the region, need
     no implicit clause.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Only variables actually referenced in the region get a clause.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    /* Let the front end decide whether a notionally-shared variable
       should be emitted as a debug-only private clause.  */
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    code = OMP_CLAUSE_MAP;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is only worth an explicit shared clause if some
	     enclosing context privatizes it; otherwise it is shared by
	     default anyway and we can omit the clause.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    return 0;
  else
    gcc_unreachable ();

  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = *list_p;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_MAP)
    {
      OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
				     ? OMP_CLAUSE_MAP_TO
				     : OMP_CLAUSE_MAP_TOFROM;
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: map the underlying storage through its
	     DECL_VALUE_EXPR (an INDIRECT_REF of a pointer decl), and add
	     a companion MAP_POINTER clause for the pointer itself.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* Both first- and lastprivate: emit a paired LASTPRIVATE clause
	 marked as coming from a FIRSTPRIVATE.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = *list_p;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* omp_finish_clause must run in the outer context, so swap
	 gimplify_omp_ctxp around the hook call.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  gimplify_omp_ctxp = ctx;
  return 0;
}
/* Post-gimplification fixup of the clause chain at *LIST_P for the
   innermost OpenMP context: drop clauses for unused variables, repair
   variable-sized MAP/TO/FROM clauses, then append implicit data-sharing
   clauses via gimplify_adjust_omp_clauses_1 and pop the context.  */
static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Clauses for variables never referenced in the body are
	     removed entirely.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  /* Downgrade to a debug-only private clause.  */
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		  && ctx->outer_context
		  && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		       && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		{
		  /* A linear clause that copies in or out must be
		     reflected in the enclosing combined loop.  */
		  if (ctx->outer_context->combined_loop
		      && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		    {
		      n = splay_tree_lookup (ctx->outer_context->variables,
					     (splay_tree_key) decl);
		      if (n == NULL
			  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
			{
			  int flags = GOVD_FIRSTPRIVATE;
			  /* #pragma omp distribute does not allow
			     lastprivate clause.  */
			  if (!ctx->outer_context->distribute)
			    flags |= GOVD_LASTPRIVATE;
			  if (n == NULL)
			    omp_add_variable (ctx->outer_context, decl,
					      flags | GOVD_SEEN);
			  else
			    n->value |= flags | GOVD_SEEN;
			}
		    }
		  else if (!is_global_var (decl))
		    omp_notice_variable (ctx->outer_context, decl, true);
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
	    {
	      /* Variable-sized decl: rewrite the clause to map the
		 pointed-to storage and chain on an extra MAP_POINTER
		 clause for the pointer (cf. the same transformation in
		 gimplify_adjust_omp_clauses_1).  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					  OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (nc) = decl;
	      OMP_CLAUSE_SIZE (nc) = size_zero_node;
	      OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
	      OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
	      OMP_CLAUSE_CHAIN (c) = nc;
	      /* Skip over the freshly inserted clause.  */
	      c = nc;
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Same variable-size rewrite as for MAP, but no companion
		 pointer clause is needed for TO/FROM.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	/* These clauses need no adjustment here.  */
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_DEPEND:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
6620 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6621 gimplification of the body, as well as scanning the body for used
6622 variables. We need to do this scan now, because variable-sized
6623 decls will be decomposed during gimplification. */
6625 static void
6626 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6628 tree expr = *expr_p;
6629 gimple g;
6630 gimple_seq body = NULL;
6632 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6633 OMP_PARALLEL_COMBINED (expr)
6634 ? ORT_COMBINED_PARALLEL
6635 : ORT_PARALLEL);
6637 push_gimplify_context ();
6639 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6640 if (gimple_code (g) == GIMPLE_BIND)
6641 pop_gimplify_context (g);
6642 else
6643 pop_gimplify_context (NULL);
6645 gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr));
6647 g = gimple_build_omp_parallel (body,
6648 OMP_PARALLEL_CLAUSES (expr),
6649 NULL_TREE, NULL_TREE);
6650 if (OMP_PARALLEL_COMBINED (expr))
6651 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6652 gimplify_seq_add_stmt (pre_p, g);
6653 *expr_p = NULL_TREE;
6656 /* Gimplify the contents of an OMP_TASK statement. This involves
6657 gimplification of the body, as well as scanning the body for used
6658 variables. We need to do this scan now, because variable-sized
6659 decls will be decomposed during gimplification. */
6661 static void
6662 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6664 tree expr = *expr_p;
6665 gimple g;
6666 gimple_seq body = NULL;
6668 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6669 find_omp_clause (OMP_TASK_CLAUSES (expr),
6670 OMP_CLAUSE_UNTIED)
6671 ? ORT_UNTIED_TASK : ORT_TASK);
6673 push_gimplify_context ();
6675 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6676 if (gimple_code (g) == GIMPLE_BIND)
6677 pop_gimplify_context (g);
6678 else
6679 pop_gimplify_context (NULL);
6681 gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr));
6683 g = gimple_build_omp_task (body,
6684 OMP_TASK_CLAUSES (expr),
6685 NULL_TREE, NULL_TREE,
6686 NULL_TREE, NULL_TREE, NULL_TREE);
6687 gimplify_seq_add_stmt (pre_p, g);
6688 *expr_p = NULL_TREE;
6691 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6692 with non-NULL OMP_FOR_INIT. */
6694 static tree
6695 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6697 *walk_subtrees = 0;
6698 switch (TREE_CODE (*tp))
6700 case OMP_FOR:
6701 *walk_subtrees = 1;
6702 /* FALLTHRU */
6703 case OMP_SIMD:
6704 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6705 return *tp;
6706 break;
6707 case BIND_EXPR:
6708 case STATEMENT_LIST:
6709 case OMP_PARALLEL:
6710 *walk_subtrees = 1;
6711 break;
6712 default:
6713 break;
6715 return NULL_TREE;
6718 /* Gimplify the gross structure of an OMP_FOR statement. */
static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, orig_for_stmt, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gimple gfor;
  gimple_seq for_body, for_pre_body;
  int i;
  bool simd;
  bitmap has_decl_expr = NULL;	/* UIDs of iterators declared in the pre-body.  */

  orig_for_stmt = for_stmt = *expr_p;

  simd = (TREE_CODE (for_stmt) == OMP_SIMD
	  || TREE_CODE (for_stmt) == CILK_SIMD);
  gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
			     simd ? ORT_SIMD : ORT_WORKSHARE);
  if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
    gimplify_omp_ctxp->distribute = true;

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  if (simd && OMP_FOR_PRE_BODY (for_stmt))
    {
      /* Record which iteration variables were declared inside the
	 pre-body; those do not need copy-out for simd.  */
      has_decl_expr = BITMAP_ALLOC (NULL);
      if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
	     == VAR_DECL)
	{
	  t = OMP_FOR_PRE_BODY (for_stmt);
	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	}
      else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
	{
	  tree_stmt_iterator si;
	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
	       tsi_next (&si))
	    {
	      t = tsi_stmt (si);
	      if (TREE_CODE (t) == DECL_EXPR
		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	    }
	}
    }
  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    {
      /* Combined construct: the real loop with the iteration spec is
	 nested somewhere inside the body; find it.  */
      for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
			    NULL, NULL);
      gcc_assert (for_stmt != NULL_TREE);
      gimplify_omp_ctxp->combined_loop = true;
    }

  for_body = NULL;
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
  /* One iteration of this loop per collapsed loop dimension.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));

      /* Make sure the iteration variable is private.  */
      tree c = NULL_TREE;
      tree c2 = NULL_TREE;
      if (orig_for_stmt != for_stmt)
	/* Do this only on innermost construct for combined ones.  */;
      else if (simd)
	{
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key)decl);
	  omp_is_private (gimplify_omp_ctxp, decl,
			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
			       != 1));
	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
	    omp_notice_variable (gimplify_omp_ctxp, decl, true);
	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
	    {
	      /* Non-collapsed simd: the iterator becomes linear.  */
	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	      if (has_decl_expr
		  && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
		OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl,
				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
	    }
	  else
	    {
	      /* Collapsed simd: lastprivate unless the iterator was
		 declared in the loop pre-body.  */
	      bool lastprivate
		= (!has_decl_expr
		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
	      if (lastprivate
		  && gimplify_omp_ctxp->outer_context
		  && gimplify_omp_ctxp->outer_context->region_type
		     == ORT_WORKSHARE
		  && gimplify_omp_ctxp->outer_context->combined_loop
		  && !gimplify_omp_ctxp->outer_context->distribute)
		{
		  struct gimplify_omp_ctx *outer
		    = gimplify_omp_ctxp->outer_context;
		  n = splay_tree_lookup (outer->variables,
					 (splay_tree_key) decl);
		  if (n != NULL
		      && (n->value & GOVD_DATA_SHARE_CLASS) == GOVD_LOCAL)
		    lastprivate = false;
		  else if (omp_check_private (outer, decl, false))
		    error ("lastprivate variable %qE is private in outer "
			   "context", DECL_NAME (decl));
		  else
		    {
		      omp_add_variable (outer, decl,
					GOVD_LASTPRIVATE | GOVD_SEEN);
		      if (outer->outer_context)
			omp_notice_variable (outer->outer_context, decl, true);
		    }
		}
	      c = build_omp_clause (input_location,
				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
						: OMP_CLAUSE_PRIVATE);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl,
				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
				| GOVD_EXPLICIT | GOVD_SEEN);
	      c = NULL_TREE;
	    }
	}
      else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
	omp_notice_variable (gimplify_omp_ctxp, decl, true);
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  Similarly for any iteration vars
	 in simd with collapse > 1 where the iterator vars must be
	 lastprivate.  */
      if (orig_for_stmt != for_stmt)
	var = decl;
      else if (!is_gimple_reg (decl)
	       || (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
	{
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  TREE_OPERAND (t, 0) = var;

	  /* Copy the counter back to DECL at the top of the loop body.  */
	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
	    {
	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
	      OMP_CLAUSE_DECL (c2) = var;
	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c2;
	      omp_add_variable (gimplify_omp_ctxp, var,
				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
	      if (c == NULL_TREE)
		{
		  c = c2;
		  c2 = NULL_TREE;
		}
	    }
	  else
	    omp_add_variable (gimplify_omp_ctxp, var,
			      GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  {
	    tree decl = TREE_OPERAND (t, 0);
	    /* c_omp_for_incr_canonicalize_ptr() should have been
	       called to massage things appropriately.  */
	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));

	    if (orig_for_stmt != for_stmt)
	      break;
	    /* Canonicalize ++ into VAR = VAR + 1.  */
	    t = build_int_cst (TREE_TYPE (decl), 1);
	    if (c)
	      OMP_CLAUSE_LINEAR_STEP (c) = t;
	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	    break;
	  }

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* c_omp_for_incr_canonicalize_ptr() should have been
	     called to massage things appropriately.  */
	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
	  if (orig_for_stmt != for_stmt)
	    break;
	  /* Canonicalize -- into VAR = VAR + -1.  */
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  if (c)
	    OMP_CLAUSE_LINEAR_STEP (c) = t;
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  /* step + decl: swap operands so DECL (now VAR) is
		     always operand 0.  */
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  if (c)
	    {
	      /* Propagate the (possibly negated) step into the implicit
		 linear clause built above.  */
	      tree step = TREE_OPERAND (t, 1);
	      tree stept = TREE_TYPE (decl);
	      if (POINTER_TYPE_P (stept))
		stept = sizetype;
	      step = fold_convert (stept, step);
	      if (TREE_CODE (t) == MINUS_EXPR)
		step = fold_build1 (NEGATE_EXPR, stept, step);
	      OMP_CLAUSE_LINEAR_STEP (c) = step;
	      if (step != TREE_OPERAND (t, 1))
		{
		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
					&for_pre_body, NULL,
					is_gimple_val, fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  break;

	default:
	  gcc_unreachable ();
	}

      if (c2)
	{
	  gcc_assert (c);
	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
	}

      if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
	  && orig_for_stmt == for_stmt)
	{
	  /* When a temporary counter replaced DECL, the lastprivate /
	     copy-out linear clause must compute DECL's final value from
	     DECL itself, one step past the last body iteration.  */
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
		&& OMP_CLAUSE_DECL (c) == decl)
	      {
		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = TREE_OPERAND (t, 1);
		gcc_assert (TREE_CODE (t) == PLUS_EXPR
			    || TREE_CODE (t) == MINUS_EXPR
			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
			    TREE_OPERAND (t, 1));
		gimple_seq *seq;
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
		else
		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
		gimplify_assign (decl, t, seq);
	      }
	}
    }

  BITMAP_FREE (has_decl_expr);

  gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);

  if (orig_for_stmt != for_stmt)
    /* Combined construct: give the outer wrapper fresh private counters
       and point its increments at them.  */
    for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
      {
	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	decl = TREE_OPERAND (t, 0);
	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	TREE_OPERAND (t, 0) = var;
	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
      }

  gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt));

  int kind;
  switch (TREE_CODE (orig_for_stmt))
    {
    case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
    case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
    case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
    case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
    default:
      gcc_unreachable ();
    }
  gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);
  if (orig_for_stmt != for_stmt)
    gimple_omp_for_set_combined_p (gfor, true);
  if (gimplify_omp_ctxp
      && (gimplify_omp_ctxp->combined_loop
	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
	      && gimplify_omp_ctxp->outer_context
	      && gimplify_omp_ctxp->outer_context->combined_loop)))
    {
      gimple_omp_for_set_combined_into_p (gfor, true);
      if (gimplify_omp_ctxp->combined_loop)
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
      else
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
    }

  /* Transfer the per-dimension init/cond/incr trees onto the GIMPLE stmt.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  gimplify_seq_add_stmt (pre_p, gfor);
  if (ret != GS_ALL_DONE)
    return GS_ERROR;
  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
7111 /* Gimplify the gross structure of other OpenMP constructs.
7112 In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
7113 and OMP_TEAMS. */
static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort = ORT_WORKSHARE;

  /* Map the tree code to the region type used for clause scanning.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      break;
    case OMP_TARGET:
      ort = ORT_TARGET;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = ORT_TEAMS;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
  if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
    {
      /* Target regions get their own gimplify context for the body.  */
      push_gimplify_context ();
      gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if (ort == ORT_TARGET_DATA)
	{
	  /* Wrap the body in try/finally so GOMP_target_end_data runs
	     on every exit path from the data region.  */
	  gimple_seq cleanup = NULL;
	  tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
	  g = gimple_build_call (fn, 0);
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr));

  /* Build the GIMPLE statement matching the construct.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
7191 /* Gimplify the gross structure of OpenMP target update construct. */
7193 static void
7194 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
7196 tree expr = *expr_p;
7197 gimple stmt;
7199 gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
7200 ORT_WORKSHARE);
7201 gimplify_adjust_omp_clauses (pre_p, &OMP_TARGET_UPDATE_CLAUSES (expr));
7202 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
7203 OMP_TARGET_UPDATE_CLAUSES (expr));
7205 gimplify_seq_add_stmt (pre_p, stmt);
7206 *expr_p = NULL_TREE;
7209 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
7210 stabilized the lhs of the atomic operation as *ADDR. Return true if
7211 EXPR is this stabilized form. */
7213 static bool
7214 goa_lhs_expr_p (tree expr, tree addr)
7216 /* Also include casts to other type variants. The C front end is fond
7217 of adding these for e.g. volatile variables. This is like
7218 STRIP_TYPE_NOPS but includes the main variant lookup. */
7219 STRIP_USELESS_TYPE_CONVERSION (expr);
7221 if (TREE_CODE (expr) == INDIRECT_REF)
7223 expr = TREE_OPERAND (expr, 0);
7224 while (expr != addr
7225 && (CONVERT_EXPR_P (expr)
7226 || TREE_CODE (expr) == NON_LVALUE_EXPR)
7227 && TREE_CODE (expr) == TREE_CODE (addr)
7228 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
7230 expr = TREE_OPERAND (expr, 0);
7231 addr = TREE_OPERAND (addr, 0);
7233 if (expr == addr)
7234 return true;
7235 return (TREE_CODE (addr) == ADDR_EXPR
7236 && TREE_CODE (expr) == ADDR_EXPR
7237 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
7239 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
7240 return true;
7241 return false;
7244 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
7245 expression does not involve the lhs, evaluate it into a temporary.
7246 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
7247 or -1 if an error was encountered. */
static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the stabilized lhs: substitute the
     temporary loaded value.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU: binary ops also recurse on operand 0.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU: two-operand truth ops also recurse on operand 0.  */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* No appearance of the lhs in this subexpression: evaluate it into a
     temporary before the atomic operation.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
7315 /* Gimplify an OMP_ATOMIC statement. */
7317 static enum gimplify_status
7318 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
7320 tree addr = TREE_OPERAND (*expr_p, 0);
7321 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
7322 ? NULL : TREE_OPERAND (*expr_p, 1);
7323 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7324 tree tmp_load;
7325 gimple loadstmt, storestmt;
7327 tmp_load = create_tmp_reg (type, NULL);
7328 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
7329 return GS_ERROR;
7331 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
7332 != GS_ALL_DONE)
7333 return GS_ERROR;
7335 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
7336 gimplify_seq_add_stmt (pre_p, loadstmt);
7337 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
7338 != GS_ALL_DONE)
7339 return GS_ERROR;
7341 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
7342 rhs = tmp_load;
7343 storestmt = gimple_build_omp_atomic_store (rhs);
7344 gimplify_seq_add_stmt (pre_p, storestmt);
7345 if (OMP_ATOMIC_SEQ_CST (*expr_p))
7347 gimple_omp_atomic_set_seq_cst (loadstmt);
7348 gimple_omp_atomic_set_seq_cst (storestmt);
7350 switch (TREE_CODE (*expr_p))
7352 case OMP_ATOMIC_READ:
7353 case OMP_ATOMIC_CAPTURE_OLD:
7354 *expr_p = tmp_load;
7355 gimple_omp_atomic_set_need_value (loadstmt);
7356 break;
7357 case OMP_ATOMIC_CAPTURE_NEW:
7358 *expr_p = rhs;
7359 gimple_omp_atomic_set_need_value (storestmt);
7360 break;
7361 default:
7362 *expr_p = NULL;
7363 break;
7366 return GS_ALL_DONE;
7369 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
7370 body, and adding some EH bits. */
7372 static enum gimplify_status
7373 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7375 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7376 gimple g;
7377 gimple_seq body = NULL;
7378 int subcode = 0;
7380 /* Wrap the transaction body in a BIND_EXPR so we have a context
7381 where to put decls for OpenMP. */
7382 if (TREE_CODE (tbody) != BIND_EXPR)
7384 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7385 TREE_SIDE_EFFECTS (bind) = 1;
7386 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7387 TRANSACTION_EXPR_BODY (expr) = bind;
7390 push_gimplify_context ();
7391 temp = voidify_wrapper_expr (*expr_p, NULL);
7393 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7394 pop_gimplify_context (g);
7396 g = gimple_build_transaction (body, NULL);
7397 if (TRANSACTION_EXPR_OUTER (expr))
7398 subcode = GTMA_IS_OUTER;
7399 else if (TRANSACTION_EXPR_RELAXED (expr))
7400 subcode = GTMA_IS_RELAXED;
7401 gimple_transaction_set_subcode (g, subcode);
7403 gimplify_seq_add_stmt (pre_p, g);
7405 if (temp)
7407 *expr_p = temp;
7408 return GS_OK;
7411 *expr_p = NULL_TREE;
7412 return GS_ALL_DONE;
7415 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
7416 expression produces a value to be used as an operand inside a GIMPLE
7417 statement, the value will be stored back in *EXPR_P. This value will
7418 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7419 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7420 emitted in PRE_P and POST_P.
7422 Additionally, this process may overwrite parts of the input
7423 expression during gimplification. Ideally, it should be
7424 possible to do non-destructive gimplification.
7426 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7427 the expression needs to evaluate to a value to be used as
7428 an operand in a GIMPLE statement, this value will be stored in
7429 *EXPR_P on exit. This happens when the caller specifies one
7430 of fb_lvalue or fb_rvalue fallback flags.
7432 PRE_P will contain the sequence of GIMPLE statements corresponding
7433 to the evaluation of EXPR and all the side-effects that must
7434 be executed before the main expression. On exit, the last
7435 statement of PRE_P is the core statement being gimplified. For
7436 instance, when gimplifying 'if (++a)' the last statement in
7437 PRE_P will be 'if (t.1)' where t.1 is the result of
7438 pre-incrementing 'a'.
7440 POST_P will contain the sequence of GIMPLE statements corresponding
7441 to the evaluation of all the side-effects that must be executed
7442 after the main expression. If this is NULL, the post
7443 side-effects are stored at the end of PRE_P.
7445 The reason why the output is split in two is to handle post
7446 side-effects explicitly. In some cases, an expression may have
7447 inner and outer post side-effects which need to be emitted in
7448 an order different from the one given by the recursive
7449 traversal. For instance, for the expression (*p--)++ the post
7450 side-effects of '--' must actually occur *after* the post
7451 side-effects of '++'. However, gimplification will first visit
7452 the inner expression, so if a separate POST sequence was not
7453 used, the resulting sequence would be:
7455 1 t.1 = *p
7456 2 p = p - 1
7457 3 t.2 = t.1 + 1
7458 4 *p = t.2
7460 However, the post-decrement operation in line #2 must not be
7461 evaluated until after the store to *p at line #4, so the
7462 correct sequence should be:
7464 1 t.1 = *p
7465 2 t.2 = t.1 + 1
7466 3 *p = t.2
7467 4 p = p - 1
7469 So, by specifying a separate post queue, it is possible
7470 to emit the post side-effects in the correct order.
7471 If POST_P is NULL, an internal queue will be used. Before
7472 returning to the caller, the sequence POST_P is appended to
7473 the main output sequence PRE_P.
7475 GIMPLE_TEST_F points to a function that takes a tree T and
7476 returns nonzero if T is in the GIMPLE form requested by the
7477 caller. The GIMPLE predicates are in gimple.c.
7479 FALLBACK tells the function what sort of a temporary we want if
7480 gimplification cannot produce an expression that complies with
7481 GIMPLE_TEST_F.
7483 fb_none means that no temporary should be generated
7484 fb_rvalue means that an rvalue is OK to generate
7485 fb_lvalue means that an lvalue is OK to generate
7486 fb_either means that either is OK, but an lvalue is preferable.
7487 fb_mayfail means that gimplification may fail (in which case
7488 GS_ERROR will be returned)
7490 The return value is either GS_ERROR or GS_ALL_DONE, since this
7491 function iterates until EXPR is completely gimplified or an error
7492 occurs. */
7494 enum gimplify_status
7495 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7496 bool (*gimple_test_f) (tree), fallback_t fallback)
7498 tree tmp;
7499 gimple_seq internal_pre = NULL;
7500 gimple_seq internal_post = NULL;
7501 tree save_expr;
7502 bool is_statement;
7503 location_t saved_location;
7504 enum gimplify_status ret;
7505 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7507 save_expr = *expr_p;
7508 if (save_expr == NULL_TREE)
7509 return GS_ALL_DONE;
7511 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7512 is_statement = gimple_test_f == is_gimple_stmt;
7513 if (is_statement)
7514 gcc_assert (pre_p);
7516 /* Consistency checks. */
7517 if (gimple_test_f == is_gimple_reg)
7518 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7519 else if (gimple_test_f == is_gimple_val
7520 || gimple_test_f == is_gimple_call_addr
7521 || gimple_test_f == is_gimple_condexpr
7522 || gimple_test_f == is_gimple_mem_rhs
7523 || gimple_test_f == is_gimple_mem_rhs_or_call
7524 || gimple_test_f == is_gimple_reg_rhs
7525 || gimple_test_f == is_gimple_reg_rhs_or_call
7526 || gimple_test_f == is_gimple_asm_val
7527 || gimple_test_f == is_gimple_mem_ref_addr)
7528 gcc_assert (fallback & fb_rvalue);
7529 else if (gimple_test_f == is_gimple_min_lval
7530 || gimple_test_f == is_gimple_lvalue)
7531 gcc_assert (fallback & fb_lvalue);
7532 else if (gimple_test_f == is_gimple_addressable)
7533 gcc_assert (fallback & fb_either);
7534 else if (gimple_test_f == is_gimple_stmt)
7535 gcc_assert (fallback == fb_none);
7536 else
7538 /* We should have recognized the GIMPLE_TEST_F predicate to
7539 know what kind of fallback to use in case a temporary is
7540 needed to hold the value or address of *EXPR_P. */
7541 gcc_unreachable ();
7544 /* We used to check the predicate here and return immediately if it
7545 succeeds. This is wrong; the design is for gimplification to be
7546 idempotent, and for the predicates to only test for valid forms, not
7547 whether they are fully simplified. */
7548 if (pre_p == NULL)
7549 pre_p = &internal_pre;
7551 if (post_p == NULL)
7552 post_p = &internal_post;
7554 /* Remember the last statements added to PRE_P and POST_P. Every
7555 new statement added by the gimplification helpers needs to be
7556 annotated with location information. To centralize the
7557 responsibility, we remember the last statement that had been
7558 added to both queues before gimplifying *EXPR_P. If
7559 gimplification produces new statements in PRE_P and POST_P, those
7560 statements will be annotated with the same location information
7561 as *EXPR_P. */
7562 pre_last_gsi = gsi_last (*pre_p);
7563 post_last_gsi = gsi_last (*post_p);
7565 saved_location = input_location;
7566 if (save_expr != error_mark_node
7567 && EXPR_HAS_LOCATION (*expr_p))
7568 input_location = EXPR_LOCATION (*expr_p);
7570 /* Loop over the specific gimplifiers until the toplevel node
7571 remains the same. */
7574 /* Strip away as many useless type conversions as possible
7575 at the toplevel. */
7576 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7578 /* Remember the expr. */
7579 save_expr = *expr_p;
7581 /* Die, die, die, my darling. */
7582 if (save_expr == error_mark_node
7583 || (TREE_TYPE (save_expr)
7584 && TREE_TYPE (save_expr) == error_mark_node))
7586 ret = GS_ERROR;
7587 break;
7590 /* Do any language-specific gimplification. */
7591 ret = ((enum gimplify_status)
7592 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7593 if (ret == GS_OK)
7595 if (*expr_p == NULL_TREE)
7596 break;
7597 if (*expr_p != save_expr)
7598 continue;
7600 else if (ret != GS_UNHANDLED)
7601 break;
7603 /* Make sure that all the cases set 'ret' appropriately. */
7604 ret = GS_UNHANDLED;
7605 switch (TREE_CODE (*expr_p))
7607 /* First deal with the special cases. */
7609 case POSTINCREMENT_EXPR:
7610 case POSTDECREMENT_EXPR:
7611 case PREINCREMENT_EXPR:
7612 case PREDECREMENT_EXPR:
7613 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7614 fallback != fb_none,
7615 TREE_TYPE (*expr_p));
7616 break;
7618 case VIEW_CONVERT_EXPR:
7619 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
7620 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
7622 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7623 post_p, is_gimple_val, fb_rvalue);
7624 recalculate_side_effects (*expr_p);
7625 break;
7627 /* Fallthru. */
7629 case ARRAY_REF:
7630 case ARRAY_RANGE_REF:
7631 case REALPART_EXPR:
7632 case IMAGPART_EXPR:
7633 case COMPONENT_REF:
7634 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7635 fallback ? fallback : fb_rvalue);
7636 break;
7638 case COND_EXPR:
7639 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7641 /* C99 code may assign to an array in a structure value of a
7642 conditional expression, and this has undefined behavior
7643 only on execution, so create a temporary if an lvalue is
7644 required. */
7645 if (fallback == fb_lvalue)
7647 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7648 mark_addressable (*expr_p);
7649 ret = GS_OK;
7651 break;
7653 case CALL_EXPR:
7654 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7656 /* C99 code may assign to an array in a structure returned
7657 from a function, and this has undefined behavior only on
7658 execution, so create a temporary if an lvalue is
7659 required. */
7660 if (fallback == fb_lvalue)
7662 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7663 mark_addressable (*expr_p);
7664 ret = GS_OK;
7666 break;
7668 case TREE_LIST:
7669 gcc_unreachable ();
7671 case COMPOUND_EXPR:
7672 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7673 break;
7675 case COMPOUND_LITERAL_EXPR:
7676 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7677 gimple_test_f, fallback);
7678 break;
7680 case MODIFY_EXPR:
7681 case INIT_EXPR:
7682 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7683 fallback != fb_none);
7684 break;
7686 case TRUTH_ANDIF_EXPR:
7687 case TRUTH_ORIF_EXPR:
7689 /* Preserve the original type of the expression and the
7690 source location of the outer expression. */
7691 tree org_type = TREE_TYPE (*expr_p);
7692 *expr_p = gimple_boolify (*expr_p);
7693 *expr_p = build3_loc (input_location, COND_EXPR,
7694 org_type, *expr_p,
7695 fold_convert_loc
7696 (input_location,
7697 org_type, boolean_true_node),
7698 fold_convert_loc
7699 (input_location,
7700 org_type, boolean_false_node));
7701 ret = GS_OK;
7702 break;
7705 case TRUTH_NOT_EXPR:
7707 tree type = TREE_TYPE (*expr_p);
7708 /* The parsers are careful to generate TRUTH_NOT_EXPR
7709 only with operands that are always zero or one.
7710 We do not fold here but handle the only interesting case
7711 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7712 *expr_p = gimple_boolify (*expr_p);
7713 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7714 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7715 TREE_TYPE (*expr_p),
7716 TREE_OPERAND (*expr_p, 0));
7717 else
7718 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7719 TREE_TYPE (*expr_p),
7720 TREE_OPERAND (*expr_p, 0),
7721 build_int_cst (TREE_TYPE (*expr_p), 1));
7722 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7723 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7724 ret = GS_OK;
7725 break;
7728 case ADDR_EXPR:
7729 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7730 break;
7732 case ANNOTATE_EXPR:
7734 tree cond = TREE_OPERAND (*expr_p, 0);
7735 tree kind = TREE_OPERAND (*expr_p, 1);
7736 tree type = TREE_TYPE (cond);
7737 if (!INTEGRAL_TYPE_P (type))
7739 *expr_p = cond;
7740 ret = GS_OK;
7741 break;
7743 tree tmp = create_tmp_var (type, NULL);
7744 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
7745 gimple call
7746 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
7747 gimple_call_set_lhs (call, tmp);
7748 gimplify_seq_add_stmt (pre_p, call);
7749 *expr_p = tmp;
7750 ret = GS_ALL_DONE;
7751 break;
7754 case VA_ARG_EXPR:
7755 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7756 break;
7758 CASE_CONVERT:
7759 if (IS_EMPTY_STMT (*expr_p))
7761 ret = GS_ALL_DONE;
7762 break;
7765 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7766 || fallback == fb_none)
7768 /* Just strip a conversion to void (or in void context) and
7769 try again. */
7770 *expr_p = TREE_OPERAND (*expr_p, 0);
7771 ret = GS_OK;
7772 break;
7775 ret = gimplify_conversion (expr_p);
7776 if (ret == GS_ERROR)
7777 break;
7778 if (*expr_p != save_expr)
7779 break;
7780 /* FALLTHRU */
7782 case FIX_TRUNC_EXPR:
7783 /* unary_expr: ... | '(' cast ')' val | ... */
7784 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7785 is_gimple_val, fb_rvalue);
7786 recalculate_side_effects (*expr_p);
7787 break;
7789 case INDIRECT_REF:
7791 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7792 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7793 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7795 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7796 if (*expr_p != save_expr)
7798 ret = GS_OK;
7799 break;
7802 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7803 is_gimple_reg, fb_rvalue);
7804 if (ret == GS_ERROR)
7805 break;
7807 recalculate_side_effects (*expr_p);
7808 *expr_p = fold_build2_loc (input_location, MEM_REF,
7809 TREE_TYPE (*expr_p),
7810 TREE_OPERAND (*expr_p, 0),
7811 build_int_cst (saved_ptr_type, 0));
7812 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7813 TREE_THIS_NOTRAP (*expr_p) = notrap;
7814 ret = GS_OK;
7815 break;
7818 /* We arrive here through the various re-gimplifcation paths. */
7819 case MEM_REF:
7820 /* First try re-folding the whole thing. */
7821 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7822 TREE_OPERAND (*expr_p, 0),
7823 TREE_OPERAND (*expr_p, 1));
7824 if (tmp)
7826 *expr_p = tmp;
7827 recalculate_side_effects (*expr_p);
7828 ret = GS_OK;
7829 break;
7831 /* Avoid re-gimplifying the address operand if it is already
7832 in suitable form. Re-gimplifying would mark the address
7833 operand addressable. Always gimplify when not in SSA form
7834 as we still may have to gimplify decls with value-exprs. */
7835 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7836 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7838 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7839 is_gimple_mem_ref_addr, fb_rvalue);
7840 if (ret == GS_ERROR)
7841 break;
7843 recalculate_side_effects (*expr_p);
7844 ret = GS_ALL_DONE;
7845 break;
7847 /* Constants need not be gimplified. */
7848 case INTEGER_CST:
7849 case REAL_CST:
7850 case FIXED_CST:
7851 case STRING_CST:
7852 case COMPLEX_CST:
7853 case VECTOR_CST:
7854 /* Drop the overflow flag on constants, we do not want
7855 that in the GIMPLE IL. */
7856 if (TREE_OVERFLOW_P (*expr_p))
7857 *expr_p = drop_tree_overflow (*expr_p);
7858 ret = GS_ALL_DONE;
7859 break;
7861 case CONST_DECL:
7862 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7863 CONST_DECL node. Otherwise the decl is replaceable by its
7864 value. */
7865 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7866 if (fallback & fb_lvalue)
7867 ret = GS_ALL_DONE;
7868 else
7870 *expr_p = DECL_INITIAL (*expr_p);
7871 ret = GS_OK;
7873 break;
7875 case DECL_EXPR:
7876 ret = gimplify_decl_expr (expr_p, pre_p);
7877 break;
7879 case BIND_EXPR:
7880 ret = gimplify_bind_expr (expr_p, pre_p);
7881 break;
7883 case LOOP_EXPR:
7884 ret = gimplify_loop_expr (expr_p, pre_p);
7885 break;
7887 case SWITCH_EXPR:
7888 ret = gimplify_switch_expr (expr_p, pre_p);
7889 break;
7891 case EXIT_EXPR:
7892 ret = gimplify_exit_expr (expr_p);
7893 break;
7895 case GOTO_EXPR:
7896 /* If the target is not LABEL, then it is a computed jump
7897 and the target needs to be gimplified. */
7898 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7900 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7901 NULL, is_gimple_val, fb_rvalue);
7902 if (ret == GS_ERROR)
7903 break;
7905 gimplify_seq_add_stmt (pre_p,
7906 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7907 ret = GS_ALL_DONE;
7908 break;
7910 case PREDICT_EXPR:
7911 gimplify_seq_add_stmt (pre_p,
7912 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7913 PREDICT_EXPR_OUTCOME (*expr_p)));
7914 ret = GS_ALL_DONE;
7915 break;
7917 case LABEL_EXPR:
7918 ret = GS_ALL_DONE;
7919 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7920 == current_function_decl);
7921 gimplify_seq_add_stmt (pre_p,
7922 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7923 break;
7925 case CASE_LABEL_EXPR:
7926 ret = gimplify_case_label_expr (expr_p, pre_p);
7927 break;
7929 case RETURN_EXPR:
7930 ret = gimplify_return_expr (*expr_p, pre_p);
7931 break;
7933 case CONSTRUCTOR:
7934 /* Don't reduce this in place; let gimplify_init_constructor work its
7935 magic. Buf if we're just elaborating this for side effects, just
7936 gimplify any element that has side-effects. */
7937 if (fallback == fb_none)
7939 unsigned HOST_WIDE_INT ix;
7940 tree val;
7941 tree temp = NULL_TREE;
7942 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7943 if (TREE_SIDE_EFFECTS (val))
7944 append_to_statement_list (val, &temp);
7946 *expr_p = temp;
7947 ret = temp ? GS_OK : GS_ALL_DONE;
7949 /* C99 code may assign to an array in a constructed
7950 structure or union, and this has undefined behavior only
7951 on execution, so create a temporary if an lvalue is
7952 required. */
7953 else if (fallback == fb_lvalue)
7955 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7956 mark_addressable (*expr_p);
7957 ret = GS_OK;
7959 else
7960 ret = GS_ALL_DONE;
7961 break;
7963 /* The following are special cases that are not handled by the
7964 original GIMPLE grammar. */
7966 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7967 eliminated. */
7968 case SAVE_EXPR:
7969 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7970 break;
7972 case BIT_FIELD_REF:
7973 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7974 post_p, is_gimple_lvalue, fb_either);
7975 recalculate_side_effects (*expr_p);
7976 break;
7978 case TARGET_MEM_REF:
7980 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7982 if (TMR_BASE (*expr_p))
7983 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7984 post_p, is_gimple_mem_ref_addr, fb_either);
7985 if (TMR_INDEX (*expr_p))
7986 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7987 post_p, is_gimple_val, fb_rvalue);
7988 if (TMR_INDEX2 (*expr_p))
7989 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7990 post_p, is_gimple_val, fb_rvalue);
7991 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7992 ret = MIN (r0, r1);
7994 break;
7996 case NON_LVALUE_EXPR:
7997 /* This should have been stripped above. */
7998 gcc_unreachable ();
8000 case ASM_EXPR:
8001 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
8002 break;
8004 case TRY_FINALLY_EXPR:
8005 case TRY_CATCH_EXPR:
8007 gimple_seq eval, cleanup;
8008 gimple try_;
8010 /* Calls to destructors are generated automatically in FINALLY/CATCH
8011 block. They should have location as UNKNOWN_LOCATION. However,
8012 gimplify_call_expr will reset these call stmts to input_location
8013 if it finds stmt's location is unknown. To prevent resetting for
8014 destructors, we set the input_location to unknown.
8015 Note that this only affects the destructor calls in FINALLY/CATCH
8016 block, and will automatically reset to its original value by the
8017 end of gimplify_expr. */
8018 input_location = UNKNOWN_LOCATION;
8019 eval = cleanup = NULL;
8020 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
8021 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
8022 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
8023 if (gimple_seq_empty_p (cleanup))
8025 gimple_seq_add_seq (pre_p, eval);
8026 ret = GS_ALL_DONE;
8027 break;
8029 try_ = gimple_build_try (eval, cleanup,
8030 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
8031 ? GIMPLE_TRY_FINALLY
8032 : GIMPLE_TRY_CATCH);
8033 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
8034 gimple_set_location (try_, saved_location);
8035 else
8036 gimple_set_location (try_, EXPR_LOCATION (save_expr));
8037 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
8038 gimple_try_set_catch_is_cleanup (try_,
8039 TRY_CATCH_IS_CLEANUP (*expr_p));
8040 gimplify_seq_add_stmt (pre_p, try_);
8041 ret = GS_ALL_DONE;
8042 break;
8045 case CLEANUP_POINT_EXPR:
8046 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
8047 break;
8049 case TARGET_EXPR:
8050 ret = gimplify_target_expr (expr_p, pre_p, post_p);
8051 break;
8053 case CATCH_EXPR:
8055 gimple c;
8056 gimple_seq handler = NULL;
8057 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
8058 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
8059 gimplify_seq_add_stmt (pre_p, c);
8060 ret = GS_ALL_DONE;
8061 break;
8064 case EH_FILTER_EXPR:
8066 gimple ehf;
8067 gimple_seq failure = NULL;
8069 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
8070 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
8071 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
8072 gimplify_seq_add_stmt (pre_p, ehf);
8073 ret = GS_ALL_DONE;
8074 break;
8077 case OBJ_TYPE_REF:
8079 enum gimplify_status r0, r1;
8080 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
8081 post_p, is_gimple_val, fb_rvalue);
8082 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
8083 post_p, is_gimple_val, fb_rvalue);
8084 TREE_SIDE_EFFECTS (*expr_p) = 0;
8085 ret = MIN (r0, r1);
8087 break;
8089 case LABEL_DECL:
8090 /* We get here when taking the address of a label. We mark
8091 the label as "forced"; meaning it can never be removed and
8092 it is a potential target for any computed goto. */
8093 FORCED_LABEL (*expr_p) = 1;
8094 ret = GS_ALL_DONE;
8095 break;
8097 case STATEMENT_LIST:
8098 ret = gimplify_statement_list (expr_p, pre_p);
8099 break;
8101 case WITH_SIZE_EXPR:
8103 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8104 post_p == &internal_post ? NULL : post_p,
8105 gimple_test_f, fallback);
8106 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8107 is_gimple_val, fb_rvalue);
8108 ret = GS_ALL_DONE;
8110 break;
8112 case VAR_DECL:
8113 case PARM_DECL:
8114 ret = gimplify_var_or_parm_decl (expr_p);
8115 break;
8117 case RESULT_DECL:
8118 /* When within an OpenMP context, notice uses of variables. */
8119 if (gimplify_omp_ctxp)
8120 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
8121 ret = GS_ALL_DONE;
8122 break;
8124 case SSA_NAME:
8125 /* Allow callbacks into the gimplifier during optimization. */
8126 ret = GS_ALL_DONE;
8127 break;
8129 case OMP_PARALLEL:
8130 gimplify_omp_parallel (expr_p, pre_p);
8131 ret = GS_ALL_DONE;
8132 break;
8134 case OMP_TASK:
8135 gimplify_omp_task (expr_p, pre_p);
8136 ret = GS_ALL_DONE;
8137 break;
8139 case OMP_FOR:
8140 case OMP_SIMD:
8141 case CILK_SIMD:
8142 case OMP_DISTRIBUTE:
8143 ret = gimplify_omp_for (expr_p, pre_p);
8144 break;
8146 case OMP_SECTIONS:
8147 case OMP_SINGLE:
8148 case OMP_TARGET:
8149 case OMP_TARGET_DATA:
8150 case OMP_TEAMS:
8151 gimplify_omp_workshare (expr_p, pre_p);
8152 ret = GS_ALL_DONE;
8153 break;
8155 case OMP_TARGET_UPDATE:
8156 gimplify_omp_target_update (expr_p, pre_p);
8157 ret = GS_ALL_DONE;
8158 break;
8160 case OMP_SECTION:
8161 case OMP_MASTER:
8162 case OMP_TASKGROUP:
8163 case OMP_ORDERED:
8164 case OMP_CRITICAL:
8166 gimple_seq body = NULL;
8167 gimple g;
8169 gimplify_and_add (OMP_BODY (*expr_p), &body);
8170 switch (TREE_CODE (*expr_p))
8172 case OMP_SECTION:
8173 g = gimple_build_omp_section (body);
8174 break;
8175 case OMP_MASTER:
8176 g = gimple_build_omp_master (body);
8177 break;
8178 case OMP_TASKGROUP:
8180 gimple_seq cleanup = NULL;
8181 tree fn
8182 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
8183 g = gimple_build_call (fn, 0);
8184 gimple_seq_add_stmt (&cleanup, g);
8185 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
8186 body = NULL;
8187 gimple_seq_add_stmt (&body, g);
8188 g = gimple_build_omp_taskgroup (body);
8190 break;
8191 case OMP_ORDERED:
8192 g = gimple_build_omp_ordered (body);
8193 break;
8194 case OMP_CRITICAL:
8195 g = gimple_build_omp_critical (body,
8196 OMP_CRITICAL_NAME (*expr_p));
8197 break;
8198 default:
8199 gcc_unreachable ();
8201 gimplify_seq_add_stmt (pre_p, g);
8202 ret = GS_ALL_DONE;
8203 break;
8206 case OMP_ATOMIC:
8207 case OMP_ATOMIC_READ:
8208 case OMP_ATOMIC_CAPTURE_OLD:
8209 case OMP_ATOMIC_CAPTURE_NEW:
8210 ret = gimplify_omp_atomic (expr_p, pre_p);
8211 break;
8213 case TRANSACTION_EXPR:
8214 ret = gimplify_transaction (expr_p, pre_p);
8215 break;
8217 case TRUTH_AND_EXPR:
8218 case TRUTH_OR_EXPR:
8219 case TRUTH_XOR_EXPR:
8221 tree orig_type = TREE_TYPE (*expr_p);
8222 tree new_type, xop0, xop1;
8223 *expr_p = gimple_boolify (*expr_p);
8224 new_type = TREE_TYPE (*expr_p);
8225 if (!useless_type_conversion_p (orig_type, new_type))
8227 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
8228 ret = GS_OK;
8229 break;
8232 /* Boolified binary truth expressions are semantically equivalent
8233 to bitwise binary expressions. Canonicalize them to the
8234 bitwise variant. */
8235 switch (TREE_CODE (*expr_p))
8237 case TRUTH_AND_EXPR:
8238 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
8239 break;
8240 case TRUTH_OR_EXPR:
8241 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
8242 break;
8243 case TRUTH_XOR_EXPR:
8244 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
8245 break;
8246 default:
8247 break;
8249 /* Now make sure that operands have compatible type to
8250 expression's new_type. */
8251 xop0 = TREE_OPERAND (*expr_p, 0);
8252 xop1 = TREE_OPERAND (*expr_p, 1);
8253 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8254 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8255 new_type,
8256 xop0);
8257 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8258 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8259 new_type,
8260 xop1);
8261 /* Continue classified as tcc_binary. */
8262 goto expr_2;
8265 case FMA_EXPR:
8266 case VEC_COND_EXPR:
8267 case VEC_PERM_EXPR:
8268 /* Classified as tcc_expression. */
8269 goto expr_3;
8271 case POINTER_PLUS_EXPR:
8273 enum gimplify_status r0, r1;
8274 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8275 post_p, is_gimple_val, fb_rvalue);
8276 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8277 post_p, is_gimple_val, fb_rvalue);
8278 recalculate_side_effects (*expr_p);
8279 ret = MIN (r0, r1);
8280 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
8281 after gimplifying operands - this is similar to how
8282 it would be folding all gimplified stmts on creation
8283 to have them canonicalized, which is what we eventually
8284 should do anyway. */
8285 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
8286 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
8288 *expr_p = build_fold_addr_expr_with_type_loc
8289 (input_location,
8290 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
8291 TREE_OPERAND (*expr_p, 0),
8292 fold_convert (ptr_type_node,
8293 TREE_OPERAND (*expr_p, 1))),
8294 TREE_TYPE (*expr_p));
8295 ret = MIN (ret, GS_OK);
8297 break;
8300 case CILK_SYNC_STMT:
8302 if (!fn_contains_cilk_spawn_p (cfun))
8304 error_at (EXPR_LOCATION (*expr_p),
8305 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8306 ret = GS_ERROR;
8308 else
8310 gimplify_cilk_sync (expr_p, pre_p);
8311 ret = GS_ALL_DONE;
8313 break;
8316 default:
8317 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
8319 case tcc_comparison:
8320 /* Handle comparison of objects of non scalar mode aggregates
8321 with a call to memcmp. It would be nice to only have to do
8322 this for variable-sized objects, but then we'd have to allow
8323 the same nest of reference nodes we allow for MODIFY_EXPR and
8324 that's too complex.
8326 Compare scalar mode aggregates as scalar mode values. Using
8327 memcmp for them would be very inefficient at best, and is
8328 plain wrong if bitfields are involved. */
8330 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
8332 /* Vector comparisons need no boolification. */
8333 if (TREE_CODE (type) == VECTOR_TYPE)
8334 goto expr_2;
8335 else if (!AGGREGATE_TYPE_P (type))
8337 tree org_type = TREE_TYPE (*expr_p);
8338 *expr_p = gimple_boolify (*expr_p);
8339 if (!useless_type_conversion_p (org_type,
8340 TREE_TYPE (*expr_p)))
8342 *expr_p = fold_convert_loc (input_location,
8343 org_type, *expr_p);
8344 ret = GS_OK;
8346 else
8347 goto expr_2;
8349 else if (TYPE_MODE (type) != BLKmode)
8350 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8351 else
8352 ret = gimplify_variable_sized_compare (expr_p);
8354 break;
8357 /* If *EXPR_P does not need to be special-cased, handle it
8358 according to its class. */
8359 case tcc_unary:
8360 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8361 post_p, is_gimple_val, fb_rvalue);
8362 break;
8364 case tcc_binary:
8365 expr_2:
8367 enum gimplify_status r0, r1;
8369 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8370 post_p, is_gimple_val, fb_rvalue);
8371 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8372 post_p, is_gimple_val, fb_rvalue);
8374 ret = MIN (r0, r1);
8375 break;
8378 expr_3:
8380 enum gimplify_status r0, r1, r2;
8382 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8383 post_p, is_gimple_val, fb_rvalue);
8384 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8385 post_p, is_gimple_val, fb_rvalue);
8386 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8387 post_p, is_gimple_val, fb_rvalue);
8389 ret = MIN (MIN (r0, r1), r2);
8390 break;
8393 case tcc_declaration:
8394 case tcc_constant:
8395 ret = GS_ALL_DONE;
8396 goto dont_recalculate;
8398 default:
8399 gcc_unreachable ();
8402 recalculate_side_effects (*expr_p);
8404 dont_recalculate:
8405 break;
8408 gcc_assert (*expr_p || ret != GS_OK);
8410 while (ret == GS_OK);
8412 /* If we encountered an error_mark somewhere nested inside, either
8413 stub out the statement or propagate the error back out. */
8414 if (ret == GS_ERROR)
8416 if (is_statement)
8417 *expr_p = NULL;
8418 goto out;
8421 /* This was only valid as a return value from the langhook, which
8422 we handled. Make sure it doesn't escape from any other context. */
8423 gcc_assert (ret != GS_UNHANDLED);
8425 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8427 /* We aren't looking for a value, and we don't have a valid
8428 statement. If it doesn't have side-effects, throw it away. */
8429 if (!TREE_SIDE_EFFECTS (*expr_p))
8430 *expr_p = NULL;
8431 else if (!TREE_THIS_VOLATILE (*expr_p))
8433 /* This is probably a _REF that contains something nested that
8434 has side effects. Recurse through the operands to find it. */
8435 enum tree_code code = TREE_CODE (*expr_p);
8437 switch (code)
8439 case COMPONENT_REF:
8440 case REALPART_EXPR:
8441 case IMAGPART_EXPR:
8442 case VIEW_CONVERT_EXPR:
8443 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8444 gimple_test_f, fallback);
8445 break;
8447 case ARRAY_REF:
8448 case ARRAY_RANGE_REF:
8449 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8450 gimple_test_f, fallback);
8451 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8452 gimple_test_f, fallback);
8453 break;
8455 default:
8456 /* Anything else with side-effects must be converted to
8457 a valid statement before we get here. */
8458 gcc_unreachable ();
8461 *expr_p = NULL;
8463 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8464 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8466 /* Historically, the compiler has treated a bare reference
8467 to a non-BLKmode volatile lvalue as forcing a load. */
8468 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8470 /* Normally, we do not want to create a temporary for a
8471 TREE_ADDRESSABLE type because such a type should not be
8472 copied by bitwise-assignment. However, we make an
8473 exception here, as all we are doing here is ensuring that
8474 we read the bytes that make up the type. We use
8475 create_tmp_var_raw because create_tmp_var will abort when
8476 given a TREE_ADDRESSABLE type. */
8477 tree tmp = create_tmp_var_raw (type, "vol");
8478 gimple_add_tmp_var (tmp);
8479 gimplify_assign (tmp, *expr_p, pre_p);
8480 *expr_p = NULL;
8482 else
8483 /* We can't do anything useful with a volatile reference to
8484 an incomplete type, so just throw it away. Likewise for
8485 a BLKmode type, since any implicit inner load should
8486 already have been turned into an explicit one by the
8487 gimplification process. */
8488 *expr_p = NULL;
8491 /* If we are gimplifying at the statement level, we're done. Tack
8492 everything together and return. */
8493 if (fallback == fb_none || is_statement)
8495 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8496 it out for GC to reclaim it. */
8497 *expr_p = NULL_TREE;
8499 if (!gimple_seq_empty_p (internal_pre)
8500 || !gimple_seq_empty_p (internal_post))
8502 gimplify_seq_add_seq (&internal_pre, internal_post);
8503 gimplify_seq_add_seq (pre_p, internal_pre);
8506 /* The result of gimplifying *EXPR_P is going to be the last few
8507 statements in *PRE_P and *POST_P. Add location information
8508 to all the statements that were added by the gimplification
8509 helpers. */
8510 if (!gimple_seq_empty_p (*pre_p))
8511 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8513 if (!gimple_seq_empty_p (*post_p))
8514 annotate_all_with_location_after (*post_p, post_last_gsi,
8515 input_location);
8517 goto out;
8520 #ifdef ENABLE_GIMPLE_CHECKING
8521 if (*expr_p)
8523 enum tree_code code = TREE_CODE (*expr_p);
8524 /* These expressions should already be in gimple IR form. */
8525 gcc_assert (code != MODIFY_EXPR
8526 && code != ASM_EXPR
8527 && code != BIND_EXPR
8528 && code != CATCH_EXPR
8529 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8530 && code != EH_FILTER_EXPR
8531 && code != GOTO_EXPR
8532 && code != LABEL_EXPR
8533 && code != LOOP_EXPR
8534 && code != SWITCH_EXPR
8535 && code != TRY_FINALLY_EXPR
8536 && code != OMP_CRITICAL
8537 && code != OMP_FOR
8538 && code != OMP_MASTER
8539 && code != OMP_TASKGROUP
8540 && code != OMP_ORDERED
8541 && code != OMP_PARALLEL
8542 && code != OMP_SECTIONS
8543 && code != OMP_SECTION
8544 && code != OMP_SINGLE);
8546 #endif
8548 /* Otherwise we're gimplifying a subexpression, so the resulting
8549 value is interesting. If it's a valid operand that matches
8550 GIMPLE_TEST_F, we're done. Unless we are handling some
8551 post-effects internally; if that's the case, we need to copy into
8552 a temporary before adding the post-effects to POST_P. */
8553 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
8554 goto out;
8556 /* Otherwise, we need to create a new temporary for the gimplified
8557 expression. */
8559 /* We can't return an lvalue if we have an internal postqueue. The
8560 object the lvalue refers to would (probably) be modified by the
8561 postqueue; we need to copy the value out first, which means an
8562 rvalue. */
8563 if ((fallback & fb_lvalue)
8564 && gimple_seq_empty_p (internal_post)
8565 && is_gimple_addressable (*expr_p))
8567 /* An lvalue will do. Take the address of the expression, store it
8568 in a temporary, and replace the expression with an INDIRECT_REF of
8569 that temporary. */
8570 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8571 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8572 *expr_p = build_simple_mem_ref (tmp);
8574 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8576 /* An rvalue will do. Assign the gimplified expression into a
8577 new temporary TMP and replace the original expression with
8578 TMP. First, make sure that the expression has a type so that
8579 it can be assigned into a temporary. */
8580 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
8581 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
8583 else
8585 #ifdef ENABLE_GIMPLE_CHECKING
8586 if (!(fallback & fb_mayfail))
8588 fprintf (stderr, "gimplification failed:\n");
8589 print_generic_expr (stderr, *expr_p, 0);
8590 debug_tree (*expr_p);
8591 internal_error ("gimplification failed");
8593 #endif
8594 gcc_assert (fallback & fb_mayfail);
8596 /* If this is an asm statement, and the user asked for the
8597 impossible, don't die. Fail and let gimplify_asm_expr
8598 issue an error. */
8599 ret = GS_ERROR;
8600 goto out;
8603 /* Make sure the temporary matches our predicate. */
8604 gcc_assert ((*gimple_test_f) (*expr_p));
8606 if (!gimple_seq_empty_p (internal_post))
8608 annotate_all_with_location (internal_post, input_location);
8609 gimplify_seq_add_seq (pre_p, internal_post);
8612 out:
8613 input_location = saved_location;
8614 return ret;
8617 /* Look through TYPE for variable-sized objects and gimplify each such
8618 size that we find. Add to LIST_P any statements generated. */
8620 void
8621 gimplify_type_sizes (tree type, gimple_seq *list_p)
8623 tree field, t;
8625 if (type == NULL || type == error_mark_node)
8626 return;
8628 /* We first do the main variant, then copy into any other variants. */
8629 type = TYPE_MAIN_VARIANT (type);
8631 /* Avoid infinite recursion. */
8632 if (TYPE_SIZES_GIMPLIFIED (type))
8633 return;
8635 TYPE_SIZES_GIMPLIFIED (type) = 1;
8637 switch (TREE_CODE (type))
8639 case INTEGER_TYPE:
8640 case ENUMERAL_TYPE:
8641 case BOOLEAN_TYPE:
8642 case REAL_TYPE:
8643 case FIXED_POINT_TYPE:
8644 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8645 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
8647 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8649 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8650 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
8652 break;
8654 case ARRAY_TYPE:
8655 /* These types may not have declarations, so handle them here. */
8656 gimplify_type_sizes (TREE_TYPE (type), list_p);
8657 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
8658 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
8659 with assigned stack slots, for -O1+ -g they should be tracked
8660 by VTA. */
8661 if (!(TYPE_NAME (type)
8662 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8663 && DECL_IGNORED_P (TYPE_NAME (type)))
8664 && TYPE_DOMAIN (type)
8665 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8667 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8668 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8669 DECL_IGNORED_P (t) = 0;
8670 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8671 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8672 DECL_IGNORED_P (t) = 0;
8674 break;
8676 case RECORD_TYPE:
8677 case UNION_TYPE:
8678 case QUAL_UNION_TYPE:
8679 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8680 if (TREE_CODE (field) == FIELD_DECL)
8682 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
8683 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8684 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8685 gimplify_type_sizes (TREE_TYPE (field), list_p);
8687 break;
8689 case POINTER_TYPE:
8690 case REFERENCE_TYPE:
8691 /* We used to recurse on the pointed-to type here, which turned out to
8692 be incorrect because its definition might refer to variables not
8693 yet initialized at this point if a forward declaration is involved.
8695 It was actually useful for anonymous pointed-to types to ensure
8696 that the sizes evaluation dominates every possible later use of the
8697 values. Restricting to such types here would be safe since there
8698 is no possible forward declaration around, but would introduce an
8699 undesirable middle-end semantic to anonymity. We then defer to
8700 front-ends the responsibility of ensuring that the sizes are
8701 evaluated both early and late enough, e.g. by attaching artificial
8702 type declarations to the tree. */
8703 break;
8705 default:
8706 break;
8709 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8710 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
8712 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8714 TYPE_SIZE (t) = TYPE_SIZE (type);
8715 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8716 TYPE_SIZES_GIMPLIFIED (t) = 1;
8720 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8721 a size or position, has had all of its SAVE_EXPRs evaluated.
8722 We add any required statements to *STMT_P. */
8724 void
8725 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
8727 tree expr = *expr_p;
8729 /* We don't do anything if the value isn't there, is constant, or contains
8730 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
8731 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
8732 will want to replace it with a new variable, but that will cause problems
8733 if this type is from outside the function. It's OK to have that here. */
8734 if (is_gimple_sizepos (expr))
8735 return;
8737 *expr_p = unshare_expr (expr);
8739 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
8742 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8743 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8744 is true, also gimplify the parameters. */
8746 gimple
8747 gimplify_body (tree fndecl, bool do_parms)
8749 location_t saved_location = input_location;
8750 gimple_seq parm_stmts, seq;
8751 gimple outer_bind;
8752 struct cgraph_node *cgn;
8754 timevar_push (TV_TREE_GIMPLIFY);
8756 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8757 gimplification. */
8758 default_rtl_profile ();
8760 gcc_assert (gimplify_ctxp == NULL);
8761 push_gimplify_context ();
8763 if (flag_openmp)
8765 gcc_assert (gimplify_omp_ctxp == NULL);
8766 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
8767 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
8770 /* Unshare most shared trees in the body and in that of any nested functions.
8771 It would seem we don't have to do this for nested functions because
8772 they are supposed to be output and then the outer function gimplified
8773 first, but the g++ front end doesn't always do it that way. */
8774 unshare_body (fndecl);
8775 unvisit_body (fndecl);
8777 cgn = cgraph_node::get (fndecl);
8778 if (cgn && cgn->origin)
8779 nonlocal_vlas = new hash_set<tree>;
8781 /* Make sure input_location isn't set to something weird. */
8782 input_location = DECL_SOURCE_LOCATION (fndecl);
8784 /* Resolve callee-copies. This has to be done before processing
8785 the body so that DECL_VALUE_EXPR gets processed correctly. */
8786 parm_stmts = do_parms ? gimplify_parameters () : NULL;
8788 /* Gimplify the function's body. */
8789 seq = NULL;
8790 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
8791 outer_bind = gimple_seq_first_stmt (seq);
8792 if (!outer_bind)
8794 outer_bind = gimple_build_nop ();
8795 gimplify_seq_add_stmt (&seq, outer_bind);
8798 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8799 not the case, wrap everything in a GIMPLE_BIND to make it so. */
8800 if (gimple_code (outer_bind) == GIMPLE_BIND
8801 && gimple_seq_first (seq) == gimple_seq_last (seq))
8803 else
8804 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8806 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8808 /* If we had callee-copies statements, insert them at the beginning
8809 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
8810 if (!gimple_seq_empty_p (parm_stmts))
8812 tree parm;
8814 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8815 gimple_bind_set_body (outer_bind, parm_stmts);
8817 for (parm = DECL_ARGUMENTS (current_function_decl);
8818 parm; parm = DECL_CHAIN (parm))
8819 if (DECL_HAS_VALUE_EXPR_P (parm))
8821 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8822 DECL_IGNORED_P (parm) = 0;
8826 if (nonlocal_vlas)
8828 if (nonlocal_vla_vars)
8830 /* tree-nested.c may later on call declare_vars (..., true);
8831 which relies on BLOCK_VARS chain to be the tail of the
8832 gimple_bind_vars chain. Ensure we don't violate that
8833 assumption. */
8834 if (gimple_bind_block (outer_bind)
8835 == DECL_INITIAL (current_function_decl))
8836 declare_vars (nonlocal_vla_vars, outer_bind, true);
8837 else
8838 BLOCK_VARS (DECL_INITIAL (current_function_decl))
8839 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
8840 nonlocal_vla_vars);
8841 nonlocal_vla_vars = NULL_TREE;
8843 delete nonlocal_vlas;
8844 nonlocal_vlas = NULL;
8847 if ((flag_openmp || flag_openmp_simd) && gimplify_omp_ctxp)
8849 delete_omp_context (gimplify_omp_ctxp);
8850 gimplify_omp_ctxp = NULL;
8853 pop_gimplify_context (outer_bind);
8854 gcc_assert (gimplify_ctxp == NULL);
8856 #ifdef ENABLE_CHECKING
8857 if (!seen_error ())
8858 verify_gimple_in_seq (gimple_bind_body (outer_bind));
8859 #endif
8861 timevar_pop (TV_TREE_GIMPLIFY);
8862 input_location = saved_location;
8864 return outer_bind;
8867 typedef char *char_p; /* For DEF_VEC_P. */
8869 /* Return whether we should exclude FNDECL from instrumentation. */
8871 static bool
8872 flag_instrument_functions_exclude_p (tree fndecl)
8874 vec<char_p> *v;
8876 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8877 if (v && v->length () > 0)
8879 const char *name;
8880 int i;
8881 char *s;
8883 name = lang_hooks.decl_printable_name (fndecl, 0);
8884 FOR_EACH_VEC_ELT (*v, i, s)
8885 if (strstr (name, s) != NULL)
8886 return true;
8889 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8890 if (v && v->length () > 0)
8892 const char *name;
8893 int i;
8894 char *s;
8896 name = DECL_SOURCE_FILE (fndecl);
8897 FOR_EACH_VEC_ELT (*v, i, s)
8898 if (strstr (name, s) != NULL)
8899 return true;
8902 return false;
8905 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
8906 node for the function we want to gimplify.
8908 Return the sequence of GIMPLE statements corresponding to the body
8909 of FNDECL. */
8911 void
8912 gimplify_function_tree (tree fndecl)
8914 tree parm, ret;
8915 gimple_seq seq;
8916 gimple bind;
8918 gcc_assert (!gimple_body (fndecl));
8920 if (DECL_STRUCT_FUNCTION (fndecl))
8921 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8922 else
8923 push_struct_function (fndecl);
8925 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
8927 /* Preliminarily mark non-addressed complex variables as eligible
8928 for promotion to gimple registers. We'll transform their uses
8929 as we find them. */
8930 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8931 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
8932 && !TREE_THIS_VOLATILE (parm)
8933 && !needs_to_live_in_memory (parm))
8934 DECL_GIMPLE_REG_P (parm) = 1;
8937 ret = DECL_RESULT (fndecl);
8938 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
8939 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
8940 && !needs_to_live_in_memory (ret))
8941 DECL_GIMPLE_REG_P (ret) = 1;
8943 bind = gimplify_body (fndecl, true);
8945 /* The tree body of the function is no longer needed, replace it
8946 with the new GIMPLE body. */
8947 seq = NULL;
8948 gimple_seq_add_stmt (&seq, bind);
8949 gimple_set_body (fndecl, seq);
8951 /* If we're instrumenting function entry/exit, then prepend the call to
8952 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8953 catch the exit hook. */
8954 /* ??? Add some way to ignore exceptions for this TFE. */
8955 if (flag_instrument_function_entry_exit
8956 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8957 && !flag_instrument_functions_exclude_p (fndecl))
8959 tree x;
8960 gimple new_bind;
8961 gimple tf;
8962 gimple_seq cleanup = NULL, body = NULL;
8963 tree tmp_var;
8964 gimple call;
8966 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8967 call = gimple_build_call (x, 1, integer_zero_node);
8968 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8969 gimple_call_set_lhs (call, tmp_var);
8970 gimplify_seq_add_stmt (&cleanup, call);
8971 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
8972 call = gimple_build_call (x, 2,
8973 build_fold_addr_expr (current_function_decl),
8974 tmp_var);
8975 gimplify_seq_add_stmt (&cleanup, call);
8976 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
8978 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8979 call = gimple_build_call (x, 1, integer_zero_node);
8980 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8981 gimple_call_set_lhs (call, tmp_var);
8982 gimplify_seq_add_stmt (&body, call);
8983 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
8984 call = gimple_build_call (x, 2,
8985 build_fold_addr_expr (current_function_decl),
8986 tmp_var);
8987 gimplify_seq_add_stmt (&body, call);
8988 gimplify_seq_add_stmt (&body, tf);
8989 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
8990 /* Clear the block for BIND, since it is no longer directly inside
8991 the function, but within a try block. */
8992 gimple_bind_set_block (bind, NULL);
8994 /* Replace the current function body with the body
8995 wrapped in the try/finally TF. */
8996 seq = NULL;
8997 gimple_seq_add_stmt (&seq, new_bind);
8998 gimple_set_body (fndecl, seq);
9001 DECL_SAVED_TREE (fndecl) = NULL_TREE;
9002 cfun->curr_properties = PROP_gimple_any;
9004 pop_cfun ();
9007 /* Return a dummy expression of type TYPE in order to keep going after an
9008 error. */
9010 static tree
9011 dummy_object (tree type)
9013 tree t = build_int_cst (build_pointer_type (type), 0);
9014 return build2 (MEM_REF, type, t, t);
9017 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
9018 builtin function, but a very special sort of operator. */
9020 enum gimplify_status
9021 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
9023 tree promoted_type, have_va_type;
9024 tree valist = TREE_OPERAND (*expr_p, 0);
9025 tree type = TREE_TYPE (*expr_p);
9026 tree t;
9027 location_t loc = EXPR_LOCATION (*expr_p);
9029 /* Verify that valist is of the proper type. */
9030 have_va_type = TREE_TYPE (valist);
9031 if (have_va_type == error_mark_node)
9032 return GS_ERROR;
9033 have_va_type = targetm.canonical_va_list_type (have_va_type);
9035 if (have_va_type == NULL_TREE)
9037 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
9038 return GS_ERROR;
9041 /* Generate a diagnostic for requesting data of a type that cannot
9042 be passed through `...' due to type promotion at the call site. */
9043 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
9044 != type)
9046 static bool gave_help;
9047 bool warned;
9049 /* Unfortunately, this is merely undefined, rather than a constraint
9050 violation, so we cannot make this an error. If this call is never
9051 executed, the program is still strictly conforming. */
9052 warned = warning_at (loc, 0,
9053 "%qT is promoted to %qT when passed through %<...%>",
9054 type, promoted_type);
9055 if (!gave_help && warned)
9057 gave_help = true;
9058 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
9059 promoted_type, type);
9062 /* We can, however, treat "undefined" any way we please.
9063 Call abort to encourage the user to fix the program. */
9064 if (warned)
9065 inform (loc, "if this code is reached, the program will abort");
9066 /* Before the abort, allow the evaluation of the va_list
9067 expression to exit or longjmp. */
9068 gimplify_and_add (valist, pre_p);
9069 t = build_call_expr_loc (loc,
9070 builtin_decl_implicit (BUILT_IN_TRAP), 0);
9071 gimplify_and_add (t, pre_p);
9073 /* This is dead code, but go ahead and finish so that the
9074 mode of the result comes out right. */
9075 *expr_p = dummy_object (type);
9076 return GS_ALL_DONE;
9078 else
9080 /* Make it easier for the backends by protecting the valist argument
9081 from multiple evaluations. */
9082 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
9084 /* For this case, the backends will be expecting a pointer to
9085 TREE_TYPE (abi), but it's possible we've
9086 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
9087 So fix it. */
9088 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
9090 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
9091 valist = fold_convert_loc (loc, p1,
9092 build_fold_addr_expr_loc (loc, valist));
9095 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
9097 else
9098 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
9100 if (!targetm.gimplify_va_arg_expr)
9101 /* FIXME: Once most targets are converted we should merely
9102 assert this is non-null. */
9103 return GS_ALL_DONE;
9105 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
9106 return GS_OK;
9110 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
9112 DST/SRC are the destination and source respectively. You can pass
9113 ungimplified trees in DST or SRC, in which case they will be
9114 converted to a gimple operand if necessary.
9116 This function returns the newly created GIMPLE_ASSIGN tuple. */
9118 gimple
9119 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
9121 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9122 gimplify_and_add (t, seq_p);
9123 ggc_free (t);
9124 return gimple_seq_last_stmt (*seq_p);
9127 inline hashval_t
9128 gimplify_hasher::hash (const value_type *p)
9130 tree t = p->val;
9131 return iterative_hash_expr (t, 0);
9134 inline bool
9135 gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
9137 tree t1 = p1->val;
9138 tree t2 = p2->val;
9139 enum tree_code code = TREE_CODE (t1);
9141 if (TREE_CODE (t2) != code
9142 || TREE_TYPE (t1) != TREE_TYPE (t2))
9143 return false;
9145 if (!operand_equal_p (t1, t2, 0))
9146 return false;
9148 #ifdef ENABLE_CHECKING
9149 /* Only allow them to compare equal if they also hash equal; otherwise
9150 results are nondeterminate, and we fail bootstrap comparison. */
9151 gcc_assert (hash (p1) == hash (p2));
9152 #endif
9154 return true;