PR target/56858
[official-gcc.git] / gcc / gimplify.c
blobf3c7d610e3f1501943f242744e419c00bc4d52c6
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2014 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tree.h"
27 #include "expr.h"
28 #include "pointer-set.h"
29 #include "hash-table.h"
30 #include "basic-block.h"
31 #include "tree-ssa-alias.h"
32 #include "internal-fn.h"
33 #include "gimple-fold.h"
34 #include "tree-eh.h"
35 #include "gimple-expr.h"
36 #include "is-a.h"
37 #include "gimple.h"
38 #include "gimplify.h"
39 #include "gimple-iterator.h"
40 #include "stringpool.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stor-layout.h"
44 #include "stmt.h"
45 #include "print-tree.h"
46 #include "tree-iterator.h"
47 #include "tree-inline.h"
48 #include "tree-pretty-print.h"
49 #include "langhooks.h"
50 #include "bitmap.h"
51 #include "gimple-ssa.h"
52 #include "cgraph.h"
53 #include "tree-cfg.h"
54 #include "tree-ssanames.h"
55 #include "tree-ssa.h"
56 #include "diagnostic-core.h"
57 #include "target.h"
58 #include "splay-tree.h"
59 #include "omp-low.h"
60 #include "gimple-low.h"
61 #include "cilk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
65 #include "builtins.h"
/* Flags recorded per decl in a gimplify_omp_ctx's splay tree, describing
   how the gimplifier has classified the variable for the OMP region.
   These are bit flags and may be OR'ed together.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,
  GOVD_MAP_TO_ONLY = 8192,

  /* Mask selecting the data-sharing classification bits.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of OMP region currently being gimplified.  Values are partly
   bit-flag-like: ORT_COMBINED_PARALLEL is ORT_PARALLEL | 1 and
   ORT_UNTIED_TASK is ORT_TASK | 1, so masking with the base value
   tests for either variant.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_TARGET_DATA = 16,
  ORT_TARGET = 32
};
102 /* Gimplify hashtable helper. */
104 struct gimplify_hasher : typed_free_remove <elt_t>
106 typedef elt_t value_type;
107 typedef elt_t compare_type;
108 static inline hashval_t hash (const value_type *);
109 static inline bool equal (const value_type *, const compare_type *);
112 struct gimplify_ctx
114 struct gimplify_ctx *prev_context;
116 vec<gimple> bind_expr_stack;
117 tree temps;
118 gimple_seq conditional_cleanups;
119 tree exit_label;
120 tree return_temp;
122 vec<tree> case_labels;
123 /* The formal temporary table. Should this be persistent? */
124 hash_table<gimplify_hasher> *temp_htab;
126 int conditions;
127 bool save_stack;
128 bool into_ssa;
129 bool allow_rhs_cond_expr;
130 bool in_cleanup_point_expr;
133 struct gimplify_omp_ctx
135 struct gimplify_omp_ctx *outer_context;
136 splay_tree variables;
137 struct pointer_set_t *privatized_types;
138 location_t location;
139 enum omp_clause_default_kind default_kind;
140 enum omp_region_type region_type;
141 bool combined_loop;
142 bool distribute;
145 static struct gimplify_ctx *gimplify_ctxp;
146 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
148 /* Forward declaration. */
149 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
151 /* Shorter alias name for the above function for use in gimplify.c
152 only. */
154 static inline void
155 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
157 gimple_seq_add_stmt_without_update (seq_p, gs);
160 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
161 NULL, a new sequence is allocated. This function is
162 similar to gimple_seq_add_seq, but does not scan the operands.
163 During gimplification, we need to manipulate statement sequences
164 before the def/use vectors have been constructed. */
166 static void
167 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
169 gimple_stmt_iterator si;
171 if (src == NULL)
172 return;
174 si = gsi_last (*dst_p);
175 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
179 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
180 and popping gimplify contexts. */
182 static struct gimplify_ctx *ctx_pool = NULL;
184 /* Return a gimplify context struct from the pool. */
186 static inline struct gimplify_ctx *
187 ctx_alloc (void)
189 struct gimplify_ctx * c = ctx_pool;
191 if (c)
192 ctx_pool = c->prev_context;
193 else
194 c = XNEW (struct gimplify_ctx);
196 memset (c, '\0', sizeof (*c));
197 return c;
200 /* Put gimplify context C back into the pool. */
202 static inline void
203 ctx_free (struct gimplify_ctx *c)
205 c->prev_context = ctx_pool;
206 ctx_pool = c;
209 /* Free allocated ctx stack memory. */
211 void
212 free_gimplify_stack (void)
214 struct gimplify_ctx *c;
216 while ((c = ctx_pool))
218 ctx_pool = c->prev_context;
219 free (c);
224 /* Set up a context for the gimplifier. */
226 void
227 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
229 struct gimplify_ctx *c = ctx_alloc ();
231 c->prev_context = gimplify_ctxp;
232 gimplify_ctxp = c;
233 gimplify_ctxp->into_ssa = in_ssa;
234 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
237 /* Tear down a context for the gimplifier. If BODY is non-null, then
238 put the temporaries into the outer BIND_EXPR. Otherwise, put them
239 in the local_decls.
241 BODY is not a sequence, but the first tuple in a sequence. */
243 void
244 pop_gimplify_context (gimple body)
246 struct gimplify_ctx *c = gimplify_ctxp;
248 gcc_assert (c
249 && (!c->bind_expr_stack.exists ()
250 || c->bind_expr_stack.is_empty ()));
251 c->bind_expr_stack.release ();
252 gimplify_ctxp = c->prev_context;
254 if (body)
255 declare_vars (c->temps, body, false);
256 else
257 record_vars (c->temps);
259 delete c->temp_htab;
260 c->temp_htab = NULL;
261 ctx_free (c);
264 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
266 static void
267 gimple_push_bind_expr (gimple gimple_bind)
269 gimplify_ctxp->bind_expr_stack.reserve (8);
270 gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
273 /* Pop the first element off the stack of bindings. */
275 static void
276 gimple_pop_bind_expr (void)
278 gimplify_ctxp->bind_expr_stack.pop ();
281 /* Return the first element of the stack of bindings. */
283 gimple
284 gimple_current_bind_expr (void)
286 return gimplify_ctxp->bind_expr_stack.last ();
289 /* Return the stack of bindings created during gimplification. */
291 vec<gimple>
292 gimple_bind_expr_stack (void)
294 return gimplify_ctxp->bind_expr_stack;
297 /* Return true iff there is a COND_EXPR between us and the innermost
298 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
300 static bool
301 gimple_conditional_context (void)
303 return gimplify_ctxp->conditions > 0;
306 /* Note that we've entered a COND_EXPR. */
308 static void
309 gimple_push_condition (void)
311 #ifdef ENABLE_GIMPLE_CHECKING
312 if (gimplify_ctxp->conditions == 0)
313 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
314 #endif
315 ++(gimplify_ctxp->conditions);
318 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
319 now, add any conditional cleanups we've seen to the prequeue. */
321 static void
322 gimple_pop_condition (gimple_seq *pre_p)
324 int conds = --(gimplify_ctxp->conditions);
326 gcc_assert (conds >= 0);
327 if (conds == 0)
329 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
330 gimplify_ctxp->conditional_cleanups = NULL;
334 /* A stable comparison routine for use with splay trees and DECLs. */
336 static int
337 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
339 tree a = (tree) xa;
340 tree b = (tree) xb;
342 return DECL_UID (a) - DECL_UID (b);
345 /* Create a new omp construct that deals with variable remapping. */
347 static struct gimplify_omp_ctx *
348 new_omp_context (enum omp_region_type region_type)
350 struct gimplify_omp_ctx *c;
352 c = XCNEW (struct gimplify_omp_ctx);
353 c->outer_context = gimplify_omp_ctxp;
354 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
355 c->privatized_types = pointer_set_create ();
356 c->location = input_location;
357 c->region_type = region_type;
358 if ((region_type & ORT_TASK) == 0)
359 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
360 else
361 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
363 return c;
366 /* Destroy an omp construct that deals with variable remapping. */
368 static void
369 delete_omp_context (struct gimplify_omp_ctx *c)
371 splay_tree_delete (c->variables);
372 pointer_set_destroy (c->privatized_types);
373 XDELETE (c);
376 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
377 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
379 /* Both gimplify the statement T and append it to *SEQ_P. This function
380 behaves exactly as gimplify_stmt, but you don't have to pass T as a
381 reference. */
383 void
384 gimplify_and_add (tree t, gimple_seq *seq_p)
386 gimplify_stmt (&t, seq_p);
389 /* Gimplify statement T into sequence *SEQ_P, and return the first
390 tuple in the sequence of generated tuples for this statement.
391 Return NULL if gimplifying T produced no tuples. */
393 static gimple
394 gimplify_and_return_first (tree t, gimple_seq *seq_p)
396 gimple_stmt_iterator last = gsi_last (*seq_p);
398 gimplify_and_add (t, seq_p);
400 if (!gsi_end_p (last))
402 gsi_next (&last);
403 return gsi_stmt (last);
405 else
406 return gimple_seq_first_stmt (*seq_p);
409 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
410 LHS, or for a call argument. */
412 static bool
413 is_gimple_mem_rhs (tree t)
415 /* If we're dealing with a renamable type, either source or dest must be
416 a renamed variable. */
417 if (is_gimple_reg_type (TREE_TYPE (t)))
418 return is_gimple_val (t);
419 else
420 return is_gimple_val (t) || is_gimple_lvalue (t);
423 /* Return true if T is a CALL_EXPR or an expression that can be
424 assigned to a temporary. Note that this predicate should only be
425 used during gimplification. See the rationale for this in
426 gimplify_modify_expr. */
428 static bool
429 is_gimple_reg_rhs_or_call (tree t)
431 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
432 || TREE_CODE (t) == CALL_EXPR);
435 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
436 this predicate should only be used during gimplification. See the
437 rationale for this in gimplify_modify_expr. */
439 static bool
440 is_gimple_mem_rhs_or_call (tree t)
442 /* If we're dealing with a renamable type, either source or dest must be
443 a renamed variable. */
444 if (is_gimple_reg_type (TREE_TYPE (t)))
445 return is_gimple_val (t);
446 else
447 return (is_gimple_val (t) || is_gimple_lvalue (t)
448 || TREE_CODE (t) == CALL_EXPR);
451 /* Create a temporary with a name derived from VAL. Subroutine of
452 lookup_tmp_var; nobody else should call this function. */
454 static inline tree
455 create_tmp_from_val (tree val)
457 /* Drop all qualifiers and address-space information from the value type. */
458 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
459 tree var = create_tmp_var (type, get_name (val));
460 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
461 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
462 DECL_GIMPLE_REG_P (var) = 1;
463 return var;
466 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
467 an existing expression temporary. */
469 static tree
470 lookup_tmp_var (tree val, bool is_formal)
472 tree ret;
474 /* If not optimizing, never really reuse a temporary. local-alloc
475 won't allocate any variable that is used in more than one basic
476 block, which means it will go into memory, causing much extra
477 work in reload and final and poorer code generation, outweighing
478 the extra memory allocation here. */
479 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
480 ret = create_tmp_from_val (val);
481 else
483 elt_t elt, *elt_p;
484 elt_t **slot;
486 elt.val = val;
487 if (!gimplify_ctxp->temp_htab)
488 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
489 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
490 if (*slot == NULL)
492 elt_p = XNEW (elt_t);
493 elt_p->val = val;
494 elt_p->temp = ret = create_tmp_from_val (val);
495 *slot = elt_p;
497 else
499 elt_p = *slot;
500 ret = elt_p->temp;
504 return ret;
507 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
509 static tree
510 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
511 bool is_formal)
513 tree t, mod;
515 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
516 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
517 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
518 fb_rvalue);
520 if (gimplify_ctxp->into_ssa
521 && is_gimple_reg_type (TREE_TYPE (val)))
522 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
523 else
524 t = lookup_tmp_var (val, is_formal);
526 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
528 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
530 /* gimplify_modify_expr might want to reduce this further. */
531 gimplify_and_add (mod, pre_p);
532 ggc_free (mod);
534 return t;
537 /* Return a formal temporary variable initialized with VAL. PRE_P is as
538 in gimplify_expr. Only use this function if:
540 1) The value of the unfactored expression represented by VAL will not
541 change between the initialization and use of the temporary, and
542 2) The temporary will not be otherwise modified.
544 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
545 and #2 means it is inappropriate for && temps.
547 For other cases, use get_initialized_tmp_var instead. */
549 tree
550 get_formal_tmp_var (tree val, gimple_seq *pre_p)
552 return internal_get_tmp_var (val, pre_p, NULL, true);
555 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
556 are as in gimplify_expr. */
558 tree
559 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
561 return internal_get_tmp_var (val, pre_p, post_p, false);
564 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
565 generate debug info for them; otherwise don't. */
567 void
568 declare_vars (tree vars, gimple scope, bool debug_info)
570 tree last = vars;
571 if (last)
573 tree temps, block;
575 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
577 temps = nreverse (last);
579 block = gimple_bind_block (scope);
580 gcc_assert (!block || TREE_CODE (block) == BLOCK);
581 if (!block || !debug_info)
583 DECL_CHAIN (last) = gimple_bind_vars (scope);
584 gimple_bind_set_vars (scope, temps);
586 else
588 /* We need to attach the nodes both to the BIND_EXPR and to its
589 associated BLOCK for debugging purposes. The key point here
590 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
591 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
592 if (BLOCK_VARS (block))
593 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
594 else
596 gimple_bind_set_vars (scope,
597 chainon (gimple_bind_vars (scope), temps));
598 BLOCK_VARS (block) = temps;
604 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
605 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
606 no such upper bound can be obtained. */
608 static void
609 force_constant_size (tree var)
611 /* The only attempt we make is by querying the maximum size of objects
612 of the variable's type. */
614 HOST_WIDE_INT max_size;
616 gcc_assert (TREE_CODE (var) == VAR_DECL);
618 max_size = max_int_size_in_bytes (TREE_TYPE (var));
620 gcc_assert (max_size >= 0);
622 DECL_SIZE_UNIT (var)
623 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
624 DECL_SIZE (var)
625 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
628 /* Push the temporary variable TMP into the current binding. */
630 void
631 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
633 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
635 /* Later processing assumes that the object size is constant, which might
636 not be true at this point. Force the use of a constant upper bound in
637 this case. */
638 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
639 force_constant_size (tmp);
641 DECL_CONTEXT (tmp) = fn->decl;
642 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
644 record_vars_into (tmp, fn->decl);
647 /* Push the temporary variable TMP into the current binding. */
649 void
650 gimple_add_tmp_var (tree tmp)
652 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
654 /* Later processing assumes that the object size is constant, which might
655 not be true at this point. Force the use of a constant upper bound in
656 this case. */
657 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
658 force_constant_size (tmp);
660 DECL_CONTEXT (tmp) = current_function_decl;
661 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
663 if (gimplify_ctxp)
665 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
666 gimplify_ctxp->temps = tmp;
668 /* Mark temporaries local within the nearest enclosing parallel. */
669 if (gimplify_omp_ctxp)
671 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
672 while (ctx
673 && (ctx->region_type == ORT_WORKSHARE
674 || ctx->region_type == ORT_SIMD))
675 ctx = ctx->outer_context;
676 if (ctx)
677 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
680 else if (cfun)
681 record_vars (tmp);
682 else
684 gimple_seq body_seq;
686 /* This case is for nested functions. We need to expose the locals
687 they create. */
688 body_seq = gimple_body (current_function_decl);
689 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
695 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
696 nodes that are referenced more than once in GENERIC functions. This is
697 necessary because gimplification (translation into GIMPLE) is performed
698 by modifying tree nodes in-place, so gimplication of a shared node in a
699 first context could generate an invalid GIMPLE form in a second context.
701 This is achieved with a simple mark/copy/unmark algorithm that walks the
702 GENERIC representation top-down, marks nodes with TREE_VISITED the first
703 time it encounters them, duplicates them if they already have TREE_VISITED
704 set, and finally removes the TREE_VISITED marks it has set.
706 The algorithm works only at the function level, i.e. it generates a GENERIC
707 representation of a function with no nodes shared within the function when
708 passed a GENERIC function (except for nodes that are allowed to be shared).
710 At the global level, it is also necessary to unshare tree nodes that are
711 referenced in more than one function, for the same aforementioned reason.
712 This requires some cooperation from the front-end. There are 2 strategies:
714 1. Manual unsharing. The front-end needs to call unshare_expr on every
715 expression that might end up being shared across functions.
717 2. Deep unsharing. This is an extension of regular unsharing. Instead
718 of calling unshare_expr on expressions that might be shared across
719 functions, the front-end pre-marks them with TREE_VISITED. This will
720 ensure that they are unshared on the first reference within functions
721 when the regular unsharing algorithm runs. The counterpart is that
722 this algorithm must look deeper than for manual unsharing, which is
723 specified by LANG_HOOKS_DEEP_UNSHARING.
725 If there are only few specific cases of node sharing across functions, it is
726 probably easier for a front-end to unshare the expressions manually. On the
727 contrary, if the expressions generated at the global level are as widespread
728 as expressions generated within functions, deep unsharing is very likely the
729 way to go. */
731 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
732 These nodes model computations that must be done once. If we were to
733 unshare something like SAVE_EXPR(i++), the gimplification process would
734 create wrong code. However, if DATA is non-null, it must hold a pointer
735 set that is used to unshare the subtrees of these nodes. */
737 static tree
738 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
740 tree t = *tp;
741 enum tree_code code = TREE_CODE (t);
743 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
744 copy their subtrees if we can make sure to do it only once. */
745 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
747 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
749 else
750 *walk_subtrees = 0;
753 /* Stop at types, decls, constants like copy_tree_r. */
754 else if (TREE_CODE_CLASS (code) == tcc_type
755 || TREE_CODE_CLASS (code) == tcc_declaration
756 || TREE_CODE_CLASS (code) == tcc_constant
757 /* We can't do anything sensible with a BLOCK used as an
758 expression, but we also can't just die when we see it
759 because of non-expression uses. So we avert our eyes
760 and cross our fingers. Silly Java. */
761 || code == BLOCK)
762 *walk_subtrees = 0;
764 /* Cope with the statement expression extension. */
765 else if (code == STATEMENT_LIST)
768 /* Leave the bulk of the work to copy_tree_r itself. */
769 else
770 copy_tree_r (tp, walk_subtrees, NULL);
772 return NULL_TREE;
775 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
776 If *TP has been visited already, then *TP is deeply copied by calling
777 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
779 static tree
780 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
782 tree t = *tp;
783 enum tree_code code = TREE_CODE (t);
785 /* Skip types, decls, and constants. But we do want to look at their
786 types and the bounds of types. Mark them as visited so we properly
787 unmark their subtrees on the unmark pass. If we've already seen them,
788 don't look down further. */
789 if (TREE_CODE_CLASS (code) == tcc_type
790 || TREE_CODE_CLASS (code) == tcc_declaration
791 || TREE_CODE_CLASS (code) == tcc_constant)
793 if (TREE_VISITED (t))
794 *walk_subtrees = 0;
795 else
796 TREE_VISITED (t) = 1;
799 /* If this node has been visited already, unshare it and don't look
800 any deeper. */
801 else if (TREE_VISITED (t))
803 walk_tree (tp, mostly_copy_tree_r, data, NULL);
804 *walk_subtrees = 0;
807 /* Otherwise, mark the node as visited and keep looking. */
808 else
809 TREE_VISITED (t) = 1;
811 return NULL_TREE;
814 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
815 copy_if_shared_r callback unmodified. */
817 static inline void
818 copy_if_shared (tree *tp, void *data)
820 walk_tree (tp, copy_if_shared_r, data, NULL);
823 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
824 any nested functions. */
826 static void
827 unshare_body (tree fndecl)
829 struct cgraph_node *cgn = cgraph_get_node (fndecl);
830 /* If the language requires deep unsharing, we need a pointer set to make
831 sure we don't repeatedly unshare subtrees of unshareable nodes. */
832 struct pointer_set_t *visited
833 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
835 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
836 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
837 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
839 if (visited)
840 pointer_set_destroy (visited);
842 if (cgn)
843 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
844 unshare_body (cgn->decl);
847 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
848 Subtrees are walked until the first unvisited node is encountered. */
850 static tree
851 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
853 tree t = *tp;
855 /* If this node has been visited, unmark it and keep looking. */
856 if (TREE_VISITED (t))
857 TREE_VISITED (t) = 0;
859 /* Otherwise, don't look any deeper. */
860 else
861 *walk_subtrees = 0;
863 return NULL_TREE;
866 /* Unmark the visited trees rooted at *TP. */
868 static inline void
869 unmark_visited (tree *tp)
871 walk_tree (tp, unmark_visited_r, NULL, NULL);
874 /* Likewise, but mark all trees as not visited. */
876 static void
877 unvisit_body (tree fndecl)
879 struct cgraph_node *cgn = cgraph_get_node (fndecl);
881 unmark_visited (&DECL_SAVED_TREE (fndecl));
882 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
883 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
885 if (cgn)
886 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
887 unvisit_body (cgn->decl);
890 /* Unconditionally make an unshared copy of EXPR. This is used when using
891 stored expressions which span multiple functions, such as BINFO_VTABLE,
892 as the normal unsharing process can't tell that they're shared. */
894 tree
895 unshare_expr (tree expr)
897 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
898 return expr;
901 /* Worker for unshare_expr_without_location. */
903 static tree
904 prune_expr_location (tree *tp, int *walk_subtrees, void *)
906 if (EXPR_P (*tp))
907 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
908 else
909 *walk_subtrees = 0;
910 return NULL_TREE;
913 /* Similar to unshare_expr but also prune all expression locations
914 from EXPR. */
916 tree
917 unshare_expr_without_location (tree expr)
919 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
920 if (EXPR_P (expr))
921 walk_tree (&expr, prune_expr_location, NULL, NULL);
922 return expr;
925 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
926 contain statements and have a value. Assign its value to a temporary
927 and give it void_type_node. Return the temporary, or NULL_TREE if
928 WRAPPER was already void. */
930 tree
931 voidify_wrapper_expr (tree wrapper, tree temp)
933 tree type = TREE_TYPE (wrapper);
934 if (type && !VOID_TYPE_P (type))
936 tree *p;
938 /* Set p to point to the body of the wrapper. Loop until we find
939 something that isn't a wrapper. */
940 for (p = &wrapper; p && *p; )
942 switch (TREE_CODE (*p))
944 case BIND_EXPR:
945 TREE_SIDE_EFFECTS (*p) = 1;
946 TREE_TYPE (*p) = void_type_node;
947 /* For a BIND_EXPR, the body is operand 1. */
948 p = &BIND_EXPR_BODY (*p);
949 break;
951 case CLEANUP_POINT_EXPR:
952 case TRY_FINALLY_EXPR:
953 case TRY_CATCH_EXPR:
954 TREE_SIDE_EFFECTS (*p) = 1;
955 TREE_TYPE (*p) = void_type_node;
956 p = &TREE_OPERAND (*p, 0);
957 break;
959 case STATEMENT_LIST:
961 tree_stmt_iterator i = tsi_last (*p);
962 TREE_SIDE_EFFECTS (*p) = 1;
963 TREE_TYPE (*p) = void_type_node;
964 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
966 break;
968 case COMPOUND_EXPR:
969 /* Advance to the last statement. Set all container types to
970 void. */
971 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
973 TREE_SIDE_EFFECTS (*p) = 1;
974 TREE_TYPE (*p) = void_type_node;
976 break;
978 case TRANSACTION_EXPR:
979 TREE_SIDE_EFFECTS (*p) = 1;
980 TREE_TYPE (*p) = void_type_node;
981 p = &TRANSACTION_EXPR_BODY (*p);
982 break;
984 default:
985 /* Assume that any tree upon which voidify_wrapper_expr is
986 directly called is a wrapper, and that its body is op0. */
987 if (p == &wrapper)
989 TREE_SIDE_EFFECTS (*p) = 1;
990 TREE_TYPE (*p) = void_type_node;
991 p = &TREE_OPERAND (*p, 0);
992 break;
994 goto out;
998 out:
999 if (p == NULL || IS_EMPTY_STMT (*p))
1000 temp = NULL_TREE;
1001 else if (temp)
1003 /* The wrapper is on the RHS of an assignment that we're pushing
1004 down. */
1005 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1006 || TREE_CODE (temp) == MODIFY_EXPR);
1007 TREE_OPERAND (temp, 1) = *p;
1008 *p = temp;
1010 else
1012 temp = create_tmp_var (type, "retval");
1013 *p = build2 (INIT_EXPR, type, temp, *p);
1016 return temp;
1019 return NULL_TREE;
1022 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1023 a temporary through which they communicate. */
1025 static void
1026 build_stack_save_restore (gimple *save, gimple *restore)
1028 tree tmp_var;
1030 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1031 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1032 gimple_call_set_lhs (*save, tmp_var);
1034 *restore
1035 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1036 1, tmp_var);
1039 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1041 static enum gimplify_status
1042 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1044 tree bind_expr = *expr_p;
1045 bool old_save_stack = gimplify_ctxp->save_stack;
1046 tree t;
1047 gimple gimple_bind;
1048 gimple_seq body, cleanup;
1049 gimple stack_save;
1050 location_t start_locus = 0, end_locus = 0;
1052 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1054 /* Mark variables seen in this bind expr. */
1055 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1057 if (TREE_CODE (t) == VAR_DECL)
1059 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1061 /* Mark variable as local. */
1062 if (ctx && !DECL_EXTERNAL (t)
1063 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1064 || splay_tree_lookup (ctx->variables,
1065 (splay_tree_key) t) == NULL))
1067 if (ctx->region_type == ORT_SIMD
1068 && TREE_ADDRESSABLE (t)
1069 && !TREE_STATIC (t))
1070 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1071 else
1072 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1075 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1077 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1078 cfun->has_local_explicit_reg_vars = true;
1081 /* Preliminarily mark non-addressed complex variables as eligible
1082 for promotion to gimple registers. We'll transform their uses
1083 as we find them. */
1084 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1085 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1086 && !TREE_THIS_VOLATILE (t)
1087 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1088 && !needs_to_live_in_memory (t))
1089 DECL_GIMPLE_REG_P (t) = 1;
1092 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1093 BIND_EXPR_BLOCK (bind_expr));
1094 gimple_push_bind_expr (gimple_bind);
1096 gimplify_ctxp->save_stack = false;
1098 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1099 body = NULL;
1100 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1101 gimple_bind_set_body (gimple_bind, body);
1103 /* Source location wise, the cleanup code (stack_restore and clobbers)
1104 belongs to the end of the block, so propagate what we have. The
1105 stack_save operation belongs to the beginning of block, which we can
1106 infer from the bind_expr directly if the block has no explicit
1107 assignment. */
1108 if (BIND_EXPR_BLOCK (bind_expr))
1110 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1111 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1113 if (start_locus == 0)
1114 start_locus = EXPR_LOCATION (bind_expr);
1116 cleanup = NULL;
1117 stack_save = NULL;
1118 if (gimplify_ctxp->save_stack)
1120 gimple stack_restore;
1122 /* Save stack on entry and restore it on exit. Add a try_finally
1123 block to achieve this. */
1124 build_stack_save_restore (&stack_save, &stack_restore);
1126 gimple_set_location (stack_save, start_locus);
1127 gimple_set_location (stack_restore, end_locus);
1129 gimplify_seq_add_stmt (&cleanup, stack_restore);
1132 /* Add clobbers for all variables that go out of scope. */
1133 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1135 if (TREE_CODE (t) == VAR_DECL
1136 && !is_global_var (t)
1137 && DECL_CONTEXT (t) == current_function_decl
1138 && !DECL_HARD_REGISTER (t)
1139 && !TREE_THIS_VOLATILE (t)
1140 && !DECL_HAS_VALUE_EXPR_P (t)
1141 /* Only care for variables that have to be in memory. Others
1142 will be rewritten into SSA names, hence moved to the top-level. */
1143 && !is_gimple_reg (t)
1144 && flag_stack_reuse != SR_NONE)
1146 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1147 gimple clobber_stmt;
1148 TREE_THIS_VOLATILE (clobber) = 1;
1149 clobber_stmt = gimple_build_assign (t, clobber);
1150 gimple_set_location (clobber_stmt, end_locus);
1151 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1155 if (cleanup)
1157 gimple gs;
1158 gimple_seq new_body;
1160 new_body = NULL;
1161 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1162 GIMPLE_TRY_FINALLY);
1164 if (stack_save)
1165 gimplify_seq_add_stmt (&new_body, stack_save);
1166 gimplify_seq_add_stmt (&new_body, gs);
1167 gimple_bind_set_body (gimple_bind, new_body);
1170 gimplify_ctxp->save_stack = old_save_stack;
1171 gimple_pop_bind_expr ();
1173 gimplify_seq_add_stmt (pre_p, gimple_bind);
1175 if (temp)
1177 *expr_p = temp;
1178 return GS_OK;
1181 *expr_p = NULL_TREE;
1182 return GS_ALL_DONE;
1185 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1186 GIMPLE value, it is assigned to a new temporary and the statement is
1187 re-written to return the temporary.
1189 PRE_P points to the sequence where side effects that must happen before
1190 STMT should be stored. */
1192 static enum gimplify_status
1193 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1195 gimple ret;
1196 tree ret_expr = TREE_OPERAND (stmt, 0);
1197 tree result_decl, result;
1199 if (ret_expr == error_mark_node)
1200 return GS_ERROR;
1202 /* Implicit _Cilk_sync must be inserted right before any return statement
1203 if there is a _Cilk_spawn in the function. If the user has provided a
1204 _Cilk_sync, the optimizer should remove this duplicate one. */
1205 if (fn_contains_cilk_spawn_p (cfun))
1207 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1208 gimplify_and_add (impl_sync, pre_p);
1211 if (!ret_expr
1212 || TREE_CODE (ret_expr) == RESULT_DECL
1213 || ret_expr == error_mark_node)
1215 gimple ret = gimple_build_return (ret_expr);
1216 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1217 gimplify_seq_add_stmt (pre_p, ret);
1218 return GS_ALL_DONE;
1221 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1222 result_decl = NULL_TREE;
1223 else
1225 result_decl = TREE_OPERAND (ret_expr, 0);
1227 /* See through a return by reference. */
1228 if (TREE_CODE (result_decl) == INDIRECT_REF)
1229 result_decl = TREE_OPERAND (result_decl, 0);
1231 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1232 || TREE_CODE (ret_expr) == INIT_EXPR)
1233 && TREE_CODE (result_decl) == RESULT_DECL);
1236 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1237 Recall that aggregate_value_p is FALSE for any aggregate type that is
1238 returned in registers. If we're returning values in registers, then
1239 we don't want to extend the lifetime of the RESULT_DECL, particularly
1240 across another call. In addition, for those aggregates for which
1241 hard_function_value generates a PARALLEL, we'll die during normal
1242 expansion of structure assignments; there's special code in expand_return
1243 to handle this case that does not exist in expand_expr. */
1244 if (!result_decl)
1245 result = NULL_TREE;
1246 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1248 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1250 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1251 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1252 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1253 should be effectively allocated by the caller, i.e. all calls to
1254 this function must be subject to the Return Slot Optimization. */
1255 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1256 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1258 result = result_decl;
1260 else if (gimplify_ctxp->return_temp)
1261 result = gimplify_ctxp->return_temp;
1262 else
1264 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1266 /* ??? With complex control flow (usually involving abnormal edges),
1267 we can wind up warning about an uninitialized value for this. Due
1268 to how this variable is constructed and initialized, this is never
1269 true. Give up and never warn. */
1270 TREE_NO_WARNING (result) = 1;
1272 gimplify_ctxp->return_temp = result;
1275 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1276 Then gimplify the whole thing. */
1277 if (result != result_decl)
1278 TREE_OPERAND (ret_expr, 0) = result;
1280 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1282 ret = gimple_build_return (result);
1283 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1284 gimplify_seq_add_stmt (pre_p, ret);
1286 return GS_ALL_DONE;
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  /* The size expressions themselves may need gimplification first.  */
  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  /* ADDR must stay visible to the debugger so DECL's location can be
     recovered through it.  */
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The indirection through ADDR is always to valid storage.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Allocate the backing storage with __builtin_alloca_with_align and
     store the resulting pointer into ADDR.  */
  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
1333 /* A helper function to be called via walk_tree. Mark all labels under *TP
1334 as being forced. To be called for DECL_INITIAL of static variables. */
1336 static tree
1337 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1339 if (TYPE_P (*tp))
1340 *walk_subtrees = 0;
1341 if (TREE_CODE (*tp) == LABEL_DECL)
1342 FORCED_LABEL (*tp) = 1;
1344 return NULL_TREE;
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; anything it implies is
     emitted into SEQ_P.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Variables with non-constant size, and large automatic variables
	 under generic stack checking, get explicit (alloca-style)
	 allocation via gimplify_vla_decl.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR statement;
		 the tree wrapper is freed once gimplified.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1411 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1412 and replacing the LOOP_EXPR with goto, but if the loop contains an
1413 EXIT_EXPR, we need to append a label for it to jump to. */
1415 static enum gimplify_status
1416 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1418 tree saved_label = gimplify_ctxp->exit_label;
1419 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1421 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1423 gimplify_ctxp->exit_label = NULL_TREE;
1425 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1427 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1429 if (gimplify_ctxp->exit_label)
1430 gimplify_seq_add_stmt (pre_p,
1431 gimple_build_label (gimplify_ctxp->exit_label));
1433 gimplify_ctxp->exit_label = saved_label;
1435 *expr_p = NULL;
1436 return GS_ALL_DONE;
1439 /* Gimplify a statement list onto a sequence. These may be created either
1440 by an enlightened front-end, or by shortcut_cond_expr. */
1442 static enum gimplify_status
1443 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1445 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1447 tree_stmt_iterator i = tsi_start (*expr_p);
1449 while (!tsi_end_p (i))
1451 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1452 tsi_delink (&i);
1455 if (temp)
1457 *expr_p = temp;
1458 return GS_OK;
1461 return GS_ALL_DONE;
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* The controlling expression must be a GIMPLE value.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.
	 Nested switches each get their own label vector this way.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      /* Sort/canonicalize the collected CASE_LABEL_EXPRs and identify
	 the default case, if any.  */
      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      if (!default_case)
	{
	  gimple new_default;

	  /* No user-supplied default: synthesize one that falls out the
	     bottom of the switch body.  */
	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
					   default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    /* A body-less SWITCH_EXPR must already carry its label vector.  */
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1529 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1531 static enum gimplify_status
1532 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1534 struct gimplify_ctx *ctxp;
1535 gimple gimple_label;
1537 /* Invalid OpenMP programs can play Duff's Device type games with
1538 #pragma omp parallel. At least in the C front end, we don't
1539 detect such invalid branches until after gimplification. */
1540 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1541 if (ctxp->case_labels.exists ())
1542 break;
1544 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1545 ctxp->case_labels.safe_push (*expr_p);
1546 gimplify_seq_add_stmt (pre_p, gimple_label);
1548 return GS_ALL_DONE;
1551 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1552 if necessary. */
1554 tree
1555 build_and_jump (tree *label_p)
1557 if (label_p == NULL)
1558 /* If there's nowhere to jump, just fall through. */
1559 return NULL_TREE;
1561 if (*label_p == NULL_TREE)
1563 tree label = create_artificial_label (UNKNOWN_LOCATION);
1564 *label_p = label;
1567 return build1 (GOTO_EXPR, void_type_node, *label_p);
1570 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1571 This also involves building a label to jump to and communicating it to
1572 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1574 static enum gimplify_status
1575 gimplify_exit_expr (tree *expr_p)
1577 tree cond = TREE_OPERAND (*expr_p, 0);
1578 tree expr;
1580 expr = build_and_jump (&gimplify_ctxp->exit_label);
1581 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1582 *expr_p = expr;
1584 return GS_OK;
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral references, get_unwidened may choose a narrower mode
     that the bit-field can be read in; otherwise the canonical type is
     simply the field's declared type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1638 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1639 to foo, embed that change in the ADDR_EXPR by converting
1640 T array[U];
1641 (T *)&array
1643 &array[L]
1644 where L is the lower bound. For simplicity, only do this for constant
1645 lower bound.
1646 The constraint is that the type of &array[L] is trivially convertible
1647 to T *. */
1649 static void
1650 canonicalize_addr_expr (tree *expr_p)
1652 tree expr = *expr_p;
1653 tree addr_expr = TREE_OPERAND (expr, 0);
1654 tree datype, ddatype, pddatype;
1656 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1657 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1658 || TREE_CODE (addr_expr) != ADDR_EXPR)
1659 return;
1661 /* The addr_expr type should be a pointer to an array. */
1662 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1663 if (TREE_CODE (datype) != ARRAY_TYPE)
1664 return;
1666 /* The pointer to element type shall be trivially convertible to
1667 the expression pointer type. */
1668 ddatype = TREE_TYPE (datype);
1669 pddatype = build_pointer_type (ddatype);
1670 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1671 pddatype))
1672 return;
1674 /* The lower bound and element sizes must be constant. */
1675 if (!TYPE_SIZE_UNIT (ddatype)
1676 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1677 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1678 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1679 return;
1681 /* All checks succeeded. Build a new node to merge the cast. */
1682 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1683 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1684 NULL_TREE, NULL_TREE);
1685 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1687 /* We can have stripped a required restrict qualifier above. */
1688 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1689 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  return GS_OK;
}
/* Nonlocal VLAs seen in the current function.  Used to avoid creating
   more than one debug copy per referenced outer-function VLA.  */
static struct pointer_set_t *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes,
   chained through DECL_CHAIN.  */
static tree nonlocal_vla_vars;
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Skip workshare/simd contexts; only do this outside any
	     enclosing OpenMP region that would own the variable.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  /* pointer_set_insert returns nonzero if DECL was already seen,
	     so each nonlocal VLA gets at most one debug copy.  */
	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      /* Start from volatility of T itself, then OR in the flag from each
	 operand: any side-effecting operand taints the whole node.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the pre-modification value in a temporary; it becomes
	 the value of the whole postfix expression.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* POINTER_PLUS_EXPR has no MINUS counterpart; negate the offset
	 instead.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Emit the store now; the saved LHS temporary is the result.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2162 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2164 static void
2165 maybe_with_size_expr (tree *expr_p)
2167 tree expr = *expr_p;
2168 tree type = TREE_TYPE (expr);
2169 tree size;
2171 /* If we've already wrapped this or the type is error_mark_node, we can't do
2172 anything. */
2173 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2174 || type == error_mark_node)
2175 return;
2177 /* If the size isn't known or is a constant, we have nothing to do. */
2178 size = TYPE_SIZE_UNIT (type);
2179 if (!size || TREE_CODE (size) == INTEGER_CST)
2180 return;
2182 /* Otherwise, make a WITH_SIZE_EXPR. */
2183 size = unshare_expr (size);
2184 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2185 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2188 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2189 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2190 the CALL_EXPR. */
2192 enum gimplify_status
2193 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2195 bool (*test) (tree);
2196 fallback_t fb;
2198 /* In general, we allow lvalues for function arguments to avoid
2199 extra overhead of copying large aggregates out of even larger
2200 aggregates into temporaries only to copy the temporaries to
2201 the argument list. Make optimizers happy by pulling out to
2202 temporaries those types that fit in registers. */
2203 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2204 test = is_gimple_val, fb = fb_rvalue;
2205 else
2207 test = is_gimple_lvalue, fb = fb_either;
2208 /* Also strip a TARGET_EXPR that would force an extra copy. */
2209 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2211 tree init = TARGET_EXPR_INITIAL (*arg_p);
2212 if (init
2213 && !VOID_TYPE_P (TREE_TYPE (init)))
2214 *arg_p = init;
2218 /* If this is a variable sized type, we must remember the size. */
2219 maybe_with_size_expr (arg_p);
2221 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2222 /* Make sure arguments have the same location as the function call
2223 itself. */
2224 protected_set_expr_location (*arg_p, call_location);
2226 /* There is a sequence point before a function call. Side effects in
2227 the argument list must occur before the actual call. So, when
2228 gimplifying arguments, force gimplify_expr to use an internal
2229 post queue which is then appended to the end of PRE_P. */
2230 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2233 /* Don't fold STMT inside ORT_TARGET, because it can break code by adding decl
2234 references that weren't in the source. We'll do it during omplower pass
2235 instead. */
2237 static bool
2238 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2240 struct gimplify_omp_ctx *ctx;
2241 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2242 if (ctx->region_type == ORT_TARGET)
2243 return false;
2244 return fold_stmt (gsi);
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gimple call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  These have no
     fndecl/fntype; emit a GIMPLE_CALL tuple directly and stop.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
        {
          gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                        EXPR_LOCATION (*expr_p));
          vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
        }
      gimple call = gimple_build_call_internal_vec (ifn, vargs);
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_VA_START:
        {
          builtin_va_start_p = TRUE;
          if (call_expr_nargs (*expr_p) < 2)
            {
              error ("too few arguments to function %<va_start%>");
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }

          if (fold_builtin_next_arg (*expr_p, true))
            {
              /* Second argument is invalid; drop the call entirely.  */
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }
          break;
        }
      case BUILT_IN_LINE:
        {
          /* __builtin_LINE () folds to the current line number.  */
          expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
          *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
          return GS_OK;
        }
      case BUILT_IN_FILE:
        {
          /* __builtin_FILE () folds to the current file name.  */
          expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
          *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
          return GS_OK;
        }
      case BUILT_IN_FUNCTION:
        {
          /* __builtin_FUNCTION () folds to the enclosing function name.  */
          const char *function;
          function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
          *expr_p = build_string_literal (strlen (function) + 1, function);
          return GS_OK;
        }
      default:
        ;
      }
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P past the named parameters; afterwards P == NULL means
     all arguments matched named slots (or the decl is fully varargs).  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
          && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
          && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
        {
          tree call = *expr_p;

          --nargs;
          *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
                                          CALL_EXPR_FN (call),
                                          nargs, CALL_EXPR_ARGP (call));

          /* Copy all CALL_EXPR flags, location and block, except
             CALL_EXPR_VA_ARG_PACK flag.  */
          CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
          CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
          CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
            = CALL_EXPR_RETURN_SLOT_OPT (call);
          CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
          SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

          /* Set CALL_EXPR_VA_ARG_PACK.  */
          CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
        }
    }

  /* Finally, gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                                EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
          /* An infinite loop is considered a side effect.  */
          && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
        TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
         have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      /* Folding is deferred inside OMP target regions; see
         maybe_fold_stmt.  */
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
                                     CALL_EXPR_FN (*expr_p));

  return ret;
}
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
                 location_t locus)
{
  /* Lazily-created label for the fall-through edge when the caller
     supplied no label pointer; emitted at the end if used.  */
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

         if (a); else goto no;
         if (b) goto yes; else goto no;
         (no:) */

      if (false_label_p == NULL)
        false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

         if (a) goto yes;
         if (b) goto yes; else goto no;
         (yes:) */

      if (true_label_p == NULL)
        true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
         if (a)
           if (b) goto yes; else goto no;
         else
           if (c) goto yes; else goto no;

         Don't do this if one of the arms has void type, which can happen
         in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
         location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
                     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
                                      false_label_p, locus),
                     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
                                      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate becomes a two-way branch.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
                     build_and_jump (true_label_p),
                     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  if (local_label)
    {
      /* Emit the label we created for the short-circuit fall-through.  */
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  /* Whether each arm contains anything that must actually execute.  */
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
           if (a && b) then c
         into
           if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the && on the second 'if'.  */
          if (EXPR_HAS_LOCATION (pred))
            SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
          then_ = shortcut_cond_expr (expr);
          then_se = then_ && TREE_SIDE_EFFECTS (then_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
           if (a || b); else d
         into
           if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the || on the second 'if'.  */
          if (EXPR_HAS_LOCATION (pred))
            SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
          else_ = shortcut_cond_expr (expr);
          else_se = else_ && TREE_SIDE_EFFECTS (else_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
                            EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
                          EXPR_LOC_OR_LOC (expr, input_location));

  /* Assemble the rewritten statement list: predicate branches, then
     arm, optional jump over the else arm, the else arm, end label.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
        {
          tree last = expr_last (expr);
          t = build_and_jump (&end_label);
          if (EXPR_HAS_LOCATION (last))
            SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
          append_to_statement_list (t, &expr);
        }
      if (emit_false)
        {
          t = build1 (LABEL_EXPR, void_type_node, false_label);
          append_to_statement_list (t, &expr);
        }
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
         if x is truth_value_p.  */
      if (fn
          && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
          && call_expr_nargs (call) == 2)
        {
          tree arg = CALL_EXPR_ARG (call, 0);
          if (arg)
            {
              /* Look through the (long) cast the FE wraps around the
                 first argument of __builtin_expect.  */
              if (TREE_CODE (arg) == NOP_EXPR
                  && TREE_TYPE (arg) == TREE_TYPE (call))
                arg = TREE_OPERAND (arg, 0);
              if (truth_value_p (TREE_CODE (arg)))
                {
                  arg = gimple_boolify (arg);
                  CALL_EXPR_ARG (call, 0)
                    = fold_convert_loc (loc, TREE_TYPE (call), arg);
                }
            }
        }
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
        TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* An ANNOTATE_EXPR wraps a condition; boolify the wrapped
         operand and the annotation itself.  */
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
        {
        case annot_expr_ivdep_kind:
        case annot_expr_no_vector_kind:
        case annot_expr_vector_kind:
          TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        default:
          gcc_unreachable ();
        }

    default:
      if (COMPARISON_CLASS_P (expr))
        {
          /* These expressions always produce boolean results.  */
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        }
      /* Other expressions that get here must have boolean values, but
         might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.
     Rewriting to the non-short-circuit forms is safe here because the
     caller has verified both arms are free of side effects.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  /* Reduce both arms to GIMPLE values; the worst (minimum) status of
     the three gimplifications is the overall result.  */
  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
2914 /* Return true if evaluating EXPR could trap.
2915 EXPR is GENERIC, while tree_could_trap_p can be called
2916 only on GIMPLE. */
2918 static bool
2919 generic_expr_could_trap_p (tree expr)
2921 unsigned i, n;
2923 if (!expr || is_gimple_val (expr))
2924 return false;
2926 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2927 return true;
2929 n = TREE_OPERAND_LENGTH (expr);
2930 for (i = 0; i < n; i++)
2931 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2932 return true;
2934 return false;
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else		or		else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  /* Whether each arm was fully consumed (a hijacked goto or a
     gimplified statement list) so no synthetic label is needed.  */
  bool have_then_clause_p, have_else_clause_p;
  gimple gimple_cond;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  /* The result is what the pointer temporary refers to.  */
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  /* If the then arm is just a local goto, jump straight to its
     destination instead of creating an artificial label.  */
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  /* Likewise for the else arm.  */
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);

  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
				   label_false);

  gimplify_seq_add_stmt (&seq, gimple_cond);
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3178 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3179 to be marked addressable.
3181 We cannot rely on such an expression being directly markable if a temporary
3182 has been created by the gimplification. In this case, we create another
3183 temporary and initialize it with a copy, which will become a store after we
3184 mark it addressable. This can happen if the front-end passed us something
3185 that it could not mark addressable yet, like a Fortran pass-by-reference
3186 parameter (int) floatvar. */
3188 static void
3189 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3191 while (handled_component_p (*expr_p))
3192 expr_p = &TREE_OPERAND (*expr_p, 0);
3193 if (is_gimple_reg (*expr_p))
3195 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3196 DECL_GIMPLE_REG_P (var) = 0;
3197 *expr_p = var;
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.

   EXPR_P points to the MODIFY_EXPR; SIZE is the number of bytes to copy.
   If WANT_VALUE is true, the result of the expression is needed, so the
   call's return value (the destination pointer) is captured and an
   indirection through it is returned in *EXPR_P.  Emitted statements are
   appended to SEQ_P.  Always returns GS_ALL_DONE.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gimple gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy() -- memcpy returns the destination pointer, so
	 dereferencing the call's LHS yields the stored value.  */
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  /* Value not needed: emit the call as a statement and clear *EXPR_P.  */
  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.

   EXPR_P points to the MODIFY_EXPR; SIZE is the number of bytes to clear.
   If WANT_VALUE is true, an indirection through memset's returned pointer
   is stored in *EXPR_P so the zeroed object can be used as a value.
   Emitted statements go to SEQ_P.  Always returns GS_ALL_DONE.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gimple gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() -- memset returns the destination pointer, so an
	 INDIRECT_REF through the call's LHS yields the cleared object.  */
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  /* Value not needed: emit the call as a statement and clear *EXPR_P.  */
  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Context passed to the walk_tree callback gimplify_init_ctor_preeval_1,
   describing the LHS of the assignment being checked for overlap.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
/* walk_tree callback: TP is the subtree being examined, XDATA is a
   gimplify_init_ctor_preeval_data describing the LHS.  Returns non-null
   (stopping the walk) on a potential overlap, NULL to keep walking.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check every pointer-typed parameter of the callee for a possible
	 aliasing conflict with the LHS.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and decls contain nothing further worth walking into.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   EXPR_P is a constructor element (recursing into nested CONSTRUCTORs);
   gimplified statements are appended to PRE_P/POST_P.  On gimplification
   error *EXPR_P is set to NULL.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

/* OBJECT is the array being initialized, [LOWER, UPPER] the inclusive
   index range, VALUE the initializer for each element, ARRAY_ELT_TYPE the
   element type, PRE_P the output sequence, and CLEARED whether the whole
   object was already zeroed.  */

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type, NULL);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3497 /* Return true if FDECL is accessing a field that is zero sized. */
3499 static bool
3500 zero_sized_field_decl (const_tree fdecl)
3502 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3503 && integer_zerop (DECL_SIZE (fdecl)))
3504 return true;
3505 return false;
3508 /* Return true if TYPE is zero sized. */
3510 static bool
3511 zero_sized_type (const_tree type)
3513 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3514 && integer_zerop (TYPE_SIZE (type)))
3515 return true;
3516 return false;
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, remember the element type so ARRAY_REFs can be built.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* Zero stores are redundant if the object was already cleared.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested constructors, except vector CONSTRUCTORs,
	 which remain valid GIMPLE rhs as-is.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3608 /* Return the appropriate RHS predicate for this LHS. */
3610 gimple_predicate
3611 rhs_predicate_for (tree lhs)
3613 if (is_gimple_reg (lhs))
3614 return is_gimple_reg_rhs_or_call;
3615 else
3616 return is_gimple_mem_rhs_or_call;
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   EXPR_P points to the COMPOUND_LITERAL_EXPR; PRE_P receives the emitted
   DECL_EXPR.  GIMPLE_TEST_F and FALLBACK are the predicate and fallback
   from the enclosing gimplify_expr call, used to decide whether the
   initializer can be substituted directly.  Returns GS_OK.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.

   The original CONSTRUCTOR is never modified in place: the first time an
   element changes, the node and its element vector are copied
   (copy-on-write), so callers holding ORIG_CTOR are unaffected.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* A non-addressable compound literal whose initializer is
	     itself a CONSTRUCTOR can be replaced by that initializer.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* First change: make a private copy before mutating.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.

   EXPR_P points to a MODIFY_EXPR/INIT_EXPR whose RHS is a CONSTRUCTOR.
   Statements are emitted to PRE_P/POST_P.  If WANT_VALUE, *EXPR_P is set
   to the initialized object on success.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* Gimplify the LHS first (unless we are only probing for temporaries).  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    /* Don't reduce an initializer constant even if we can't
	       make a VECTOR_CST.  It won't do anything for us, and it'll
	       prevent us from representing it as a single constant.  */
	    if (initializer_constant_valid_p (ctor, type))
	      break;

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gimple init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Thin wrapper: all the folding work is done by
     gimple_fold_indirect_ref.  */
  return gimple_fold_indirect_ref (t);
}
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   *EXPR_P is the whole assignment; *FROM_P/*TO_P point at its rhs/lhs
   operands.  Returns GS_UNHANDLED when no transformation applied, and
   otherwise the status of the transformation performed.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Iterate to a fixed point: each rewrite may expose another case.  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  Dry run: pre_p/post_p are NULL so nothing
		 is emitted yet.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Preserve the volatility of the original indirection on
		   the simplified form.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* Rewrite "lhs = c ? a : b" as "c ? (lhs = a) : (lhs = b)",
		 skipping arms that are already void.  */
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;

	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized. */
		use_target = true;
	      else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* Push the whole assignment down to the wrapper's value slot.  */
	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
4374 /* Return true if T looks like a valid GIMPLE statement. */
4376 static bool
4377 is_gimple_stmt (tree t)
4379 const enum tree_code code = TREE_CODE (t);
4381 switch (code)
4383 case NOP_EXPR:
4384 /* The only valid NOP_EXPR is the empty statement. */
4385 return IS_EMPTY_STMT (t);
4387 case BIND_EXPR:
4388 case COND_EXPR:
4389 /* These are only valid if they're void. */
4390 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4392 case SWITCH_EXPR:
4393 case GOTO_EXPR:
4394 case RETURN_EXPR:
4395 case LABEL_EXPR:
4396 case CASE_LABEL_EXPR:
4397 case TRY_CATCH_EXPR:
4398 case TRY_FINALLY_EXPR:
4399 case EH_FILTER_EXPR:
4400 case CATCH_EXPR:
4401 case ASM_EXPR:
4402 case STATEMENT_LIST:
4403 case OMP_PARALLEL:
4404 case OMP_FOR:
4405 case OMP_SIMD:
4406 case CILK_SIMD:
4407 case OMP_DISTRIBUTE:
4408 case OMP_SECTIONS:
4409 case OMP_SECTION:
4410 case OMP_SINGLE:
4411 case OMP_MASTER:
4412 case OMP_TASKGROUP:
4413 case OMP_ORDERED:
4414 case OMP_CRITICAL:
4415 case OMP_TASK:
4416 /* These are always void. */
4417 return true;
4419 case CALL_EXPR:
4420 case MODIFY_EXPR:
4421 case PREDICT_EXPR:
4422 /* These are valid regardless of their type. */
4423 return true;
4425 default:
4426 return false;
4431 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4432 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4433 DECL_GIMPLE_REG_P set.
4435 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4436 other, unmodified part of the complex object just before the total store.
4437 As a consequence, if the object is still uninitialized, an undefined value
4438 will be loaded into a register, which may result in a spurious exception
4439 if the register is floating-point and the value happens to be a signaling
4440 NaN for example. Then the fully-fledged complex operations lowering pass
4441 followed by a DCE pass are necessary in order to fix things up. */
4443 static enum gimplify_status
4444 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4445 bool want_value)
4447 enum tree_code code, ocode;
4448 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4450 lhs = TREE_OPERAND (*expr_p, 0);
4451 rhs = TREE_OPERAND (*expr_p, 1);
4452 code = TREE_CODE (lhs);
4453 lhs = TREE_OPERAND (lhs, 0);
4455 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4456 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4457 TREE_NO_WARNING (other) = 1;
4458 other = get_formal_tmp_var (other, pre_p);
4460 realpart = code == REALPART_EXPR ? rhs : other;
4461 imagpart = code == REALPART_EXPR ? other : rhs;
4463 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4464 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4465 else
4466 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4468 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4469 *expr_p = (want_value) ? rhs : NULL_TREE;
4471 return GS_ALL_DONE;
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value
		  && (TREE_CODE (*to_p) == VAR_DECL
		      || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* Gimplify the lhs first.  */
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
  if (!gimplify_ctxp->into_ssa
      && TREE_CODE (*from_p) == VAR_DECL
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p))
    {
      /* Give the artificial temporary a printable name derived from the
	 user variable it feeds, and record the debug-expr link.  */
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
   }

  /* A volatile lhs must not be re-read to produce the value; snapshot
     the rhs into a temporary instead.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
      CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
      STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
      tree fndecl = get_callee_fndecl (*from_p);
      /* __builtin_expect with its three arguments becomes an internal
	 function call so later passes keep the hint cheaply.  */
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	  && call_expr_nargs (*from_p) == 3)
	assign = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
					     CALL_EXPR_ARG (*from_p, 0),
					     CALL_EXPR_ARG (*from_p, 1),
					     CALL_EXPR_ARG (*from_p, 2));
      else
	{
	  assign = gimple_build_call_from_tree (*from_p);
	  gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
	}
      notice_special_calls (assign);
      if (!gimple_call_noreturn_p (assign))
	gimple_call_set_lhs (assign, *to_p);
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      /* For a volatile lhs, return the saved rhs rather than re-reading.  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4678 /* Gimplify a comparison between two variable-sized objects. Do this
4679 with a call to BUILT_IN_MEMCMP. */
4681 static enum gimplify_status
4682 gimplify_variable_sized_compare (tree *expr_p)
4684 location_t loc = EXPR_LOCATION (*expr_p);
4685 tree op0 = TREE_OPERAND (*expr_p, 0);
4686 tree op1 = TREE_OPERAND (*expr_p, 1);
4687 tree t, arg, dest, src, expr;
4689 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4690 arg = unshare_expr (arg);
4691 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4692 src = build_fold_addr_expr_loc (loc, op1);
4693 dest = build_fold_addr_expr_loc (loc, op0);
4694 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4695 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4697 expr
4698 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4699 SET_EXPR_LOCATION (expr, loc);
4700 *expr_p = expr;
4702 return GS_OK;
4705 /* Gimplify a comparison between two aggregate objects of integral scalar
4706 mode as a comparison between the bitwise equivalent scalar values. */
4708 static enum gimplify_status
4709 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4711 location_t loc = EXPR_LOCATION (*expr_p);
4712 tree op0 = TREE_OPERAND (*expr_p, 0);
4713 tree op1 = TREE_OPERAND (*expr_p, 1);
4715 tree type = TREE_TYPE (op0);
4716 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4718 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4719 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4721 *expr_p
4722 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4724 return GS_OK;
4727 /* Gimplify an expression sequence. This function gimplifies each
4728 expression and rewrites the original expression with the last
4729 expression of the sequence in GIMPLE form.
4731 PRE_P points to the list where the side effects for all the
4732 expressions in the sequence will be emitted.
4734 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4736 static enum gimplify_status
4737 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4739 tree t = *expr_p;
4743 tree *sub_p = &TREE_OPERAND (t, 0);
4745 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4746 gimplify_compound_expr (sub_p, pre_p, false);
4747 else
4748 gimplify_stmt (sub_p, pre_p);
4750 t = TREE_OPERAND (t, 1);
4752 while (TREE_CODE (t) == COMPOUND_EXPR);
4754 *expr_p = t;
4755 if (want_value)
4756 return GS_OK;
4757 else
4759 gimplify_stmt (expr_p, pre_p);
4760 return GS_ALL_DONE;
4764 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4765 gimplify. After gimplification, EXPR_P will point to a new temporary
4766 that holds the original value of the SAVE_EXPR node.
4768 PRE_P points to the list where side effects that must happen before
4769 *EXPR_P should be stored. */
4771 static enum gimplify_status
4772 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4774 enum gimplify_status ret = GS_ALL_DONE;
4775 tree val;
4777 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4778 val = TREE_OPERAND (*expr_p, 0);
4780 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4781 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4783 /* The operand may be a void-valued expression such as SAVE_EXPRs
4784 generated by the Java frontend for class initialization. It is
4785 being executed only for its side-effects. */
4786 if (TREE_TYPE (val) == void_type_node)
4788 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4789 is_gimple_stmt, fb_none);
4790 val = NULL;
4792 else
4793 val = get_initialized_tmp_var (val, pre_p, post_p);
4795 TREE_OPERAND (*expr_p, 0) = val;
4796 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4799 *expr_p = val;
4801 return ret;
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

        if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
        *expr_p = op00;
        ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
         if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gimple stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  /* Output constraints are remembered so matching ("0", "1", ...) input
     constraints can be validated against them below.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* First pass: validate and gimplify each output operand.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First measure the rewritten constraint string ...  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* ... then build it, alternative by alternative.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  /* Register alternative: refer back to output I.  */
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  /* Non-register alternative: copy it unchanged.  */
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  /* Second pass: validate and gimplify each input operand.  I continues
     counting so diagnostics use overall operand numbers.  */
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Increment/decrement expressions can never be directly
	     addressable memory inputs; force an error below.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
		input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  link_next = NULL_TREE;
  /* Clobbers need no gimplification; just collect them.  */
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  /* Likewise for goto labels of asm goto.  */
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
5161 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5162 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5163 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5164 return to this function.
5166 FIXME should we complexify the prequeue handling instead? Or use flags
5167 for all the cleanups and let the optimizer tighten them up? The current
5168 code seems pretty fragile; it will break on a cleanup within any
5169 non-conditional nesting. But any such nesting would be broken, anyway;
5170 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5171 and continues out of it. We can do that at the RTL level, though, so
5172 having an optimizer to tighten up try/finally regions would be a Good
5173 Thing. */
5175 static enum gimplify_status
5176 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5178 gimple_stmt_iterator iter;
5179 gimple_seq body_sequence = NULL;
5181 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5183 /* We only care about the number of conditions between the innermost
5184 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5185 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5186 int old_conds = gimplify_ctxp->conditions;
5187 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5188 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5189 gimplify_ctxp->conditions = 0;
5190 gimplify_ctxp->conditional_cleanups = NULL;
5191 gimplify_ctxp->in_cleanup_point_expr = true;
/* Gimplify the guarded body with the reset context so that any cleanups
   emitted while doing so attach to this cleanup point.  */
5193 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5195 gimplify_ctxp->conditions = old_conds;
5196 gimplify_ctxp->conditional_cleanups = old_cleanups;
5197 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
/* Walk the gimplified body and convert each GIMPLE_WITH_CLEANUP_EXPR
   into a GIMPLE_TRY guarding the statements that follow it: TRY_CATCH
   for EH-only cleanups, TRY_FINALLY otherwise.  */
5199 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5201 gimple wce = gsi_stmt (iter);
5203 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5205 if (gsi_one_before_end_p (iter))
/* The WCE is the last statement, so there is nothing for a TRY to
   guard; emit the cleanup inline (unless it is EH-only, in which
   case it can simply be dropped) and remove the WCE.  */
5207 /* Note that gsi_insert_seq_before and gsi_remove do not
5208 scan operands, unlike some other sequence mutators. */
5209 if (!gimple_wce_cleanup_eh_only (wce))
5210 gsi_insert_seq_before_without_update (&iter,
5211 gimple_wce_cleanup (wce),
5212 GSI_SAME_STMT);
5213 gsi_remove (&iter, true);
5214 break;
5216 else
5218 gimple_statement_try *gtry;
5219 gimple_seq seq;
5220 enum gimple_try_flags kind;
5222 if (gimple_wce_cleanup_eh_only (wce))
5223 kind = GIMPLE_TRY_CATCH;
5224 else
5225 kind = GIMPLE_TRY_FINALLY;
5226 seq = gsi_split_seq_after (iter);
5228 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5229 /* Do not use gsi_replace here, as it may scan operands.
5230 We want to do a simple structural modification only. */
5231 gsi_set_stmt (&iter, gtry);
/* Continue the scan inside the new TRY's eval sequence so nested
   WCEs there are also converted.  */
5232 iter = gsi_start (gtry->eval);
5235 else
5236 gsi_next (&iter);
5239 gimplify_seq_add_seq (pre_p, body_sequence);
/* TEMP was created by voidify_wrapper_expr when the CLEANUP_POINT_EXPR
   had a value; hand it back as the replacement expression.  */
5240 if (temp)
5242 *expr_p = temp;
5243 return GS_OK;
5245 else
5247 *expr_p = NULL;
5248 return GS_ALL_DONE;
5252 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5253 is the cleanup action required. EH_ONLY is true if the cleanup should
5254 only be executed if an exception is thrown, not on normal exit. */
5256 static void
5257 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5259 gimple wce;
5260 gimple_seq cleanup_stmts = NULL;
5262 /* Errors can result in improperly nested cleanups. Which results in
5263 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5264 if (seen_error ())
5265 return;
5267 if (gimple_conditional_context ())
5269 /* If we're in a conditional context, this is more complex. We only
5270 want to run the cleanup if we actually ran the initialization that
5271 necessitates it, but we want to run it after the end of the
5272 conditional context. So we wrap the try/finally around the
5273 condition and use a flag to determine whether or not to actually
5274 run the destructor. Thus
5276 test ? f(A()) : 0
5278 becomes (approximately)
5280 flag = 0;
5281 try {
5282 if (test) { A::A(temp); flag = 1; val = f(temp); }
5283 else { val = 0; }
5284 } finally {
5285 if (flag) A::~A(temp);
5289 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5290 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5291 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
/* Guard the cleanup with the flag, then gimplify it and wrap it in a
   GIMPLE_WITH_CLEANUP_EXPR attached to the conditional-cleanups queue;
   the flag is set to true in *PRE_P at the point of initialization.
   Note EH_ONLY is not propagated on this path — the COND_EXPR guard
   subsumes it (assumption from the visible code; confirm upstream).  */
5293 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5294 gimplify_stmt (&cleanup, &cleanup_stmts);
5295 wce = gimple_build_wce (cleanup_stmts);
5297 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5298 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5299 gimplify_seq_add_stmt (pre_p, ftrue);
5301 /* Because of this manipulation, and the EH edges that jump
5302 threading cannot redirect, the temporary (VAR) will appear
5303 to be used uninitialized. Don't warn. */
5304 TREE_NO_WARNING (var) = 1;
5306 else
/* Unconditional context: just gimplify the cleanup and queue a WCE,
   recording whether it should run only on the EH path.  */
5308 gimplify_stmt (&cleanup, &cleanup_stmts);
5309 wce = gimple_build_wce (cleanup_stmts);
5310 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5311 gimplify_seq_add_stmt (pre_p, wce);
5315 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5317 static enum gimplify_status
5318 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5320 tree targ = *expr_p;
5321 tree temp = TARGET_EXPR_SLOT (targ);
5322 tree init = TARGET_EXPR_INITIAL (targ);
5323 enum gimplify_status ret;
/* A non-NULL initializer means this TARGET_EXPR has not been expanded
   yet; otherwise we must have seen (and expanded) it before.  */
5325 if (init)
5327 tree cleanup = NULL_TREE;
5329 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5330 to the temps list. Handle also variable length TARGET_EXPRs. */
5331 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5333 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5334 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5335 gimplify_vla_decl (temp, pre_p);
5337 else
5338 gimple_add_tmp_var (temp);
5340 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5341 expression is supposed to initialize the slot. */
5342 if (VOID_TYPE_P (TREE_TYPE (init)))
5343 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5344 else
/* Otherwise wrap the initializer in an INIT_EXPR assigning the slot.
   The temporary INIT_EXPR node is freed immediately after
   gimplification since nothing else references it.  */
5346 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5347 init = init_expr;
5348 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5349 init = NULL;
5350 ggc_free (init_expr);
5352 if (ret == GS_ERROR)
5354 /* PR c++/28266 Make sure this is expanded only once. */
5355 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5356 return GS_ERROR;
5358 if (init)
5359 gimplify_and_add (init, pre_p);
5361 /* If needed, push the cleanup for the temp. */
5362 if (TARGET_EXPR_CLEANUP (targ))
5364 if (CLEANUP_EH_ONLY (targ))
5365 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5366 CLEANUP_EH_ONLY (targ), pre_p);
5367 else
5368 cleanup = TARGET_EXPR_CLEANUP (targ);
5371 /* Add a clobber for the temporary going out of scope, like
5372 gimplify_bind_expr. */
5373 if (gimplify_ctxp->in_cleanup_point_expr
5374 && needs_to_live_in_memory (temp)
5375 && flag_stack_reuse == SR_ALL)
/* An empty volatile CONSTRUCTOR assigned to TEMP marks its death so
   the stack slot can be reused; chain it after any pending cleanup.  */
5377 tree clobber = build_constructor (TREE_TYPE (temp),
5378 NULL);
5379 TREE_THIS_VOLATILE (clobber) = true;
5380 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5381 if (cleanup)
5382 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5383 clobber);
5384 else
5385 cleanup = clobber;
5388 if (cleanup)
5389 gimple_push_cleanup (temp, cleanup, false, pre_p);
5391 /* Only expand this once. */
5392 TREE_OPERAND (targ, 3) = init;
5393 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5395 else
5396 /* We should have expanded this before. */
5397 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
/* The TARGET_EXPR is replaced by its slot variable.  */
5399 *expr_p = temp;
5400 return GS_OK;
5403 /* Gimplification of expression trees. */
5405 /* Gimplify an expression which appears at statement context. The
5406 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5407 NULL, a new sequence is allocated.
5409 Return true if we actually added a statement to the queue. */
5411 bool
5412 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5414 gimple_seq_node last;
/* Remember the current tail of the sequence so we can tell afterwards
   whether gimplification appended anything.  */
5416 last = gimple_seq_last (*seq_p);
5417 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5418 return last != gimple_seq_last (*seq_p);
5421 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5422 to CTX. If entries already exist, force them to be some flavor of private.
5423 If there is no enclosing parallel, do nothing. */
5425 void
5426 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5428 splay_tree_node n;
5430 if (decl == NULL || !DECL_P (decl))
5431 return;
/* Walk outward through the enclosing OMP contexts (do/while over
   ctx->outer_context, see the loop tail below).  If DECL is already
   recorded in a context, coerce it to a private-ish flavor; otherwise
   register it as firstprivate (or mapped, for target regions).  */
5435 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5436 if (n != NULL)
5438 if (n->value & GOVD_SHARED)
5439 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5440 else if (n->value & GOVD_MAP)
5441 n->value |= GOVD_MAP_TO_ONLY;
5442 else
5443 return;
5445 else if (ctx->region_type == ORT_TARGET)
5446 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5447 else if (ctx->region_type != ORT_WORKSHARE
5448 && ctx->region_type != ORT_SIMD
5449 && ctx->region_type != ORT_TARGET_DATA)
5450 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5452 ctx = ctx->outer_context;
5454 while (ctx);
5457 /* Similarly for each of the type sizes of TYPE. */
5459 static void
5460 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5462 if (type == NULL || type == error_mark_node)
5463 return;
5464 type = TYPE_MAIN_VARIANT (type);
/* Each type is processed at most once per context; the pointer set
   both deduplicates work and breaks cycles in recursive types.  */
5466 if (pointer_set_insert (ctx->privatized_types, type))
5467 return;
/* Recurse into the size-bearing sub-expressions of TYPE, depending on
   its tree code, firstprivatizing every size/bound VAR_DECL found.  */
5469 switch (TREE_CODE (type))
5471 case INTEGER_TYPE:
5472 case ENUMERAL_TYPE:
5473 case BOOLEAN_TYPE:
5474 case REAL_TYPE:
5475 case FIXED_POINT_TYPE:
5476 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5477 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5478 break;
5480 case ARRAY_TYPE:
5481 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5482 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5483 break;
5485 case RECORD_TYPE:
5486 case UNION_TYPE:
5487 case QUAL_UNION_TYPE:
5489 tree field;
5490 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5491 if (TREE_CODE (field) == FIELD_DECL)
5493 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5494 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5497 break;
5499 case POINTER_TYPE:
5500 case REFERENCE_TYPE:
5501 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5502 break;
5504 default:
5505 break;
/* Finally handle the overall size expressions of TYPE itself, and let
   the front end firstprivatize any language-specific size fields.  */
5508 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5509 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5510 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5513 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5515 static void
5516 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5518 splay_tree_node n;
5519 unsigned int nflags;
5520 tree t;
5522 if (error_operand_p (decl))
5523 return;
5525 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5526 there are constructors involved somewhere. */
5527 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5528 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5529 flags |= GOVD_SEEN;
5531 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
/* If DECL is already recorded (and not merely GOVD_ALIGNED), merge the
   new flags into the existing entry and return early.  */
5532 if (n != NULL && n->value != GOVD_ALIGNED)
5534 /* We shouldn't be re-adding the decl with the same data
5535 sharing class. */
5536 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5537 /* The only combination of data sharing classes we should see is
5538 FIRSTPRIVATE and LASTPRIVATE. */
5539 nflags = n->value | flags;
5540 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5541 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5542 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5543 n->value = nflags;
5544 return;
5547 /* When adding a variable-sized variable, we have to handle all sorts
5548 of additional bits of data: the pointer replacement variable, and
5549 the parameters of the type. */
5550 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5552 /* Add the pointer replacement variable as PRIVATE if the variable
5553 replacement is private, else FIRSTPRIVATE since we'll need the
5554 address of the original variable either for SHARED, or for the
5555 copy into or out of the context. */
5556 if (!(flags & GOVD_LOCAL))
5558 nflags = flags & GOVD_MAP
5559 ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
5560 : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5561 nflags |= flags & GOVD_SEEN;
/* A VLA's DECL_VALUE_EXPR is *ptr; record the underlying pointer
   variable with the flavor computed above.  */
5562 t = DECL_VALUE_EXPR (decl);
5563 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5564 t = TREE_OPERAND (t, 0);
5565 gcc_assert (DECL_P (t));
5566 omp_add_variable (ctx, t, nflags);
5569 /* Add all of the variable and type parameters (which should have
5570 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5571 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5572 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5573 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5575 /* The variable-sized variable itself is never SHARED, only some form
5576 of PRIVATE. The sharing would take place via the pointer variable
5577 which we remapped above. */
5578 if (flags & GOVD_SHARED)
5579 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5580 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5582 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5583 alloca statement we generate for the variable, so make sure it
5584 is available. This isn't automatically needed for the SHARED
5585 case, since we won't be allocating local storage then.
5586 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5587 in this case omp_notice_variable will be called later
5588 on when it is gimplified. */
5589 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5590 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5591 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5593 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5594 && lang_hooks.decls.omp_privatize_by_reference (decl))
5596 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5598 /* Similar to the direct variable sized case above, we'll need the
5599 size of references being privatized. */
5600 if ((flags & GOVD_SHARED) == 0)
5602 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5603 if (TREE_CODE (t) != INTEGER_CST)
5604 omp_notice_variable (ctx, t, true);
/* Record (or update) DECL with the final flag set.  N can be non-NULL
   here only for the GOVD_ALIGNED case excluded above.  */
5608 if (n != NULL)
5609 n->value |= flags;
5610 else
5611 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5614 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5615 This just prints out diagnostics about threadprivate variable uses
5616 in untied tasks. If DECL2 is non-NULL, prevent this warning
5617 on that variable. */
5619 static bool
5620 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5621 tree decl2)
5623 splay_tree_node n;
5624 struct gimplify_omp_ctx *octx;
/* Threadprivate variables are not allowed inside target regions: scan
   outward for one and diagnose the first use (the splay-tree insert
   suppresses duplicate diagnostics for later uses).  */
5626 for (octx = ctx; octx; octx = octx->outer_context)
5627 if (octx->region_type == ORT_TARGET)
5629 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5630 if (n == NULL)
5632 error ("threadprivate variable %qE used in target region",
5633 DECL_NAME (decl));
5634 error_at (octx->location, "enclosing target region");
5635 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5637 if (decl2)
5638 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
/* Likewise, diagnose threadprivate uses inside untied tasks, once per
   variable per task context.  Always returns false: threadprivate
   variables are never remapped.  */
5641 if (ctx->region_type != ORT_UNTIED_TASK)
5642 return false;
5643 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5644 if (n == NULL)
5646 error ("threadprivate variable %qE used in untied task",
5647 DECL_NAME (decl));
5648 error_at (ctx->location, "enclosing task");
5649 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5651 if (decl2)
5652 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5653 return false;
5656 /* Record the fact that DECL was used within the OpenMP context CTX.
5657 IN_CODE is true when real code uses DECL, and false when we should
5658 merely emit default(none) errors. Return true if DECL is going to
5659 be remapped and thus DECL shouldn't be gimplified into its
5660 DECL_VALUE_EXPR (if any). */
5662 static bool
5663 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5665 splay_tree_node n;
5666 unsigned flags = in_code ? GOVD_SEEN : 0;
5667 bool ret = false, shared;
5669 if (error_operand_p (decl))
5670 return false;
5672 /* Threadprivate variables are predetermined. */
5673 if (is_global_var (decl))
5675 if (DECL_THREAD_LOCAL_P (decl))
5676 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
/* A global whose DECL_VALUE_EXPR is rooted in a thread-local decl
   (e.g. an emulated TLS wrapper) is also treated as threadprivate.  */
5678 if (DECL_HAS_VALUE_EXPR_P (decl))
5680 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5682 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5683 return omp_notice_threadprivate_variable (ctx, decl, value);
5687 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
/* In a target region, unseen variables are implicitly mapped; decls
   without a mappable type are diagnosed but still added (with
   GOVD_EXPLICIT) to avoid cascading errors.  */
5688 if (ctx->region_type == ORT_TARGET)
5690 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5691 if (n == NULL)
5693 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5695 error ("%qD referenced in target region does not have "
5696 "a mappable type", decl);
5697 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5699 else
5700 omp_add_variable (ctx, decl, GOVD_MAP | flags);
5702 else
5704 /* If nothing changed, there's nothing left to do. */
5705 if ((n->value & flags) == flags)
5706 return ret;
5707 n->value |= flags;
5709 goto do_outer;
/* First sighting of DECL in this context: compute its implicit
   data-sharing attribute from the applicable default clause.  */
5712 if (n == NULL)
5714 enum omp_clause_default_kind default_kind, kind;
5715 struct gimplify_omp_ctx *octx;
5717 if (ctx->region_type == ORT_WORKSHARE
5718 || ctx->region_type == ORT_SIMD
5719 || ctx->region_type == ORT_TARGET_DATA)
5720 goto do_outer;
5722 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5723 remapped firstprivate instead of shared. To some extent this is
5724 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5725 default_kind = ctx->default_kind;
5726 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5727 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5728 default_kind = kind;
5730 switch (default_kind)
5732 case OMP_CLAUSE_DEFAULT_NONE:
/* default(none): any variable without an explicit clause is an
   error; report against the innermost enclosing construct.  */
5733 if ((ctx->region_type & ORT_PARALLEL) != 0)
5735 error ("%qE not specified in enclosing parallel",
5736 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5737 error_at (ctx->location, "enclosing parallel");
5739 else if ((ctx->region_type & ORT_TASK) != 0)
5741 error ("%qE not specified in enclosing task",
5742 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5743 error_at (ctx->location, "enclosing task");
5745 else if (ctx->region_type == ORT_TEAMS)
5747 error ("%qE not specified in enclosing teams construct",
5748 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5749 error_at (ctx->location, "enclosing teams construct");
5751 else
5752 gcc_unreachable ();
5753 /* FALLTHRU */
5754 case OMP_CLAUSE_DEFAULT_SHARED:
5755 flags |= GOVD_SHARED;
5756 break;
5757 case OMP_CLAUSE_DEFAULT_PRIVATE:
5758 flags |= GOVD_PRIVATE;
5759 break;
5760 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5761 flags |= GOVD_FIRSTPRIVATE;
5762 break;
5763 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5764 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5765 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5766 if (ctx->outer_context)
5767 omp_notice_variable (ctx->outer_context, decl, in_code);
5768 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5770 splay_tree_node n2;
5772 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5773 continue;
5774 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5775 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5777 flags |= GOVD_FIRSTPRIVATE;
5778 break;
5780 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5781 break;
5783 if (flags & GOVD_FIRSTPRIVATE)
5784 break;
5785 if (octx == NULL
5786 && (TREE_CODE (decl) == PARM_DECL
5787 || (!is_global_var (decl)
5788 && DECL_CONTEXT (decl) == current_function_decl)))
5790 flags |= GOVD_FIRSTPRIVATE;
5791 break;
5793 flags |= GOVD_SHARED;
5794 break;
5795 default:
5796 gcc_unreachable ();
5799 if ((flags & GOVD_PRIVATE)
5800 && lang_hooks.decls.omp_private_outer_ref (decl))
5801 flags |= GOVD_PRIVATE_OUTER_REF;
5803 omp_add_variable (ctx, decl, flags);
5805 shared = (flags & GOVD_SHARED) != 0;
5806 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5807 goto do_outer;
/* DECL was already recorded; if this is the first real use of a
   variable-sized decl, mark its pointer replacement as seen too.  */
5810 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5811 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5812 && DECL_SIZE (decl)
5813 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5815 splay_tree_node n2;
5816 tree t = DECL_VALUE_EXPR (decl);
5817 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5818 t = TREE_OPERAND (t, 0);
5819 gcc_assert (DECL_P (t));
5820 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5821 n2->value |= GOVD_SEEN;
5824 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5825 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5827 /* If nothing changed, there's nothing left to do. */
5828 if ((n->value & flags) == flags)
5829 return ret;
5830 flags |= n->value;
5831 n->value = flags;
5833 do_outer:
5834 /* If the variable is private in the current context, then we don't
5835 need to propagate anything to an outer context. */
5836 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5837 return ret;
5838 if (ctx->outer_context
5839 && omp_notice_variable (ctx->outer_context, decl, in_code))
5840 return true;
5841 return ret;
5844 /* Verify that DECL is private within CTX. If there's specific information
5845 to the contrary in the innermost scope, generate an error. */
5847 static bool
5848 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
5850 splay_tree_node n;
/* SIMD encodes the construct kind for diagnostics: 0 = non-simd loop,
   1 and 2 select different wordings below (assumption from the visible
   comparisons `simd == 1` / `simd == 2`; confirm at the callers).  */
5852 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5853 if (n != NULL)
5855 if (n->value & GOVD_SHARED)
5857 if (ctx == gimplify_omp_ctxp)
5859 if (simd)
5860 error ("iteration variable %qE is predetermined linear",
5861 DECL_NAME (decl));
5862 else
5863 error ("iteration variable %qE should be private",
5864 DECL_NAME (decl));
/* Downgrade to private so the error is not repeated.  */
5865 n->value = GOVD_PRIVATE;
5866 return true;
5868 else
5869 return false;
5871 else if ((n->value & GOVD_EXPLICIT) != 0
5872 && (ctx == gimplify_omp_ctxp
5873 || (ctx->region_type == ORT_COMBINED_PARALLEL
5874 && gimplify_omp_ctxp->outer_context == ctx)))
5876 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5877 error ("iteration variable %qE should not be firstprivate",
5878 DECL_NAME (decl));
5879 else if ((n->value & GOVD_REDUCTION) != 0)
5880 error ("iteration variable %qE should not be reduction",
5881 DECL_NAME (decl));
5882 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
5883 error ("iteration variable %qE should not be lastprivate",
5884 DECL_NAME (decl));
5885 else if (simd && (n->value & GOVD_PRIVATE) != 0)
5886 error ("iteration variable %qE should not be private",
5887 DECL_NAME (decl));
5888 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
5889 error ("iteration variable %qE is predetermined linear",
5890 DECL_NAME (decl));
5892 return (ctx == gimplify_omp_ctxp
5893 || (ctx->region_type == ORT_COMBINED_PARALLEL
5894 && gimplify_omp_ctxp->outer_context == ctx));
/* Not recorded here: only recurse through workshare/simd contexts,
   which do not themselves determine data sharing.  */
5897 if (ctx->region_type != ORT_WORKSHARE
5898 && ctx->region_type != ORT_SIMD)
5899 return false;
5900 else if (ctx->outer_context)
5901 return omp_is_private (ctx->outer_context, decl, simd);
5902 return false;
5905 /* Return true if DECL is private within a parallel region
5906 that binds to the current construct's context or in parallel
5907 region's REDUCTION clause. */
5909 static bool
5910 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
5912 splay_tree_node n;
/* Walk outward (do/while below — the `do` line is elided in this
   rendering) until DECL is found or a non-workshare/simd context is
   crossed; target contexts are skipped entirely.  */
5916 ctx = ctx->outer_context;
5917 if (ctx == NULL)
5918 return !(is_global_var (decl)
5919 /* References might be private, but might be shared too,
5920 when checking for copyprivate, assume they might be
5921 private, otherwise assume they might be shared. */
5922 || (!copyprivate
5923 && lang_hooks.decls.omp_privatize_by_reference (decl)));
5925 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
5926 continue;
5928 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5929 if (n != NULL)
5930 return (n->value & GOVD_SHARED) == 0;
5932 while (ctx->region_type == ORT_WORKSHARE
5933 || ctx->region_type == ORT_SIMD);
5934 return false;
5937 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5938 and previous omp contexts. */
5940 static void
5941 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5942 enum omp_region_type region_type)
5944 struct gimplify_omp_ctx *ctx, *outer_ctx;
5945 tree c;
/* Open a fresh OMP context for this region; it becomes the current
   gimplify_omp_ctxp at the end of the scan.  */
5947 ctx = new_omp_context (region_type);
5948 outer_ctx = ctx->outer_context;
/* Process each clause in turn.  REMOVE marks clauses to be unlinked
   from the list (erroneous or rejected ones); CHECK_NON_PRIVATE names
   the clause kind for the "private in outer context" diagnostic.  */
5950 while ((c = *list_p) != NULL)
5952 bool remove = false;
5953 bool notice_outer = true;
5954 const char *check_non_private = NULL;
5955 unsigned int flags;
5956 tree decl;
5958 switch (OMP_CLAUSE_CODE (c))
5960 case OMP_CLAUSE_PRIVATE:
5961 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5962 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5964 flags |= GOVD_PRIVATE_OUTER_REF;
5965 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5967 else
5968 notice_outer = false;
5969 goto do_add;
5970 case OMP_CLAUSE_SHARED:
5971 flags = GOVD_SHARED | GOVD_EXPLICIT;
5972 goto do_add;
5973 case OMP_CLAUSE_FIRSTPRIVATE:
5974 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5975 check_non_private = "firstprivate";
5976 goto do_add;
5977 case OMP_CLAUSE_LASTPRIVATE:
5978 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5979 check_non_private = "lastprivate";
5980 goto do_add;
5981 case OMP_CLAUSE_REDUCTION:
5982 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5983 check_non_private = "reduction";
5984 goto do_add;
5985 case OMP_CLAUSE_LINEAR:
5986 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
5987 is_gimple_val, fb_rvalue) == GS_ERROR)
5989 remove = true;
5990 break;
5992 flags = GOVD_LINEAR | GOVD_EXPLICIT;
5993 goto do_add;
5995 case OMP_CLAUSE_MAP:
5996 decl = OMP_CLAUSE_DECL (c);
5997 if (error_operand_p (decl))
5999 remove = true;
6000 break;
/* Default the map size from the decl or its type, then gimplify
   both the size and (for non-decl map operands) the lvalue.  */
6002 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6003 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6004 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6005 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6006 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6008 remove = true;
6009 break;
6011 if (!DECL_P (decl))
6013 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6014 NULL, is_gimple_lvalue, fb_lvalue)
6015 == GS_ERROR)
6017 remove = true;
6018 break;
6020 break;
6022 flags = GOVD_MAP | GOVD_EXPLICIT;
6023 goto do_add;
6025 case OMP_CLAUSE_DEPEND:
6026 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
6028 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
6029 NULL, is_gimple_val, fb_rvalue);
6030 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
6032 if (error_operand_p (OMP_CLAUSE_DECL (c)))
6034 remove = true;
6035 break;
/* depend() operands are lowered to the address of the object.  */
6037 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
6038 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
6039 is_gimple_val, fb_rvalue) == GS_ERROR)
6041 remove = true;
6042 break;
6044 break;
6046 case OMP_CLAUSE_TO:
6047 case OMP_CLAUSE_FROM:
6048 decl = OMP_CLAUSE_DECL (c);
6049 if (error_operand_p (decl))
6051 remove = true;
6052 break;
6054 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6055 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6056 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6057 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6058 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6060 remove = true;
6061 break;
6063 if (!DECL_P (decl))
6065 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6066 NULL, is_gimple_lvalue, fb_lvalue)
6067 == GS_ERROR)
6069 remove = true;
6070 break;
6072 break;
6074 goto do_notice;
/* Shared tail for the data-sharing clauses above: record DECL in the
   new context, then gimplify any clause-attached statements
   (reduction init/merge, lastprivate/linear statements) inside CTX.  */
6076 do_add:
6077 decl = OMP_CLAUSE_DECL (c);
6078 if (error_operand_p (decl))
6080 remove = true;
6081 break;
6083 omp_add_variable (ctx, decl, flags);
6084 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6085 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6087 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
6088 GOVD_LOCAL | GOVD_SEEN)
6089 gimplify_omp_ctxp = ctx;
6090 push_gimplify_context ();
6092 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6093 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6095 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6096 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6097 pop_gimplify_context
6098 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
6099 push_gimplify_context ();
6100 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6101 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6102 pop_gimplify_context
6103 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6104 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6105 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6107 gimplify_omp_ctxp = outer_ctx;
6109 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6110 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6112 gimplify_omp_ctxp = ctx;
6113 push_gimplify_context ();
6114 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6116 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6117 NULL, NULL);
6118 TREE_SIDE_EFFECTS (bind) = 1;
6119 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6120 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6122 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6123 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6124 pop_gimplify_context
6125 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6126 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6128 gimplify_omp_ctxp = outer_ctx;
6130 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6131 && OMP_CLAUSE_LINEAR_STMT (c))
6133 gimplify_omp_ctxp = ctx;
6134 push_gimplify_context ();
6135 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
6137 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6138 NULL, NULL);
6139 TREE_SIDE_EFFECTS (bind) = 1;
6140 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
6141 OMP_CLAUSE_LINEAR_STMT (c) = bind;
6143 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
6144 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6145 pop_gimplify_context
6146 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
6147 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
6149 gimplify_omp_ctxp = outer_ctx;
6151 if (notice_outer)
6152 goto do_notice;
6153 break;
6155 case OMP_CLAUSE_COPYIN:
6156 case OMP_CLAUSE_COPYPRIVATE:
6157 decl = OMP_CLAUSE_DECL (c);
6158 if (error_operand_p (decl))
6160 remove = true;
6161 break;
/* copyprivate requires the variable to be threadprivate or private
   in the outer context; otherwise reject the clause.  */
6163 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
6164 && !remove
6165 && !omp_check_private (ctx, decl, true))
6167 remove = true;
6168 if (is_global_var (decl))
6170 if (DECL_THREAD_LOCAL_P (decl))
6171 remove = false;
6172 else if (DECL_HAS_VALUE_EXPR_P (decl))
6174 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6176 if (value
6177 && DECL_P (value)
6178 && DECL_THREAD_LOCAL_P (value))
6179 remove = false;
6182 if (remove)
6183 error_at (OMP_CLAUSE_LOCATION (c),
6184 "copyprivate variable %qE is not threadprivate"
6185 " or private in outer context", DECL_NAME (decl));
6187 do_notice:
6188 if (outer_ctx)
6189 omp_notice_variable (outer_ctx, decl, true);
6190 if (check_non_private
6191 && region_type == ORT_WORKSHARE
6192 && omp_check_private (ctx, decl, false))
6194 error ("%s variable %qE is private in outer context",
6195 check_non_private, DECL_NAME (decl));
6196 remove = true;
6198 break;
6200 case OMP_CLAUSE_FINAL:
6201 case OMP_CLAUSE_IF:
6202 OMP_CLAUSE_OPERAND (c, 0)
6203 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6204 /* Fall through. */
6206 case OMP_CLAUSE_SCHEDULE:
6207 case OMP_CLAUSE_NUM_THREADS:
6208 case OMP_CLAUSE_NUM_TEAMS:
6209 case OMP_CLAUSE_THREAD_LIMIT:
6210 case OMP_CLAUSE_DIST_SCHEDULE:
6211 case OMP_CLAUSE_DEVICE:
6212 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6213 is_gimple_val, fb_rvalue) == GS_ERROR)
6214 remove = true;
6215 break;
/* Clauses with no operands to gimplify.  */
6217 case OMP_CLAUSE_NOWAIT:
6218 case OMP_CLAUSE_ORDERED:
6219 case OMP_CLAUSE_UNTIED:
6220 case OMP_CLAUSE_COLLAPSE:
6221 case OMP_CLAUSE_MERGEABLE:
6222 case OMP_CLAUSE_PROC_BIND:
6223 case OMP_CLAUSE_SAFELEN:
6224 break;
6226 case OMP_CLAUSE_ALIGNED:
6227 decl = OMP_CLAUSE_DECL (c);
6228 if (error_operand_p (decl))
6230 remove = true;
6231 break;
6233 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
6234 is_gimple_val, fb_rvalue) == GS_ERROR)
6236 remove = true;
6237 break;
6239 if (!is_global_var (decl)
6240 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6241 omp_add_variable (ctx, decl, GOVD_ALIGNED);
6242 break;
6244 case OMP_CLAUSE_DEFAULT:
6245 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6246 break;
6248 default:
6249 gcc_unreachable ();
/* Unlink rejected clauses; otherwise advance down the chain.  */
6252 if (remove)
6253 *list_p = OMP_CLAUSE_CHAIN (c);
6254 else
6255 list_p = &OMP_CLAUSE_CHAIN (c);
6258 gimplify_omp_ctxp = ctx;
/* Bundle of the out-parameters needed by gimplify_adjust_omp_clauses_1,
   which receives only a single void* from splay_tree_foreach.
   LIST_P points at the head of the clause chain being built; PRE_P is
   the statement sequence clause-finishing code may append to.  */
6261 struct gimplify_adjust_omp_clauses_data
6263   tree *list_p;
6264   gimple_seq *pre_p;
6267 /* For all variables that were not actually used within the context,
6268    remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */
6270 static int
6271 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
     /* Splay-tree callback: N maps a decl (key) to its GOVD_* flag word
        (value); DATA is a gimplify_adjust_omp_clauses_data bundle.
        Synthesizes an implicit data-sharing clause for the decl and
        prepends it to *list_p.  Always returns 0 so the foreach walk
        continues over every recorded variable.  */
6273   tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
6274   gimple_seq *pre_p
6275     = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
6276   tree decl = (tree) n->key;
6277   unsigned flags = n->value;
6278   enum omp_clause_code code;
6279   tree clause;
6280   bool private_debug;
     /* Explicit clauses were already emitted by the user; locals need no
        clause; variables never referenced in the region are skipped.  */
6282   if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6283     return 0;
6284   if ((flags & GOVD_SEEN) == 0)
6285     return 0;
6286   if (flags & GOVD_DEBUG_PRIVATE)
6288       gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6289       private_debug = true;
6291   else if (flags & GOVD_MAP)
6292     private_debug = false;
6293   else
6294     private_debug
6295       = lang_hooks.decls.omp_private_debug_clause (decl,
6296 						   !!(flags & GOVD_SHARED));
     /* Translate the GOVD flag word into the clause code to emit.  */
6297   if (private_debug)
6298     code = OMP_CLAUSE_PRIVATE;
6299   else if (flags & GOVD_MAP)
6300     code = OMP_CLAUSE_MAP;
6301   else if (flags & GOVD_SHARED)
6303       if (is_global_var (decl))
6305 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6306 	  while (ctx != NULL)
6308 	      splay_tree_node on
6309 		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6310 	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6311 				      | GOVD_PRIVATE | GOVD_REDUCTION
6312 				      | GOVD_LINEAR)) != 0)
6313 		break;
6314 	      ctx = ctx->outer_context;
              /* A global that is not privatized in any enclosing context
                 is shared by default; no explicit clause is needed.  */
6316 	  if (ctx == NULL)
6317 	    return 0;
6319       code = OMP_CLAUSE_SHARED;
6321   else if (flags & GOVD_PRIVATE)
6322     code = OMP_CLAUSE_PRIVATE;
6323   else if (flags & GOVD_FIRSTPRIVATE)
6324     code = OMP_CLAUSE_FIRSTPRIVATE;
6325   else if (flags & GOVD_LASTPRIVATE)
6326     code = OMP_CLAUSE_LASTPRIVATE;
6327   else if (flags & GOVD_ALIGNED)
6328     return 0;
6329   else
6330     gcc_unreachable ();
6332   clause = build_omp_clause (input_location, code);
6333   OMP_CLAUSE_DECL (clause) = decl;
6334   OMP_CLAUSE_CHAIN (clause) = *list_p;
6335   if (private_debug)
6336     OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6337   else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6338     OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6339   else if (code == OMP_CLAUSE_MAP)
6341       OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
6342 				     ? OMP_CLAUSE_MAP_TO
6343 				     : OMP_CLAUSE_MAP_TOFROM;
        /* Variable-sized decl: map the underlying storage through its
           DECL_VALUE_EXPR (*ptr form) and add a companion POINTER map
           clause for the pointer itself.  */
6344       if (DECL_SIZE (decl)
6345 	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6347 	  tree decl2 = DECL_VALUE_EXPR (decl);
6348 	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6349 	  decl2 = TREE_OPERAND (decl2, 0);
6350 	  gcc_assert (DECL_P (decl2));
6351 	  tree mem = build_simple_mem_ref (decl2);
6352 	  OMP_CLAUSE_DECL (clause) = mem;
6353 	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6354 	  if (gimplify_omp_ctxp->outer_context)
6356 	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6357 	      omp_notice_variable (ctx, decl2, true);
6358 	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6360 	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6361 				      OMP_CLAUSE_MAP);
6362 	  OMP_CLAUSE_DECL (nc) = decl;
6363 	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
6364 	  OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6365 	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6366 	  OMP_CLAUSE_CHAIN (clause) = nc;
6368       else
6369 	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
     /* A var that is both firstprivate and lastprivate gets an extra
        LASTPRIVATE clause marked as firstprivate-backed.  */
6371   if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
6373       tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
6374       OMP_CLAUSE_DECL (nc) = decl;
6375       OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
6376       OMP_CLAUSE_CHAIN (nc) = *list_p;
6377       OMP_CLAUSE_CHAIN (clause) = nc;
        /* omp_finish_clause must run against the OUTER gimplify context,
           so temporarily swap gimplify_omp_ctxp and restore it after.  */
6378       struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6379       gimplify_omp_ctxp = ctx->outer_context;
6380       lang_hooks.decls.omp_finish_clause (nc, pre_p);
6381       gimplify_omp_ctxp = ctx;
6383   *list_p = clause;
6384   struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6385   gimplify_omp_ctxp = ctx->outer_context;
6386   lang_hooks.decls.omp_finish_clause (clause, pre_p);
6387   gimplify_omp_ctxp = ctx;
6388   return 0;
6391 static void
6392 gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p)
     /* Post-gimplification pass over the explicit clause chain at *LIST_P
        for the current gimplify OMP context: drop clauses whose decl was
        never actually used (per the GOVD_SEEN flag recorded during body
        scanning), fix up variable-sized MAP/TO/FROM clauses, then append
        implicit clauses for every remaining recorded variable.  Finally
        pops and frees the current context.  */
6394   struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6395   tree c, decl;
6397   while ((c = *list_p) != NULL)
6399       splay_tree_node n;
6400       bool remove = false;
6402       switch (OMP_CLAUSE_CODE (c))
6404 	case OMP_CLAUSE_PRIVATE:
6405 	case OMP_CLAUSE_SHARED:
6406 	case OMP_CLAUSE_FIRSTPRIVATE:
6407 	case OMP_CLAUSE_LINEAR:
6408 	  decl = OMP_CLAUSE_DECL (c);
6409 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
          /* Unused in the region -> clause is dead weight; remove it.  */
6410 	  remove = !(n->value & GOVD_SEEN);
6411 	  if (! remove)
6413 	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6414 	      if ((n->value & GOVD_DEBUG_PRIVATE)
6415 		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6417 		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6418 			      || ((n->value & GOVD_DATA_SHARE_CLASS)
6419 				  == GOVD_PRIVATE));
6420 		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6421 		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
              /* A LINEAR clause with copy-in or copy-out must make the
                 decl visible to the surrounding context as well.  */
6423 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6424 		  && ctx->outer_context
6425 		  && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6426 		       && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6428 		  if (ctx->outer_context->combined_loop
6429 		      && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6431 		      n = splay_tree_lookup (ctx->outer_context->variables,
6432 					     (splay_tree_key) decl);
6433 		      if (n == NULL
6434 			  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
6436 			  int flags = GOVD_FIRSTPRIVATE;
6437 			  /* #pragma omp distribute does not allow
6438 			     lastprivate clause.  */
6439 			  if (!ctx->outer_context->distribute)
6440 			    flags |= GOVD_LASTPRIVATE;
6441 			  if (n == NULL)
6442 			    omp_add_variable (ctx->outer_context, decl,
6443 					      flags | GOVD_SEEN);
6444 			  else
6445 			    n->value |= flags | GOVD_SEEN;
6448 		  else if (!is_global_var (decl))
6449 		    omp_notice_variable (ctx->outer_context, decl, true);
6452 	  break;
6454 	case OMP_CLAUSE_LASTPRIVATE:
6455 	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6456 	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
6457 	  decl = OMP_CLAUSE_DECL (c);
6458 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6459 	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6460 	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
6461 	  break;
6463 	case OMP_CLAUSE_ALIGNED:
6464 	  decl = OMP_CLAUSE_DECL (c);
6465 	  if (!is_global_var (decl))
6467 	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6468 	      remove = n == NULL || !(n->value & GOVD_SEEN);
6469 	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6471 		  struct gimplify_omp_ctx *octx;
6472 		  if (n != NULL
6473 		      && (n->value & (GOVD_DATA_SHARE_CLASS
6474 				      & ~GOVD_FIRSTPRIVATE)))
6475 		    remove = true;
6476 		  else
6477 		    for (octx = ctx->outer_context; octx;
6478 			 octx = octx->outer_context)
6480 			n = splay_tree_lookup (octx->variables,
6481 					       (splay_tree_key) decl);
6482 			if (n == NULL)
6483 			  continue;
6484 			if (n->value & GOVD_LOCAL)
6485 			  break;
6486 			/* We have to avoid assigning a shared variable
6487 			   to itself when trying to add
6488 			   __builtin_assume_aligned.  */
6489 			if (n->value & GOVD_SHARED)
6491 			    remove = true;
6492 			    break;
6497 	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6499 	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6500 	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6501 		remove = true;
6503 	  break;
6505 	case OMP_CLAUSE_MAP:
6506 	  decl = OMP_CLAUSE_DECL (c);
6507 	  if (!DECL_P (decl))
6508 	    break;
6509 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6510 	  if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6511 	    remove = true;
          /* Variable-sized mapped decl: rewrite the clause to map the
             pointed-to storage and chain a POINTER map clause after it
             (the same shape gimplify_adjust_omp_clauses_1 produces).  */
6512 	  else if (DECL_SIZE (decl)
6513 		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6514 		   && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
6516 	      tree decl2 = DECL_VALUE_EXPR (decl);
6517 	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6518 	      decl2 = TREE_OPERAND (decl2, 0);
6519 	      gcc_assert (DECL_P (decl2));
6520 	      tree mem = build_simple_mem_ref (decl2);
6521 	      OMP_CLAUSE_DECL (c) = mem;
6522 	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6523 	      if (ctx->outer_context)
6525 		  omp_notice_variable (ctx->outer_context, decl2, true);
6526 		  omp_notice_variable (ctx->outer_context,
6527 				       OMP_CLAUSE_SIZE (c), true);
6529 	      tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6530 					  OMP_CLAUSE_MAP);
6531 	      OMP_CLAUSE_DECL (nc) = decl;
6532 	      OMP_CLAUSE_SIZE (nc) = size_zero_node;
6533 	      OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6534 	      OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6535 	      OMP_CLAUSE_CHAIN (c) = nc;
              /* Skip the freshly inserted clause on the next iteration.  */
6536 	      c = nc;
6538 	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6539 	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
6540 	  break;
6542 	case OMP_CLAUSE_TO:
6543 	case OMP_CLAUSE_FROM:
6544 	  decl = OMP_CLAUSE_DECL (c);
6545 	  if (!DECL_P (decl))
6546 	    break;
6547 	  if (DECL_SIZE (decl)
6548 	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6550 	      tree decl2 = DECL_VALUE_EXPR (decl);
6551 	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6552 	      decl2 = TREE_OPERAND (decl2, 0);
6553 	      gcc_assert (DECL_P (decl2));
6554 	      tree mem = build_simple_mem_ref (decl2);
6555 	      OMP_CLAUSE_DECL (c) = mem;
6556 	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6557 	      if (ctx->outer_context)
6559 		  omp_notice_variable (ctx->outer_context, decl2, true);
6560 		  omp_notice_variable (ctx->outer_context,
6561 				       OMP_CLAUSE_SIZE (c), true);
6564 	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6565 	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
6566 	  break;
        /* Clauses that need no adjustment after gimplification.  */
6568 	case OMP_CLAUSE_REDUCTION:
6569 	case OMP_CLAUSE_COPYIN:
6570 	case OMP_CLAUSE_COPYPRIVATE:
6571 	case OMP_CLAUSE_IF:
6572 	case OMP_CLAUSE_NUM_THREADS:
6573 	case OMP_CLAUSE_NUM_TEAMS:
6574 	case OMP_CLAUSE_THREAD_LIMIT:
6575 	case OMP_CLAUSE_DIST_SCHEDULE:
6576 	case OMP_CLAUSE_DEVICE:
6577 	case OMP_CLAUSE_SCHEDULE:
6578 	case OMP_CLAUSE_NOWAIT:
6579 	case OMP_CLAUSE_ORDERED:
6580 	case OMP_CLAUSE_DEFAULT:
6581 	case OMP_CLAUSE_UNTIED:
6582 	case OMP_CLAUSE_COLLAPSE:
6583 	case OMP_CLAUSE_FINAL:
6584 	case OMP_CLAUSE_MERGEABLE:
6585 	case OMP_CLAUSE_PROC_BIND:
6586 	case OMP_CLAUSE_SAFELEN:
6587 	case OMP_CLAUSE_DEPEND:
6588 	  break;
6590 	default:
6591 	  gcc_unreachable ();
      /* Unlink the clause or advance past it.  */
6594       if (remove)
6595 	*list_p = OMP_CLAUSE_CHAIN (c);
6596       else
6597 	list_p = &OMP_CLAUSE_CHAIN (c);
6600   /* Add in any implicit data sharing.  */
6601   struct gimplify_adjust_omp_clauses_data data;
6602   data.list_p = list_p;
6603   data.pre_p = pre_p;
6604   splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
6606   gimplify_omp_ctxp = ctx->outer_context;
6607   delete_omp_context (ctx);
6610 /* Gimplify the contents of an OMP_PARALLEL statement.  This involves
6611    gimplification of the body, as well as scanning the body for used
6612    variables.  We need to do this scan now, because variable-sized
6613    decls will be decomposed during gimplification.  */
6615 static void
6616 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
     /* *EXPR_P is the OMP_PARALLEL tree; its GIMPLE replacement is
        appended to PRE_P and *EXPR_P is cleared.  */
6618   tree expr = *expr_p;
6619   gimple g;
6620   gimple_seq body = NULL;
     /* Scan clauses first so the body gimplification below sees the
        data-sharing context; a combined construct (e.g. parallel for)
        gets the ORT_COMBINED_PARALLEL region type.  */
6622   gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6623 			     OMP_PARALLEL_COMBINED (expr)
6624 			     ? ORT_COMBINED_PARALLEL
6625 			     : ORT_PARALLEL);
6627   push_gimplify_context ();
     /* Pop with the bind stmt if the body produced one, so temporaries
        are attached to it; otherwise pop with NULL.  */
6629   g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6630   if (gimple_code (g) == GIMPLE_BIND)
6631     pop_gimplify_context (g);
6632   else
6633     pop_gimplify_context (NULL);
6635   gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr));
6637   g = gimple_build_omp_parallel (body,
6638 				 OMP_PARALLEL_CLAUSES (expr),
6639 				 NULL_TREE, NULL_TREE);
6640   if (OMP_PARALLEL_COMBINED (expr))
6641     gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6642   gimplify_seq_add_stmt (pre_p, g);
6643   *expr_p = NULL_TREE;
6646 /* Gimplify the contents of an OMP_TASK statement.  This involves
6647    gimplification of the body, as well as scanning the body for used
6648    variables.  We need to do this scan now, because variable-sized
6649    decls will be decomposed during gimplification.  */
6651 static void
6652 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
     /* Mirrors gimplify_omp_parallel, but picks the region type from the
        presence of an UNTIED clause and builds a GIMPLE_OMP_TASK.  */
6654   tree expr = *expr_p;
6655   gimple g;
6656   gimple_seq body = NULL;
6658   gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6659 			     find_omp_clause (OMP_TASK_CLAUSES (expr),
6660 					      OMP_CLAUSE_UNTIED)
6661 			     ? ORT_UNTIED_TASK : ORT_TASK);
6663   push_gimplify_context ();
6665   g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6666   if (gimple_code (g) == GIMPLE_BIND)
6667     pop_gimplify_context (g);
6668   else
6669     pop_gimplify_context (NULL);
6671   gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr));
6673   g = gimple_build_omp_task (body,
6674 			     OMP_TASK_CLAUSES (expr),
6675 			     NULL_TREE, NULL_TREE,
6676 			     NULL_TREE, NULL_TREE, NULL_TREE);
6677   gimplify_seq_add_stmt (pre_p, g);
6678   *expr_p = NULL_TREE;
6681 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6682    with non-NULL OMP_FOR_INIT.  */
6684 static tree
6685 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
     /* walk_tree callback.  Returns the matching loop tree to stop the
        walk, or NULL_TREE to continue.  Subtree descent is off by
        default and re-enabled only for the few wrapper nodes a combined
        loop may be nested inside.  */
6687   *walk_subtrees = 0;
6688   switch (TREE_CODE (*tp))
6690     case OMP_FOR:
      /* An OMP_FOR with a NULL init is itself a combined wrapper; keep
         looking inside it.  */
6691       *walk_subtrees = 1;
6692       /* FALLTHRU */
6693     case OMP_SIMD:
6694       if (OMP_FOR_INIT (*tp) != NULL_TREE)
6695 	return *tp;
6696       break;
6697     case BIND_EXPR:
6698     case STATEMENT_LIST:
6699     case OMP_PARALLEL:
6700       *walk_subtrees = 1;
6701       break;
6702     default:
6703       break;
6705   return NULL_TREE;
6708 /* Gimplify the gross structure of an OMP_FOR statement.  */
6710 static enum gimplify_status
6711 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
     /* Handles OMP_FOR, OMP_SIMD, CILK_SIMD and OMP_DISTRIBUTE.  Scans
        clauses, privatizes iteration variables, canonicalizes the
        init/cond/incr vectors of each collapsed loop, gimplifies the
        body, and emits a GIMPLE_OMP_FOR to PRE_P.  Returns GS_ALL_DONE
        on success, GS_ERROR otherwise.  */
6713   tree for_stmt, orig_for_stmt, decl, var, t;
6714   enum gimplify_status ret = GS_ALL_DONE;
6715   enum gimplify_status tret;
6716   gimple gfor;
6717   gimple_seq for_body, for_pre_body;
6718   int i;
6719   bool simd;
6720   bitmap has_decl_expr = NULL;
6722   orig_for_stmt = for_stmt = *expr_p;
6724   simd = (TREE_CODE (for_stmt) == OMP_SIMD
6725 	  || TREE_CODE (for_stmt) == CILK_SIMD);
6726   gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6727 			     simd ? ORT_SIMD : ORT_WORKSHARE);
6728   if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
6729     gimplify_omp_ctxp->distribute = true;
6731   /* Handle OMP_FOR_INIT.  */
6732   for_pre_body = NULL;
     /* For simd, record which iteration vars were declared inside the
        pre-body (DECL_EXPRs); those will not need copy-out later.  */
6733   if (simd && OMP_FOR_PRE_BODY (for_stmt))
6735       has_decl_expr = BITMAP_ALLOC (NULL);
6736       if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6737 	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
6738 	     == VAR_DECL)
6740 	  t = OMP_FOR_PRE_BODY (for_stmt);
6741 	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6743       else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6745 	  tree_stmt_iterator si;
6746 	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6747 	       tsi_next (&si))
6749 	      t = tsi_stmt (si);
6750 	      if (TREE_CODE (t) == DECL_EXPR
6751 		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6752 		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6756   gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6757   OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
     /* A NULL init means this is the outer statement of a combined
        construct; locate the innermost loop that carries the real
        init/cond/incr vectors.  */
6759   if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6761       for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6762 			    NULL, NULL);
6763       gcc_assert (for_stmt != NULL_TREE);
6764       gimplify_omp_ctxp->combined_loop = true;
6767   for_body = NULL;
6768   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6769 	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6770   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6771 	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
     /* One iteration of this loop per collapsed loop dimension.  */
6772   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6774       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6775       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6776       decl = TREE_OPERAND (t, 0);
6777       gcc_assert (DECL_P (decl));
6778       gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6779 		  || POINTER_TYPE_P (TREE_TYPE (decl)));
6781       /* Make sure the iteration variable is private.  */
6782       tree c = NULL_TREE;
6783       tree c2 = NULL_TREE;
6784       if (orig_for_stmt != for_stmt)
6785 	/* Do this only on innermost construct for combined ones.  */;
6786       else if (simd)
6788 	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6789 						 (splay_tree_key)decl);
6790 	  omp_is_private (gimplify_omp_ctxp, decl,
6791 			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6792 			       != 1));
6793 	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6794 	    omp_notice_variable (gimplify_omp_ctxp, decl, true);
          /* Non-collapsed simd iterator: add an implicit LINEAR clause
             with no copy-in (and no copy-out if locally declared).  */
6795 	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6797 	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6798 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6799 	      if (has_decl_expr
6800 		  && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6801 		OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6802 	      OMP_CLAUSE_DECL (c) = decl;
6803 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6804 	      OMP_FOR_CLAUSES (for_stmt) = c;
6805 	      omp_add_variable (gimplify_omp_ctxp, decl,
6806 				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6808 	  else
          /* Collapsed simd: the iterator becomes lastprivate (or private
             when declared inside the pre-body).  */
6810 	      bool lastprivate
6811 		= (!has_decl_expr
6812 		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6813 	      if (lastprivate
6814 		  && gimplify_omp_ctxp->outer_context
6815 		  && gimplify_omp_ctxp->outer_context->region_type
6816 		     == ORT_WORKSHARE
6817 		  && gimplify_omp_ctxp->outer_context->combined_loop
6818 		  && !gimplify_omp_ctxp->outer_context->distribute)
6820 		  struct gimplify_omp_ctx *outer
6821 		    = gimplify_omp_ctxp->outer_context;
6822 		  n = splay_tree_lookup (outer->variables,
6823 					 (splay_tree_key) decl);
6824 		  if (n != NULL
6825 		      && (n->value & GOVD_DATA_SHARE_CLASS) == GOVD_LOCAL)
6826 		    lastprivate = false;
6827 		  else if (omp_check_private (outer, decl, false))
6828 		    error ("lastprivate variable %qE is private in outer "
6829 			   "context", DECL_NAME (decl));
6830 		  else
6832 		      omp_add_variable (outer, decl,
6833 					GOVD_LASTPRIVATE | GOVD_SEEN);
6834 		      if (outer->outer_context)
6835 			omp_notice_variable (outer->outer_context, decl, true);
6838 	      c = build_omp_clause (input_location,
6839 				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
6840 						: OMP_CLAUSE_PRIVATE);
6841 	      OMP_CLAUSE_DECL (c) = decl;
6842 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6843 	      OMP_FOR_CLAUSES (for_stmt) = c;
6844 	      omp_add_variable (gimplify_omp_ctxp, decl,
6845 				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6846 				| GOVD_EXPLICIT | GOVD_SEEN);
6847 	      c = NULL_TREE;
6850       else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
6851 	omp_notice_variable (gimplify_omp_ctxp, decl, true);
6852       else
6853 	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6855       /* If DECL is not a gimple register, create a temporary variable to act
6856 	 as an iteration counter.  This is valid, since DECL cannot be
6857 	 modified in the body of the loop.  Similarly for any iteration vars
6858 	 in simd with collapse > 1 where the iterator vars must be
6859 	 lastprivate.  */
6860       if (orig_for_stmt != for_stmt)
6861 	var = decl;
6862       else if (!is_gimple_reg (decl)
6863 	       || (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
6865 	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6866 	  TREE_OPERAND (t, 0) = var;
          /* Copy the surrogate back into DECL at the top of the body.  */
6868 	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6870 	  if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6872 	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6873 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
6874 	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
6875 	      OMP_CLAUSE_DECL (c2) = var;
6876 	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
6877 	      OMP_FOR_CLAUSES (for_stmt) = c2;
6878 	      omp_add_variable (gimplify_omp_ctxp, var,
6879 				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6880 	      if (c == NULL_TREE)
6882 		  c = c2;
6883 		  c2 = NULL_TREE;
6886 	    else
6887 	      omp_add_variable (gimplify_omp_ctxp, var,
6888 				GOVD_PRIVATE | GOVD_SEEN);
6890       else
6891 	var = decl;
     /* Gimplify the loop's initial value into the pre-body.  */
6893       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6894 			    is_gimple_val, fb_rvalue);
6895       ret = MIN (ret, tret);
6896       if (ret == GS_ERROR)
6897 	return ret;
6899       /* Handle OMP_FOR_COND.  */
6900       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6901       gcc_assert (COMPARISON_CLASS_P (t));
6902       gcc_assert (TREE_OPERAND (t, 0) == decl);
6904       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6905 			    is_gimple_val, fb_rvalue);
6906       ret = MIN (ret, tret);
6908       /* Handle OMP_FOR_INCR.  */
6909       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6910       switch (TREE_CODE (t))
6912 	case PREINCREMENT_EXPR:
6913 	case POSTINCREMENT_EXPR:
6915 	    tree decl = TREE_OPERAND (t, 0);
6916 	    /* c_omp_for_incr_canonicalize_ptr() should have been
6917 	       called to massage things appropriately.  */
6918 	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
6920 	    if (orig_for_stmt != for_stmt)
6921 	      break;
            /* Canonicalize ++/-- into VAR = VAR + {1,-1} MODIFY_EXPRs.  */
6922 	    t = build_int_cst (TREE_TYPE (decl), 1);
6923 	    if (c)
6924 	      OMP_CLAUSE_LINEAR_STEP (c) = t;
6925 	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6926 	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6927 	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6928 	    break;
6931 	case PREDECREMENT_EXPR:
6932 	case POSTDECREMENT_EXPR:
6933 	  /* c_omp_for_incr_canonicalize_ptr() should have been
6934 	     called to massage things appropriately.  */
6935 	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
6936 	  if (orig_for_stmt != for_stmt)
6937 	    break;
6938 	  t = build_int_cst (TREE_TYPE (decl), -1);
6939 	  if (c)
6940 	    OMP_CLAUSE_LINEAR_STEP (c) = t;
6941 	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6942 	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6943 	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6944 	  break;
6946 	case MODIFY_EXPR:
6947 	  gcc_assert (TREE_OPERAND (t, 0) == decl);
6948 	  TREE_OPERAND (t, 0) = var;
6950 	  t = TREE_OPERAND (t, 1);
6951 	  switch (TREE_CODE (t))
6953 	    case PLUS_EXPR:
              /* Normalize DECL = STEP + DECL into DECL = DECL + STEP.  */
6954 	      if (TREE_OPERAND (t, 1) == decl)
6956 		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6957 		  TREE_OPERAND (t, 0) = var;
6958 		  break;
6961 	      /* Fallthru.  */
6962 	    case MINUS_EXPR:
6963 	    case POINTER_PLUS_EXPR:
6964 	      gcc_assert (TREE_OPERAND (t, 0) == decl);
6965 	      TREE_OPERAND (t, 0) = var;
6966 	      break;
6967 	    default:
6968 	      gcc_unreachable ();
6971 	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6972 				is_gimple_val, fb_rvalue);
6973 	  ret = MIN (ret, tret);
          /* Propagate the (possibly negated/converted) step into the
             implicit LINEAR clause built above.  */
6974 	  if (c)
6976 	      tree step = TREE_OPERAND (t, 1);
6977 	      tree stept = TREE_TYPE (decl);
6978 	      if (POINTER_TYPE_P (stept))
6979 		stept = sizetype;
6980 	      step = fold_convert (stept, step);
6981 	      if (TREE_CODE (t) == MINUS_EXPR)
6982 		step = fold_build1 (NEGATE_EXPR, stept, step);
6983 	      OMP_CLAUSE_LINEAR_STEP (c) = step;
6984 	      if (step != TREE_OPERAND (t, 1))
6986 		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
6987 					&for_pre_body, NULL,
6988 					is_gimple_val, fb_rvalue);
6989 		  ret = MIN (ret, tret);
6992 	  break;
6994 	default:
6995 	  gcc_unreachable ();
6998       if (c2)
7000 	  gcc_assert (c);
7001 	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
      /* When a surrogate VAR replaced DECL, pre-seed the lastprivate/
         linear finalization sequence with "DECL = VAR op STEP" so the
         original variable receives the final value.  */
7004       if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
7005 	  && orig_for_stmt == for_stmt)
7007 	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
7008 	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7009 		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
7010 		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7011 		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
7012 		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
7013 		&& OMP_CLAUSE_DECL (c) == decl)
7015 		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7016 		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7017 		gcc_assert (TREE_OPERAND (t, 0) == var);
7018 		t = TREE_OPERAND (t, 1);
7019 		gcc_assert (TREE_CODE (t) == PLUS_EXPR
7020 			    || TREE_CODE (t) == MINUS_EXPR
7021 			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
7022 		gcc_assert (TREE_OPERAND (t, 0) == var);
7023 		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
7024 			    TREE_OPERAND (t, 1));
7025 		gimple_seq *seq;
7026 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
7027 		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
7028 		else
7029 		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
7030 		gimplify_assign (decl, t, seq);
7035   BITMAP_FREE (has_decl_expr);
7037   gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
     /* Combined constructs: give the outer statement fresh private
        surrogates for the inner loop's iterators.  */
7039   if (orig_for_stmt != for_stmt)
7040     for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7042 	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7043 	decl = TREE_OPERAND (t, 0);
7044 	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7045 	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
7046 	TREE_OPERAND (t, 0) = var;
7047 	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7048 	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
7049 	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
7052   gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt));
7054   int kind;
7055   switch (TREE_CODE (orig_for_stmt))
7057     case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
7058     case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
7059     case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
7060     case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
7061     default:
7062       gcc_unreachable ();
7064   gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
7065 			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
7066 			       for_pre_body);
7067   if (orig_for_stmt != for_stmt)
7068     gimple_omp_for_set_combined_p (gfor, true);
7069   if (gimplify_omp_ctxp
7070       && (gimplify_omp_ctxp->combined_loop
7071 	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
7072 	      && gimplify_omp_ctxp->outer_context
7073 	      && gimplify_omp_ctxp->outer_context->combined_loop)))
7075       gimple_omp_for_set_combined_into_p (gfor, true);
7076       if (gimplify_omp_ctxp->combined_loop)
7077 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
7078       else
7079 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
     /* Copy the canonicalized init/cond/incr per dimension into the
        GIMPLE_OMP_FOR statement.  */
7082   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7084       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7085       gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
7086       gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
7087       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7088       gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
7089       gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
7090       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7091       gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
7094   gimplify_seq_add_stmt (pre_p, gfor);
7095   if (ret != GS_ALL_DONE)
7096     return GS_ERROR;
7097   *expr_p = NULL_TREE;
7098   return GS_ALL_DONE;
7101 /* Gimplify the gross structure of other OpenMP constructs.
7102    In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
7103    and OMP_TEAMS.  */
7105 static void
7106 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
     /* Common path for clause-carrying region constructs that need no
        loop handling: map the tree code to a region type, scan clauses,
        gimplify the body, adjust clauses and emit the matching GIMPLE
        statement into PRE_P.  */
7108   tree expr = *expr_p;
7109   gimple stmt;
7110   gimple_seq body = NULL;
7111   enum omp_region_type ort = ORT_WORKSHARE;
7113   switch (TREE_CODE (expr))
7115     case OMP_SECTIONS:
7116     case OMP_SINGLE:
7117       break;
7118     case OMP_TARGET:
7119       ort = ORT_TARGET;
7120       break;
7121     case OMP_TARGET_DATA:
7122       ort = ORT_TARGET_DATA;
7123       break;
7124     case OMP_TEAMS:
7125       ort = ORT_TEAMS;
7126       break;
7127     default:
7128       gcc_unreachable ();
7130   gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
     /* Target regions get their own gimplify context, like parallel/task
        bodies do.  */
7131   if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
7133       push_gimplify_context ();
7134       gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
7135       if (gimple_code (g) == GIMPLE_BIND)
7136 	pop_gimplify_context (g);
7137       else
7138 	pop_gimplify_context (NULL);
      /* target data: wrap the body in try/finally so GOMP_target_end_data
         runs on every exit path from the region.  */
7139       if (ort == ORT_TARGET_DATA)
7141 	  gimple_seq cleanup = NULL;
7142 	  tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
7143 	  g = gimple_build_call (fn, 0);
7144 	  gimple_seq_add_stmt (&cleanup, g);
7145 	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7146 	  body = NULL;
7147 	  gimple_seq_add_stmt (&body, g);
7150   else
7151     gimplify_and_add (OMP_BODY (expr), &body);
7152   gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr));
7154   switch (TREE_CODE (expr))
7156     case OMP_SECTIONS:
7157       stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
7158       break;
7159     case OMP_SINGLE:
7160       stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
7161       break;
7162     case OMP_TARGET:
7163       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
7164 				      OMP_CLAUSES (expr));
7165       break;
7166     case OMP_TARGET_DATA:
7167       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
7168 				      OMP_CLAUSES (expr));
7169       break;
7170     case OMP_TEAMS:
7171       stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
7172       break;
7173     default:
7174       gcc_unreachable ();
7177   gimplify_seq_add_stmt (pre_p, stmt);
7178   *expr_p = NULL_TREE;
7181 /* Gimplify the gross structure of OpenMP target update construct.  */
7183 static void
7184 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
     /* target update has no body: scan and adjust its clauses, then emit
        a bodiless GIMPLE_OMP_TARGET with the UPDATE kind.  */
7186   tree expr = *expr_p;
7187   gimple stmt;
7189   gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
7190 			     ORT_WORKSHARE);
7191   gimplify_adjust_omp_clauses (pre_p, &OMP_TARGET_UPDATE_CLAUSES (expr));
7192   stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
7193 				  OMP_TARGET_UPDATE_CLAUSES (expr));
7195   gimplify_seq_add_stmt (pre_p, stmt);
7196   *expr_p = NULL_TREE;
7199 /* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
7200    stabilized the lhs of the atomic operation as *ADDR.  Return true if
7201    EXPR is this stabilized form.  */
7203 static bool
7204 goa_lhs_expr_p (tree expr, tree addr)
7206   /* Also include casts to other type variants.  The C front end is fond
7207      of adding these for e.g. volatile variables.  This is like
7208      STRIP_TYPE_NOPS but includes the main variant lookup.  */
7209   STRIP_USELESS_TYPE_CONVERSION (expr);
7211   if (TREE_CODE (expr) == INDIRECT_REF)
7213       expr = TREE_OPERAND (expr, 0);
      /* Peel matching conversion layers off both EXPR and ADDR in
         lock-step until they meet or diverge.  */
7214       while (expr != addr
7215 	     && (CONVERT_EXPR_P (expr)
7216 		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
7217 	     && TREE_CODE (expr) == TREE_CODE (addr)
7218 	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
7220 	  expr = TREE_OPERAND (expr, 0);
7221 	  addr = TREE_OPERAND (addr, 0);
7223       if (expr == addr)
7224 	return true;
7225       return (TREE_CODE (addr) == ADDR_EXPR
7226 	      && TREE_CODE (expr) == ADDR_EXPR
7227 	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
     /* Non-dereferenced form: EXPR matches when ADDR is &EXPR.  */
7229   if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
7230     return true;
7231   return false;
7234 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
7235    expression does not involve the lhs, evaluate it into a temporary.
7236    Return 1 if the lhs appeared as a subexpression, 0 if it did not,
7237    or -1 if an error was encountered.  */
7239 static int
7240 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
7241 		    tree lhs_var)
7243   tree expr = *expr_p;
7244   int saw_lhs;
7246   if (goa_lhs_expr_p (expr, lhs_addr))
7248       *expr_p = lhs_var;
7249       return 1;
7251   if (is_gimple_val (expr))
7252     return 0;
7254   saw_lhs = 0;
7255   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
7257     case tcc_binary:
7258     case tcc_comparison:
7259       saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
7260 				     lhs_var);
      /* Deliberate fallthrough: operand 0 of a binary/comparison expr is
         handled by the unary case below.  */
7261     case tcc_unary:
7262       saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
7263 				     lhs_var);
7264       break;
7265     case tcc_expression:
7266       switch (TREE_CODE (expr))
7268 	case TRUTH_ANDIF_EXPR:
7269 	case TRUTH_ORIF_EXPR:
7270 	case TRUTH_AND_EXPR:
7271 	case TRUTH_OR_EXPR:
7272 	case TRUTH_XOR_EXPR:
7273 	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
7274 					 lhs_addr, lhs_var);
          /* Deliberate fallthrough: operand 0 shared with the unary
             truth op.  */
7275 	case TRUTH_NOT_EXPR:
7276 	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
7277 					 lhs_addr, lhs_var);
7278 	  break;
7279 	case COMPOUND_EXPR:
7280 	  /* Break out any preevaluations from cp_build_modify_expr.  */
7281 	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
7282 	       expr = TREE_OPERAND (expr, 1))
7283 	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
7284 	  *expr_p = expr;
7285 	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
7286 	default:
7287 	  break;
7289       break;
7290     default:
7291       break;
     /* LHS nowhere in EXPR: evaluate the whole thing into a temporary
        before the atomic statement.  */
7294   if (saw_lhs == 0)
7296       enum gimplify_status gs;
7297       gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
7298       if (gs != GS_ALL_DONE)
7299 	saw_lhs = -1;
7302   return saw_lhs;
7305 /* Gimplify an OMP_ATOMIC statement.  */
7307 static enum gimplify_status
7308 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
     /* Lowers the OMP_ATOMIC* tree at *EXPR_P into a GIMPLE atomic
        load/store pair appended to PRE_P.  For capture forms *EXPR_P is
        replaced by the captured value, otherwise cleared.  Returns
        GS_ALL_DONE, or GS_ERROR if any operand fails to gimplify.  */
7310   tree addr = TREE_OPERAND (*expr_p, 0);
     /* OMP_ATOMIC_READ has no rhs operand.  */
7311   tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
7312 	     ? NULL : TREE_OPERAND (*expr_p, 1);
7313   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7314   tree tmp_load;
7315   gimple loadstmt, storestmt;
7317   tmp_load = create_tmp_reg (type, NULL);
     /* Replace uses of the lhs inside RHS with the loaded temporary and
        pre-evaluate everything else.  */
7318   if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
7319     return GS_ERROR;
7321   if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
7322       != GS_ALL_DONE)
7323     return GS_ERROR;
7325   loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
7326   gimplify_seq_add_stmt (pre_p, loadstmt);
7327   if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
7328       != GS_ALL_DONE)
7329     return GS_ERROR;
     /* Plain atomic read stores back the value just loaded.  */
7331   if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
7332     rhs = tmp_load;
7333   storestmt = gimple_build_omp_atomic_store (rhs);
7334   gimplify_seq_add_stmt (pre_p, storestmt);
7335   if (OMP_ATOMIC_SEQ_CST (*expr_p))
7337       gimple_omp_atomic_set_seq_cst (loadstmt);
7338       gimple_omp_atomic_set_seq_cst (storestmt);
     /* Capture forms hand the old/new value back to the caller.  */
7340   switch (TREE_CODE (*expr_p))
7342     case OMP_ATOMIC_READ:
7343     case OMP_ATOMIC_CAPTURE_OLD:
7344       *expr_p = tmp_load;
7345       gimple_omp_atomic_set_need_value (loadstmt);
7346       break;
7347     case OMP_ATOMIC_CAPTURE_NEW:
7348       *expr_p = rhs;
7349       gimple_omp_atomic_set_need_value (storestmt);
7350       break;
7351     default:
7352       *expr_p = NULL;
7353       break;
7356   return GS_ALL_DONE;
7359 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
7360 body, and adding some EH bits. */
7362 static enum gimplify_status
7363 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7365 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7366 gimple g;
7367 gimple_seq body = NULL;
7368 int subcode = 0;
7370 /* Wrap the transaction body in a BIND_EXPR so we have a context
7371 where to put decls for OpenMP. */
7372 if (TREE_CODE (tbody) != BIND_EXPR)
7374 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7375 TREE_SIDE_EFFECTS (bind) = 1;
7376 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7377 TRANSACTION_EXPR_BODY (expr) = bind;
7380 push_gimplify_context ();
7381 temp = voidify_wrapper_expr (*expr_p, NULL);
7383 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7384 pop_gimplify_context (g);
7386 g = gimple_build_transaction (body, NULL);
7387 if (TRANSACTION_EXPR_OUTER (expr))
7388 subcode = GTMA_IS_OUTER;
7389 else if (TRANSACTION_EXPR_RELAXED (expr))
7390 subcode = GTMA_IS_RELAXED;
7391 gimple_transaction_set_subcode (g, subcode);
7393 gimplify_seq_add_stmt (pre_p, g);
7395 if (temp)
7397 *expr_p = temp;
7398 return GS_OK;
7401 *expr_p = NULL_TREE;
7402 return GS_ALL_DONE;
7405 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
7406 expression produces a value to be used as an operand inside a GIMPLE
7407 statement, the value will be stored back in *EXPR_P. This value will
7408 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7409 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7410 emitted in PRE_P and POST_P.
7412 Additionally, this process may overwrite parts of the input
7413 expression during gimplification. Ideally, it should be
7414 possible to do non-destructive gimplification.
7416 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7417 the expression needs to evaluate to a value to be used as
7418 an operand in a GIMPLE statement, this value will be stored in
7419 *EXPR_P on exit. This happens when the caller specifies one
7420 of fb_lvalue or fb_rvalue fallback flags.
7422 PRE_P will contain the sequence of GIMPLE statements corresponding
7423 to the evaluation of EXPR and all the side-effects that must
7424 be executed before the main expression. On exit, the last
7425 statement of PRE_P is the core statement being gimplified. For
7426 instance, when gimplifying 'if (++a)' the last statement in
7427 PRE_P will be 'if (t.1)' where t.1 is the result of
7428 pre-incrementing 'a'.
7430 POST_P will contain the sequence of GIMPLE statements corresponding
7431 to the evaluation of all the side-effects that must be executed
7432 after the main expression. If this is NULL, the post
7433 side-effects are stored at the end of PRE_P.
7435 The reason why the output is split in two is to handle post
7436 side-effects explicitly. In some cases, an expression may have
7437 inner and outer post side-effects which need to be emitted in
7438 an order different from the one given by the recursive
7439 traversal. For instance, for the expression (*p--)++ the post
7440 side-effects of '--' must actually occur *after* the post
7441 side-effects of '++'. However, gimplification will first visit
7442 the inner expression, so if a separate POST sequence was not
7443 used, the resulting sequence would be:
7445 1 t.1 = *p
7446 2 p = p - 1
7447 3 t.2 = t.1 + 1
7448 4 *p = t.2
7450 However, the post-decrement operation in line #2 must not be
7451 evaluated until after the store to *p at line #4, so the
7452 correct sequence should be:
7454 1 t.1 = *p
7455 2 t.2 = t.1 + 1
7456 3 *p = t.2
7457 4 p = p - 1
7459 So, by specifying a separate post queue, it is possible
7460 to emit the post side-effects in the correct order.
7461 If POST_P is NULL, an internal queue will be used. Before
7462 returning to the caller, the sequence POST_P is appended to
7463 the main output sequence PRE_P.
7465 GIMPLE_TEST_F points to a function that takes a tree T and
7466 returns nonzero if T is in the GIMPLE form requested by the
7467 caller. The GIMPLE predicates are in gimple.c.
7469 FALLBACK tells the function what sort of a temporary we want if
7470 gimplification cannot produce an expression that complies with
7471 GIMPLE_TEST_F.
7473 fb_none means that no temporary should be generated
7474 fb_rvalue means that an rvalue is OK to generate
7475 fb_lvalue means that an lvalue is OK to generate
7476 fb_either means that either is OK, but an lvalue is preferable.
7477 fb_mayfail means that gimplification may fail (in which case
7478 GS_ERROR will be returned)
7480 The return value is either GS_ERROR or GS_ALL_DONE, since this
7481 function iterates until EXPR is completely gimplified or an error
7482 occurs. */
7484 enum gimplify_status
7485 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7486 bool (*gimple_test_f) (tree), fallback_t fallback)
7488 tree tmp;
7489 gimple_seq internal_pre = NULL;
7490 gimple_seq internal_post = NULL;
7491 tree save_expr;
7492 bool is_statement;
7493 location_t saved_location;
7494 enum gimplify_status ret;
7495 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7497 save_expr = *expr_p;
7498 if (save_expr == NULL_TREE)
7499 return GS_ALL_DONE;
7501 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7502 is_statement = gimple_test_f == is_gimple_stmt;
7503 if (is_statement)
7504 gcc_assert (pre_p);
7506 /* Consistency checks. */
7507 if (gimple_test_f == is_gimple_reg)
7508 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7509 else if (gimple_test_f == is_gimple_val
7510 || gimple_test_f == is_gimple_call_addr
7511 || gimple_test_f == is_gimple_condexpr
7512 || gimple_test_f == is_gimple_mem_rhs
7513 || gimple_test_f == is_gimple_mem_rhs_or_call
7514 || gimple_test_f == is_gimple_reg_rhs
7515 || gimple_test_f == is_gimple_reg_rhs_or_call
7516 || gimple_test_f == is_gimple_asm_val
7517 || gimple_test_f == is_gimple_mem_ref_addr)
7518 gcc_assert (fallback & fb_rvalue);
7519 else if (gimple_test_f == is_gimple_min_lval
7520 || gimple_test_f == is_gimple_lvalue)
7521 gcc_assert (fallback & fb_lvalue);
7522 else if (gimple_test_f == is_gimple_addressable)
7523 gcc_assert (fallback & fb_either);
7524 else if (gimple_test_f == is_gimple_stmt)
7525 gcc_assert (fallback == fb_none);
7526 else
7528 /* We should have recognized the GIMPLE_TEST_F predicate to
7529 know what kind of fallback to use in case a temporary is
7530 needed to hold the value or address of *EXPR_P. */
7531 gcc_unreachable ();
7534 /* We used to check the predicate here and return immediately if it
7535 succeeds. This is wrong; the design is for gimplification to be
7536 idempotent, and for the predicates to only test for valid forms, not
7537 whether they are fully simplified. */
7538 if (pre_p == NULL)
7539 pre_p = &internal_pre;
7541 if (post_p == NULL)
7542 post_p = &internal_post;
7544 /* Remember the last statements added to PRE_P and POST_P. Every
7545 new statement added by the gimplification helpers needs to be
7546 annotated with location information. To centralize the
7547 responsibility, we remember the last statement that had been
7548 added to both queues before gimplifying *EXPR_P. If
7549 gimplification produces new statements in PRE_P and POST_P, those
7550 statements will be annotated with the same location information
7551 as *EXPR_P. */
7552 pre_last_gsi = gsi_last (*pre_p);
7553 post_last_gsi = gsi_last (*post_p);
7555 saved_location = input_location;
7556 if (save_expr != error_mark_node
7557 && EXPR_HAS_LOCATION (*expr_p))
7558 input_location = EXPR_LOCATION (*expr_p);
7560 /* Loop over the specific gimplifiers until the toplevel node
7561 remains the same. */
7564 /* Strip away as many useless type conversions as possible
7565 at the toplevel. */
7566 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7568 /* Remember the expr. */
7569 save_expr = *expr_p;
7571 /* Die, die, die, my darling. */
7572 if (save_expr == error_mark_node
7573 || (TREE_TYPE (save_expr)
7574 && TREE_TYPE (save_expr) == error_mark_node))
7576 ret = GS_ERROR;
7577 break;
7580 /* Do any language-specific gimplification. */
7581 ret = ((enum gimplify_status)
7582 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7583 if (ret == GS_OK)
7585 if (*expr_p == NULL_TREE)
7586 break;
7587 if (*expr_p != save_expr)
7588 continue;
7590 else if (ret != GS_UNHANDLED)
7591 break;
7593 /* Make sure that all the cases set 'ret' appropriately. */
7594 ret = GS_UNHANDLED;
7595 switch (TREE_CODE (*expr_p))
7597 /* First deal with the special cases. */
7599 case POSTINCREMENT_EXPR:
7600 case POSTDECREMENT_EXPR:
7601 case PREINCREMENT_EXPR:
7602 case PREDECREMENT_EXPR:
7603 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7604 fallback != fb_none,
7605 TREE_TYPE (*expr_p));
7606 break;
7608 case VIEW_CONVERT_EXPR:
7609 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
7610 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
7612 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7613 post_p, is_gimple_val, fb_rvalue);
7614 recalculate_side_effects (*expr_p);
7615 break;
7617 /* Fallthru. */
7619 case ARRAY_REF:
7620 case ARRAY_RANGE_REF:
7621 case REALPART_EXPR:
7622 case IMAGPART_EXPR:
7623 case COMPONENT_REF:
7624 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7625 fallback ? fallback : fb_rvalue);
7626 break;
7628 case COND_EXPR:
7629 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7631 /* C99 code may assign to an array in a structure value of a
7632 conditional expression, and this has undefined behavior
7633 only on execution, so create a temporary if an lvalue is
7634 required. */
7635 if (fallback == fb_lvalue)
7637 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7638 mark_addressable (*expr_p);
7639 ret = GS_OK;
7641 break;
7643 case CALL_EXPR:
7644 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7646 /* C99 code may assign to an array in a structure returned
7647 from a function, and this has undefined behavior only on
7648 execution, so create a temporary if an lvalue is
7649 required. */
7650 if (fallback == fb_lvalue)
7652 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7653 mark_addressable (*expr_p);
7654 ret = GS_OK;
7656 break;
7658 case TREE_LIST:
7659 gcc_unreachable ();
7661 case COMPOUND_EXPR:
7662 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7663 break;
7665 case COMPOUND_LITERAL_EXPR:
7666 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7667 gimple_test_f, fallback);
7668 break;
7670 case MODIFY_EXPR:
7671 case INIT_EXPR:
7672 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7673 fallback != fb_none);
7674 break;
7676 case TRUTH_ANDIF_EXPR:
7677 case TRUTH_ORIF_EXPR:
7679 /* Preserve the original type of the expression and the
7680 source location of the outer expression. */
7681 tree org_type = TREE_TYPE (*expr_p);
7682 *expr_p = gimple_boolify (*expr_p);
7683 *expr_p = build3_loc (input_location, COND_EXPR,
7684 org_type, *expr_p,
7685 fold_convert_loc
7686 (input_location,
7687 org_type, boolean_true_node),
7688 fold_convert_loc
7689 (input_location,
7690 org_type, boolean_false_node));
7691 ret = GS_OK;
7692 break;
7695 case TRUTH_NOT_EXPR:
7697 tree type = TREE_TYPE (*expr_p);
7698 /* The parsers are careful to generate TRUTH_NOT_EXPR
7699 only with operands that are always zero or one.
7700 We do not fold here but handle the only interesting case
7701 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7702 *expr_p = gimple_boolify (*expr_p);
7703 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7704 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7705 TREE_TYPE (*expr_p),
7706 TREE_OPERAND (*expr_p, 0));
7707 else
7708 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7709 TREE_TYPE (*expr_p),
7710 TREE_OPERAND (*expr_p, 0),
7711 build_int_cst (TREE_TYPE (*expr_p), 1));
7712 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7713 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7714 ret = GS_OK;
7715 break;
7718 case ADDR_EXPR:
7719 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7720 break;
7722 case ANNOTATE_EXPR:
7724 tree cond = TREE_OPERAND (*expr_p, 0);
7725 tree kind = TREE_OPERAND (*expr_p, 1);
7726 tree type = TREE_TYPE (cond);
7727 if (!INTEGRAL_TYPE_P (type))
7729 *expr_p = cond;
7730 ret = GS_OK;
7731 break;
7733 tree tmp = create_tmp_var (type, NULL);
7734 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
7735 gimple call
7736 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
7737 gimple_call_set_lhs (call, tmp);
7738 gimplify_seq_add_stmt (pre_p, call);
7739 *expr_p = tmp;
7740 ret = GS_ALL_DONE;
7741 break;
7744 case VA_ARG_EXPR:
7745 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7746 break;
7748 CASE_CONVERT:
7749 if (IS_EMPTY_STMT (*expr_p))
7751 ret = GS_ALL_DONE;
7752 break;
7755 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7756 || fallback == fb_none)
7758 /* Just strip a conversion to void (or in void context) and
7759 try again. */
7760 *expr_p = TREE_OPERAND (*expr_p, 0);
7761 ret = GS_OK;
7762 break;
7765 ret = gimplify_conversion (expr_p);
7766 if (ret == GS_ERROR)
7767 break;
7768 if (*expr_p != save_expr)
7769 break;
7770 /* FALLTHRU */
7772 case FIX_TRUNC_EXPR:
7773 /* unary_expr: ... | '(' cast ')' val | ... */
7774 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7775 is_gimple_val, fb_rvalue);
7776 recalculate_side_effects (*expr_p);
7777 break;
7779 case INDIRECT_REF:
7781 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7782 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7783 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7785 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7786 if (*expr_p != save_expr)
7788 ret = GS_OK;
7789 break;
7792 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7793 is_gimple_reg, fb_rvalue);
7794 if (ret == GS_ERROR)
7795 break;
7797 recalculate_side_effects (*expr_p);
7798 *expr_p = fold_build2_loc (input_location, MEM_REF,
7799 TREE_TYPE (*expr_p),
7800 TREE_OPERAND (*expr_p, 0),
7801 build_int_cst (saved_ptr_type, 0));
7802 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7803 TREE_THIS_NOTRAP (*expr_p) = notrap;
7804 ret = GS_OK;
7805 break;
7808 /* We arrive here through the various re-gimplifcation paths. */
7809 case MEM_REF:
7810 /* First try re-folding the whole thing. */
7811 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7812 TREE_OPERAND (*expr_p, 0),
7813 TREE_OPERAND (*expr_p, 1));
7814 if (tmp)
7816 *expr_p = tmp;
7817 recalculate_side_effects (*expr_p);
7818 ret = GS_OK;
7819 break;
7821 /* Avoid re-gimplifying the address operand if it is already
7822 in suitable form. Re-gimplifying would mark the address
7823 operand addressable. Always gimplify when not in SSA form
7824 as we still may have to gimplify decls with value-exprs. */
7825 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7826 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7828 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7829 is_gimple_mem_ref_addr, fb_rvalue);
7830 if (ret == GS_ERROR)
7831 break;
7833 recalculate_side_effects (*expr_p);
7834 ret = GS_ALL_DONE;
7835 break;
7837 /* Constants need not be gimplified. */
7838 case INTEGER_CST:
7839 case REAL_CST:
7840 case FIXED_CST:
7841 case STRING_CST:
7842 case COMPLEX_CST:
7843 case VECTOR_CST:
7844 /* Drop the overflow flag on constants, we do not want
7845 that in the GIMPLE IL. */
7846 if (TREE_OVERFLOW_P (*expr_p))
7847 *expr_p = drop_tree_overflow (*expr_p);
7848 ret = GS_ALL_DONE;
7849 break;
7851 case CONST_DECL:
7852 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7853 CONST_DECL node. Otherwise the decl is replaceable by its
7854 value. */
7855 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7856 if (fallback & fb_lvalue)
7857 ret = GS_ALL_DONE;
7858 else
7860 *expr_p = DECL_INITIAL (*expr_p);
7861 ret = GS_OK;
7863 break;
7865 case DECL_EXPR:
7866 ret = gimplify_decl_expr (expr_p, pre_p);
7867 break;
7869 case BIND_EXPR:
7870 ret = gimplify_bind_expr (expr_p, pre_p);
7871 break;
7873 case LOOP_EXPR:
7874 ret = gimplify_loop_expr (expr_p, pre_p);
7875 break;
7877 case SWITCH_EXPR:
7878 ret = gimplify_switch_expr (expr_p, pre_p);
7879 break;
7881 case EXIT_EXPR:
7882 ret = gimplify_exit_expr (expr_p);
7883 break;
7885 case GOTO_EXPR:
7886 /* If the target is not LABEL, then it is a computed jump
7887 and the target needs to be gimplified. */
7888 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7890 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7891 NULL, is_gimple_val, fb_rvalue);
7892 if (ret == GS_ERROR)
7893 break;
7895 gimplify_seq_add_stmt (pre_p,
7896 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7897 ret = GS_ALL_DONE;
7898 break;
7900 case PREDICT_EXPR:
7901 gimplify_seq_add_stmt (pre_p,
7902 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7903 PREDICT_EXPR_OUTCOME (*expr_p)));
7904 ret = GS_ALL_DONE;
7905 break;
7907 case LABEL_EXPR:
7908 ret = GS_ALL_DONE;
7909 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7910 == current_function_decl);
7911 gimplify_seq_add_stmt (pre_p,
7912 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7913 break;
7915 case CASE_LABEL_EXPR:
7916 ret = gimplify_case_label_expr (expr_p, pre_p);
7917 break;
7919 case RETURN_EXPR:
7920 ret = gimplify_return_expr (*expr_p, pre_p);
7921 break;
7923 case CONSTRUCTOR:
7924 /* Don't reduce this in place; let gimplify_init_constructor work its
7925 magic. Buf if we're just elaborating this for side effects, just
7926 gimplify any element that has side-effects. */
7927 if (fallback == fb_none)
7929 unsigned HOST_WIDE_INT ix;
7930 tree val;
7931 tree temp = NULL_TREE;
7932 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7933 if (TREE_SIDE_EFFECTS (val))
7934 append_to_statement_list (val, &temp);
7936 *expr_p = temp;
7937 ret = temp ? GS_OK : GS_ALL_DONE;
7939 /* C99 code may assign to an array in a constructed
7940 structure or union, and this has undefined behavior only
7941 on execution, so create a temporary if an lvalue is
7942 required. */
7943 else if (fallback == fb_lvalue)
7945 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7946 mark_addressable (*expr_p);
7947 ret = GS_OK;
7949 else
7950 ret = GS_ALL_DONE;
7951 break;
7953 /* The following are special cases that are not handled by the
7954 original GIMPLE grammar. */
7956 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7957 eliminated. */
7958 case SAVE_EXPR:
7959 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7960 break;
7962 case BIT_FIELD_REF:
7963 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7964 post_p, is_gimple_lvalue, fb_either);
7965 recalculate_side_effects (*expr_p);
7966 break;
7968 case TARGET_MEM_REF:
7970 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7972 if (TMR_BASE (*expr_p))
7973 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7974 post_p, is_gimple_mem_ref_addr, fb_either);
7975 if (TMR_INDEX (*expr_p))
7976 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7977 post_p, is_gimple_val, fb_rvalue);
7978 if (TMR_INDEX2 (*expr_p))
7979 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7980 post_p, is_gimple_val, fb_rvalue);
7981 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7982 ret = MIN (r0, r1);
7984 break;
7986 case NON_LVALUE_EXPR:
7987 /* This should have been stripped above. */
7988 gcc_unreachable ();
7990 case ASM_EXPR:
7991 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7992 break;
7994 case TRY_FINALLY_EXPR:
7995 case TRY_CATCH_EXPR:
7997 gimple_seq eval, cleanup;
7998 gimple try_;
8000 /* Calls to destructors are generated automatically in FINALLY/CATCH
8001 block. They should have location as UNKNOWN_LOCATION. However,
8002 gimplify_call_expr will reset these call stmts to input_location
8003 if it finds stmt's location is unknown. To prevent resetting for
8004 destructors, we set the input_location to unknown.
8005 Note that this only affects the destructor calls in FINALLY/CATCH
8006 block, and will automatically reset to its original value by the
8007 end of gimplify_expr. */
8008 input_location = UNKNOWN_LOCATION;
8009 eval = cleanup = NULL;
8010 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
8011 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
8012 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
8013 if (gimple_seq_empty_p (cleanup))
8015 gimple_seq_add_seq (pre_p, eval);
8016 ret = GS_ALL_DONE;
8017 break;
8019 try_ = gimple_build_try (eval, cleanup,
8020 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
8021 ? GIMPLE_TRY_FINALLY
8022 : GIMPLE_TRY_CATCH);
8023 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
8024 gimple_set_location (try_, saved_location);
8025 else
8026 gimple_set_location (try_, EXPR_LOCATION (save_expr));
8027 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
8028 gimple_try_set_catch_is_cleanup (try_,
8029 TRY_CATCH_IS_CLEANUP (*expr_p));
8030 gimplify_seq_add_stmt (pre_p, try_);
8031 ret = GS_ALL_DONE;
8032 break;
8035 case CLEANUP_POINT_EXPR:
8036 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
8037 break;
8039 case TARGET_EXPR:
8040 ret = gimplify_target_expr (expr_p, pre_p, post_p);
8041 break;
8043 case CATCH_EXPR:
8045 gimple c;
8046 gimple_seq handler = NULL;
8047 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
8048 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
8049 gimplify_seq_add_stmt (pre_p, c);
8050 ret = GS_ALL_DONE;
8051 break;
8054 case EH_FILTER_EXPR:
8056 gimple ehf;
8057 gimple_seq failure = NULL;
8059 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
8060 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
8061 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
8062 gimplify_seq_add_stmt (pre_p, ehf);
8063 ret = GS_ALL_DONE;
8064 break;
8067 case OBJ_TYPE_REF:
8069 enum gimplify_status r0, r1;
8070 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
8071 post_p, is_gimple_val, fb_rvalue);
8072 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
8073 post_p, is_gimple_val, fb_rvalue);
8074 TREE_SIDE_EFFECTS (*expr_p) = 0;
8075 ret = MIN (r0, r1);
8077 break;
8079 case LABEL_DECL:
8080 /* We get here when taking the address of a label. We mark
8081 the label as "forced"; meaning it can never be removed and
8082 it is a potential target for any computed goto. */
8083 FORCED_LABEL (*expr_p) = 1;
8084 ret = GS_ALL_DONE;
8085 break;
8087 case STATEMENT_LIST:
8088 ret = gimplify_statement_list (expr_p, pre_p);
8089 break;
8091 case WITH_SIZE_EXPR:
8093 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8094 post_p == &internal_post ? NULL : post_p,
8095 gimple_test_f, fallback);
8096 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8097 is_gimple_val, fb_rvalue);
8098 ret = GS_ALL_DONE;
8100 break;
8102 case VAR_DECL:
8103 case PARM_DECL:
8104 ret = gimplify_var_or_parm_decl (expr_p);
8105 break;
8107 case RESULT_DECL:
8108 /* When within an OpenMP context, notice uses of variables. */
8109 if (gimplify_omp_ctxp)
8110 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
8111 ret = GS_ALL_DONE;
8112 break;
8114 case SSA_NAME:
8115 /* Allow callbacks into the gimplifier during optimization. */
8116 ret = GS_ALL_DONE;
8117 break;
8119 case OMP_PARALLEL:
8120 gimplify_omp_parallel (expr_p, pre_p);
8121 ret = GS_ALL_DONE;
8122 break;
8124 case OMP_TASK:
8125 gimplify_omp_task (expr_p, pre_p);
8126 ret = GS_ALL_DONE;
8127 break;
8129 case OMP_FOR:
8130 case OMP_SIMD:
8131 case CILK_SIMD:
8132 case OMP_DISTRIBUTE:
8133 ret = gimplify_omp_for (expr_p, pre_p);
8134 break;
8136 case OMP_SECTIONS:
8137 case OMP_SINGLE:
8138 case OMP_TARGET:
8139 case OMP_TARGET_DATA:
8140 case OMP_TEAMS:
8141 gimplify_omp_workshare (expr_p, pre_p);
8142 ret = GS_ALL_DONE;
8143 break;
8145 case OMP_TARGET_UPDATE:
8146 gimplify_omp_target_update (expr_p, pre_p);
8147 ret = GS_ALL_DONE;
8148 break;
8150 case OMP_SECTION:
8151 case OMP_MASTER:
8152 case OMP_TASKGROUP:
8153 case OMP_ORDERED:
8154 case OMP_CRITICAL:
8156 gimple_seq body = NULL;
8157 gimple g;
8159 gimplify_and_add (OMP_BODY (*expr_p), &body);
8160 switch (TREE_CODE (*expr_p))
8162 case OMP_SECTION:
8163 g = gimple_build_omp_section (body);
8164 break;
8165 case OMP_MASTER:
8166 g = gimple_build_omp_master (body);
8167 break;
8168 case OMP_TASKGROUP:
8170 gimple_seq cleanup = NULL;
8171 tree fn
8172 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
8173 g = gimple_build_call (fn, 0);
8174 gimple_seq_add_stmt (&cleanup, g);
8175 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
8176 body = NULL;
8177 gimple_seq_add_stmt (&body, g);
8178 g = gimple_build_omp_taskgroup (body);
8180 break;
8181 case OMP_ORDERED:
8182 g = gimple_build_omp_ordered (body);
8183 break;
8184 case OMP_CRITICAL:
8185 g = gimple_build_omp_critical (body,
8186 OMP_CRITICAL_NAME (*expr_p));
8187 break;
8188 default:
8189 gcc_unreachable ();
8191 gimplify_seq_add_stmt (pre_p, g);
8192 ret = GS_ALL_DONE;
8193 break;
8196 case OMP_ATOMIC:
8197 case OMP_ATOMIC_READ:
8198 case OMP_ATOMIC_CAPTURE_OLD:
8199 case OMP_ATOMIC_CAPTURE_NEW:
8200 ret = gimplify_omp_atomic (expr_p, pre_p);
8201 break;
8203 case TRANSACTION_EXPR:
8204 ret = gimplify_transaction (expr_p, pre_p);
8205 break;
8207 case TRUTH_AND_EXPR:
8208 case TRUTH_OR_EXPR:
8209 case TRUTH_XOR_EXPR:
8211 tree orig_type = TREE_TYPE (*expr_p);
8212 tree new_type, xop0, xop1;
8213 *expr_p = gimple_boolify (*expr_p);
8214 new_type = TREE_TYPE (*expr_p);
8215 if (!useless_type_conversion_p (orig_type, new_type))
8217 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
8218 ret = GS_OK;
8219 break;
8222 /* Boolified binary truth expressions are semantically equivalent
8223 to bitwise binary expressions. Canonicalize them to the
8224 bitwise variant. */
8225 switch (TREE_CODE (*expr_p))
8227 case TRUTH_AND_EXPR:
8228 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
8229 break;
8230 case TRUTH_OR_EXPR:
8231 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
8232 break;
8233 case TRUTH_XOR_EXPR:
8234 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
8235 break;
8236 default:
8237 break;
8239 /* Now make sure that operands have compatible type to
8240 expression's new_type. */
8241 xop0 = TREE_OPERAND (*expr_p, 0);
8242 xop1 = TREE_OPERAND (*expr_p, 1);
8243 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8244 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8245 new_type,
8246 xop0);
8247 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8248 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8249 new_type,
8250 xop1);
8251 /* Continue classified as tcc_binary. */
8252 goto expr_2;
8255 case FMA_EXPR:
8256 case VEC_COND_EXPR:
8257 case VEC_PERM_EXPR:
8258 /* Classified as tcc_expression. */
8259 goto expr_3;
8261 case POINTER_PLUS_EXPR:
8263 enum gimplify_status r0, r1;
8264 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8265 post_p, is_gimple_val, fb_rvalue);
8266 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8267 post_p, is_gimple_val, fb_rvalue);
8268 recalculate_side_effects (*expr_p);
8269 ret = MIN (r0, r1);
8270 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
8271 after gimplifying operands - this is similar to how
8272 it would be folding all gimplified stmts on creation
8273 to have them canonicalized, which is what we eventually
8274 should do anyway. */
8275 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
8276 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
8278 *expr_p = build_fold_addr_expr_with_type_loc
8279 (input_location,
8280 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
8281 TREE_OPERAND (*expr_p, 0),
8282 fold_convert (ptr_type_node,
8283 TREE_OPERAND (*expr_p, 1))),
8284 TREE_TYPE (*expr_p));
8285 ret = MIN (ret, GS_OK);
8287 break;
8290 case CILK_SYNC_STMT:
8292 if (!fn_contains_cilk_spawn_p (cfun))
8294 error_at (EXPR_LOCATION (*expr_p),
8295 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8296 ret = GS_ERROR;
8298 else
8300 gimplify_cilk_sync (expr_p, pre_p);
8301 ret = GS_ALL_DONE;
8303 break;
8306 default:
8307 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
8309 case tcc_comparison:
8310 /* Handle comparison of objects of non scalar mode aggregates
8311 with a call to memcmp. It would be nice to only have to do
8312 this for variable-sized objects, but then we'd have to allow
8313 the same nest of reference nodes we allow for MODIFY_EXPR and
8314 that's too complex.
8316 Compare scalar mode aggregates as scalar mode values. Using
8317 memcmp for them would be very inefficient at best, and is
8318 plain wrong if bitfields are involved. */
8320 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
8322 /* Vector comparisons need no boolification. */
8323 if (TREE_CODE (type) == VECTOR_TYPE)
8324 goto expr_2;
8325 else if (!AGGREGATE_TYPE_P (type))
8327 tree org_type = TREE_TYPE (*expr_p);
8328 *expr_p = gimple_boolify (*expr_p);
8329 if (!useless_type_conversion_p (org_type,
8330 TREE_TYPE (*expr_p)))
8332 *expr_p = fold_convert_loc (input_location,
8333 org_type, *expr_p);
8334 ret = GS_OK;
8336 else
8337 goto expr_2;
8339 else if (TYPE_MODE (type) != BLKmode)
8340 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8341 else
8342 ret = gimplify_variable_sized_compare (expr_p);
8344 break;
8347 /* If *EXPR_P does not need to be special-cased, handle it
8348 according to its class. */
8349 case tcc_unary:
8350 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8351 post_p, is_gimple_val, fb_rvalue);
8352 break;
8354 case tcc_binary:
8355 expr_2:
8357 enum gimplify_status r0, r1;
8359 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8360 post_p, is_gimple_val, fb_rvalue);
8361 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8362 post_p, is_gimple_val, fb_rvalue);
8364 ret = MIN (r0, r1);
8365 break;
8368 expr_3:
8370 enum gimplify_status r0, r1, r2;
8372 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8373 post_p, is_gimple_val, fb_rvalue);
8374 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8375 post_p, is_gimple_val, fb_rvalue);
8376 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8377 post_p, is_gimple_val, fb_rvalue);
8379 ret = MIN (MIN (r0, r1), r2);
8380 break;
8383 case tcc_declaration:
8384 case tcc_constant:
8385 ret = GS_ALL_DONE;
8386 goto dont_recalculate;
8388 default:
8389 gcc_unreachable ();
8392 recalculate_side_effects (*expr_p);
8394 dont_recalculate:
8395 break;
8398 gcc_assert (*expr_p || ret != GS_OK);
8400 while (ret == GS_OK);
8402 /* If we encountered an error_mark somewhere nested inside, either
8403 stub out the statement or propagate the error back out. */
8404 if (ret == GS_ERROR)
8406 if (is_statement)
8407 *expr_p = NULL;
8408 goto out;
8411 /* This was only valid as a return value from the langhook, which
8412 we handled. Make sure it doesn't escape from any other context. */
8413 gcc_assert (ret != GS_UNHANDLED);
8415 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8417 /* We aren't looking for a value, and we don't have a valid
8418 statement. If it doesn't have side-effects, throw it away. */
8419 if (!TREE_SIDE_EFFECTS (*expr_p))
8420 *expr_p = NULL;
8421 else if (!TREE_THIS_VOLATILE (*expr_p))
8423 /* This is probably a _REF that contains something nested that
8424 has side effects. Recurse through the operands to find it. */
8425 enum tree_code code = TREE_CODE (*expr_p);
8427 switch (code)
8429 case COMPONENT_REF:
8430 case REALPART_EXPR:
8431 case IMAGPART_EXPR:
8432 case VIEW_CONVERT_EXPR:
8433 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8434 gimple_test_f, fallback);
8435 break;
8437 case ARRAY_REF:
8438 case ARRAY_RANGE_REF:
8439 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8440 gimple_test_f, fallback);
8441 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8442 gimple_test_f, fallback);
8443 break;
8445 default:
8446 /* Anything else with side-effects must be converted to
8447 a valid statement before we get here. */
8448 gcc_unreachable ();
8451 *expr_p = NULL;
8453 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8454 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8456 /* Historically, the compiler has treated a bare reference
8457 to a non-BLKmode volatile lvalue as forcing a load. */
8458 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8460 /* Normally, we do not want to create a temporary for a
8461 TREE_ADDRESSABLE type because such a type should not be
8462 copied by bitwise-assignment. However, we make an
8463 exception here, as all we are doing here is ensuring that
8464 we read the bytes that make up the type. We use
8465 create_tmp_var_raw because create_tmp_var will abort when
8466 given a TREE_ADDRESSABLE type. */
8467 tree tmp = create_tmp_var_raw (type, "vol");
8468 gimple_add_tmp_var (tmp);
8469 gimplify_assign (tmp, *expr_p, pre_p);
8470 *expr_p = NULL;
8472 else
8473 /* We can't do anything useful with a volatile reference to
8474 an incomplete type, so just throw it away. Likewise for
8475 a BLKmode type, since any implicit inner load should
8476 already have been turned into an explicit one by the
8477 gimplification process. */
8478 *expr_p = NULL;
8481 /* If we are gimplifying at the statement level, we're done. Tack
8482 everything together and return. */
8483 if (fallback == fb_none || is_statement)
8485 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8486 it out for GC to reclaim it. */
8487 *expr_p = NULL_TREE;
8489 if (!gimple_seq_empty_p (internal_pre)
8490 || !gimple_seq_empty_p (internal_post))
8492 gimplify_seq_add_seq (&internal_pre, internal_post);
8493 gimplify_seq_add_seq (pre_p, internal_pre);
8496 /* The result of gimplifying *EXPR_P is going to be the last few
8497 statements in *PRE_P and *POST_P. Add location information
8498 to all the statements that were added by the gimplification
8499 helpers. */
8500 if (!gimple_seq_empty_p (*pre_p))
8501 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8503 if (!gimple_seq_empty_p (*post_p))
8504 annotate_all_with_location_after (*post_p, post_last_gsi,
8505 input_location);
8507 goto out;
8510 #ifdef ENABLE_GIMPLE_CHECKING
8511 if (*expr_p)
8513 enum tree_code code = TREE_CODE (*expr_p);
8514 /* These expressions should already be in gimple IR form. */
8515 gcc_assert (code != MODIFY_EXPR
8516 && code != ASM_EXPR
8517 && code != BIND_EXPR
8518 && code != CATCH_EXPR
8519 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8520 && code != EH_FILTER_EXPR
8521 && code != GOTO_EXPR
8522 && code != LABEL_EXPR
8523 && code != LOOP_EXPR
8524 && code != SWITCH_EXPR
8525 && code != TRY_FINALLY_EXPR
8526 && code != OMP_CRITICAL
8527 && code != OMP_FOR
8528 && code != OMP_MASTER
8529 && code != OMP_TASKGROUP
8530 && code != OMP_ORDERED
8531 && code != OMP_PARALLEL
8532 && code != OMP_SECTIONS
8533 && code != OMP_SECTION
8534 && code != OMP_SINGLE);
8536 #endif
8538 /* Otherwise we're gimplifying a subexpression, so the resulting
8539 value is interesting. If it's a valid operand that matches
8540 GIMPLE_TEST_F, we're done. Unless we are handling some
8541 post-effects internally; if that's the case, we need to copy into
8542 a temporary before adding the post-effects to POST_P. */
8543 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
8544 goto out;
8546 /* Otherwise, we need to create a new temporary for the gimplified
8547 expression. */
8549 /* We can't return an lvalue if we have an internal postqueue. The
8550 object the lvalue refers to would (probably) be modified by the
8551 postqueue; we need to copy the value out first, which means an
8552 rvalue. */
8553 if ((fallback & fb_lvalue)
8554 && gimple_seq_empty_p (internal_post)
8555 && is_gimple_addressable (*expr_p))
8557 /* An lvalue will do. Take the address of the expression, store it
8558 in a temporary, and replace the expression with an INDIRECT_REF of
8559 that temporary. */
8560 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8561 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8562 *expr_p = build_simple_mem_ref (tmp);
8564 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8566 /* An rvalue will do. Assign the gimplified expression into a
8567 new temporary TMP and replace the original expression with
8568 TMP. First, make sure that the expression has a type so that
8569 it can be assigned into a temporary. */
8570 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
8571 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
8573 else
8575 #ifdef ENABLE_GIMPLE_CHECKING
8576 if (!(fallback & fb_mayfail))
8578 fprintf (stderr, "gimplification failed:\n");
8579 print_generic_expr (stderr, *expr_p, 0);
8580 debug_tree (*expr_p);
8581 internal_error ("gimplification failed");
8583 #endif
8584 gcc_assert (fallback & fb_mayfail);
8586 /* If this is an asm statement, and the user asked for the
8587 impossible, don't die. Fail and let gimplify_asm_expr
8588 issue an error. */
8589 ret = GS_ERROR;
8590 goto out;
8593 /* Make sure the temporary matches our predicate. */
8594 gcc_assert ((*gimple_test_f) (*expr_p));
8596 if (!gimple_seq_empty_p (internal_post))
8598 annotate_all_with_location (internal_post, input_location);
8599 gimplify_seq_add_seq (pre_p, internal_post);
8602 out:
8603 input_location = saved_location;
8604 return ret;
8607 /* Look through TYPE for variable-sized objects and gimplify each such
8608 size that we find. Add to LIST_P any statements generated. */
8610 void
8611 gimplify_type_sizes (tree type, gimple_seq *list_p)
8613 tree field, t;
8615 if (type == NULL || type == error_mark_node)
8616 return;
8618 /* We first do the main variant, then copy into any other variants. */
8619 type = TYPE_MAIN_VARIANT (type);
8621 /* Avoid infinite recursion. */
8622 if (TYPE_SIZES_GIMPLIFIED (type))
8623 return;
8625 TYPE_SIZES_GIMPLIFIED (type) = 1;
8627 switch (TREE_CODE (type))
8629 case INTEGER_TYPE:
8630 case ENUMERAL_TYPE:
8631 case BOOLEAN_TYPE:
8632 case REAL_TYPE:
8633 case FIXED_POINT_TYPE:
8634 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8635 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
8637 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8639 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8640 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
8642 break;
8644 case ARRAY_TYPE:
8645 /* These types may not have declarations, so handle them here. */
8646 gimplify_type_sizes (TREE_TYPE (type), list_p);
8647 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
8648 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
8649 with assigned stack slots, for -O1+ -g they should be tracked
8650 by VTA. */
8651 if (!(TYPE_NAME (type)
8652 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8653 && DECL_IGNORED_P (TYPE_NAME (type)))
8654 && TYPE_DOMAIN (type)
8655 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8657 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8658 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8659 DECL_IGNORED_P (t) = 0;
8660 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8661 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8662 DECL_IGNORED_P (t) = 0;
8664 break;
8666 case RECORD_TYPE:
8667 case UNION_TYPE:
8668 case QUAL_UNION_TYPE:
8669 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8670 if (TREE_CODE (field) == FIELD_DECL)
8672 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
8673 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8674 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8675 gimplify_type_sizes (TREE_TYPE (field), list_p);
8677 break;
8679 case POINTER_TYPE:
8680 case REFERENCE_TYPE:
8681 /* We used to recurse on the pointed-to type here, which turned out to
8682 be incorrect because its definition might refer to variables not
8683 yet initialized at this point if a forward declaration is involved.
8685 It was actually useful for anonymous pointed-to types to ensure
8686 that the sizes evaluation dominates every possible later use of the
8687 values. Restricting to such types here would be safe since there
8688 is no possible forward declaration around, but would introduce an
8689 undesirable middle-end semantic to anonymity. We then defer to
8690 front-ends the responsibility of ensuring that the sizes are
8691 evaluated both early and late enough, e.g. by attaching artificial
8692 type declarations to the tree. */
8693 break;
8695 default:
8696 break;
8699 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8700 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
8702 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8704 TYPE_SIZE (t) = TYPE_SIZE (type);
8705 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8706 TYPE_SIZES_GIMPLIFIED (t) = 1;
8710 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8711 a size or position, has had all of its SAVE_EXPRs evaluated.
8712 We add any required statements to *STMT_P. */
8714 void
8715 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
8717 tree expr = *expr_p;
8719 /* We don't do anything if the value isn't there, is constant, or contains
8720 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
8721 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
8722 will want to replace it with a new variable, but that will cause problems
8723 if this type is from outside the function. It's OK to have that here. */
8724 if (is_gimple_sizepos (expr))
8725 return;
8727 *expr_p = unshare_expr (expr);
8729 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
8732 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8733 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8734 is true, also gimplify the parameters. */
8736 gimple
8737 gimplify_body (tree fndecl, bool do_parms)
8739 location_t saved_location = input_location;
8740 gimple_seq parm_stmts, seq;
8741 gimple outer_bind;
8742 struct cgraph_node *cgn;
8744 timevar_push (TV_TREE_GIMPLIFY);
8746 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8747 gimplification. */
8748 default_rtl_profile ();
8750 gcc_assert (gimplify_ctxp == NULL);
8751 push_gimplify_context ();
8753 if (flag_openmp)
8755 gcc_assert (gimplify_omp_ctxp == NULL);
8756 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
8757 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
8760 /* Unshare most shared trees in the body and in that of any nested functions.
8761 It would seem we don't have to do this for nested functions because
8762 they are supposed to be output and then the outer function gimplified
8763 first, but the g++ front end doesn't always do it that way. */
8764 unshare_body (fndecl);
8765 unvisit_body (fndecl);
8767 cgn = cgraph_get_node (fndecl);
8768 if (cgn && cgn->origin)
8769 nonlocal_vlas = pointer_set_create ();
8771 /* Make sure input_location isn't set to something weird. */
8772 input_location = DECL_SOURCE_LOCATION (fndecl);
8774 /* Resolve callee-copies. This has to be done before processing
8775 the body so that DECL_VALUE_EXPR gets processed correctly. */
8776 parm_stmts = do_parms ? gimplify_parameters () : NULL;
8778 /* Gimplify the function's body. */
8779 seq = NULL;
8780 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
8781 outer_bind = gimple_seq_first_stmt (seq);
8782 if (!outer_bind)
8784 outer_bind = gimple_build_nop ();
8785 gimplify_seq_add_stmt (&seq, outer_bind);
8788 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8789 not the case, wrap everything in a GIMPLE_BIND to make it so. */
8790 if (gimple_code (outer_bind) == GIMPLE_BIND
8791 && gimple_seq_first (seq) == gimple_seq_last (seq))
8793 else
8794 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8796 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8798 /* If we had callee-copies statements, insert them at the beginning
8799 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
8800 if (!gimple_seq_empty_p (parm_stmts))
8802 tree parm;
8804 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8805 gimple_bind_set_body (outer_bind, parm_stmts);
8807 for (parm = DECL_ARGUMENTS (current_function_decl);
8808 parm; parm = DECL_CHAIN (parm))
8809 if (DECL_HAS_VALUE_EXPR_P (parm))
8811 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8812 DECL_IGNORED_P (parm) = 0;
8816 if (nonlocal_vlas)
8818 if (nonlocal_vla_vars)
8820 /* tree-nested.c may later on call declare_vars (..., true);
8821 which relies on BLOCK_VARS chain to be the tail of the
8822 gimple_bind_vars chain. Ensure we don't violate that
8823 assumption. */
8824 if (gimple_bind_block (outer_bind)
8825 == DECL_INITIAL (current_function_decl))
8826 declare_vars (nonlocal_vla_vars, outer_bind, true);
8827 else
8828 BLOCK_VARS (DECL_INITIAL (current_function_decl))
8829 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
8830 nonlocal_vla_vars);
8831 nonlocal_vla_vars = NULL_TREE;
8833 pointer_set_destroy (nonlocal_vlas);
8834 nonlocal_vlas = NULL;
8837 if ((flag_openmp || flag_openmp_simd) && gimplify_omp_ctxp)
8839 delete_omp_context (gimplify_omp_ctxp);
8840 gimplify_omp_ctxp = NULL;
8843 pop_gimplify_context (outer_bind);
8844 gcc_assert (gimplify_ctxp == NULL);
8846 #ifdef ENABLE_CHECKING
8847 if (!seen_error ())
8848 verify_gimple_in_seq (gimple_bind_body (outer_bind));
8849 #endif
8851 timevar_pop (TV_TREE_GIMPLIFY);
8852 input_location = saved_location;
8854 return outer_bind;
8857 typedef char *char_p; /* For DEF_VEC_P. */
8859 /* Return whether we should exclude FNDECL from instrumentation. */
8861 static bool
8862 flag_instrument_functions_exclude_p (tree fndecl)
8864 vec<char_p> *v;
8866 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8867 if (v && v->length () > 0)
8869 const char *name;
8870 int i;
8871 char *s;
8873 name = lang_hooks.decl_printable_name (fndecl, 0);
8874 FOR_EACH_VEC_ELT (*v, i, s)
8875 if (strstr (name, s) != NULL)
8876 return true;
8879 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8880 if (v && v->length () > 0)
8882 const char *name;
8883 int i;
8884 char *s;
8886 name = DECL_SOURCE_FILE (fndecl);
8887 FOR_EACH_VEC_ELT (*v, i, s)
8888 if (strstr (name, s) != NULL)
8889 return true;
8892 return false;
8895 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
8896 node for the function we want to gimplify.
8898 Return the sequence of GIMPLE statements corresponding to the body
8899 of FNDECL. */
8901 void
8902 gimplify_function_tree (tree fndecl)
8904 tree parm, ret;
8905 gimple_seq seq;
8906 gimple bind;
8908 gcc_assert (!gimple_body (fndecl));
8910 if (DECL_STRUCT_FUNCTION (fndecl))
8911 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8912 else
8913 push_struct_function (fndecl);
8915 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
8917 /* Preliminarily mark non-addressed complex variables as eligible
8918 for promotion to gimple registers. We'll transform their uses
8919 as we find them. */
8920 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8921 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
8922 && !TREE_THIS_VOLATILE (parm)
8923 && !needs_to_live_in_memory (parm))
8924 DECL_GIMPLE_REG_P (parm) = 1;
8927 ret = DECL_RESULT (fndecl);
8928 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
8929 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
8930 && !needs_to_live_in_memory (ret))
8931 DECL_GIMPLE_REG_P (ret) = 1;
8933 bind = gimplify_body (fndecl, true);
8935 /* The tree body of the function is no longer needed, replace it
8936 with the new GIMPLE body. */
8937 seq = NULL;
8938 gimple_seq_add_stmt (&seq, bind);
8939 gimple_set_body (fndecl, seq);
8941 /* If we're instrumenting function entry/exit, then prepend the call to
8942 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8943 catch the exit hook. */
8944 /* ??? Add some way to ignore exceptions for this TFE. */
8945 if (flag_instrument_function_entry_exit
8946 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8947 && !flag_instrument_functions_exclude_p (fndecl))
8949 tree x;
8950 gimple new_bind;
8951 gimple tf;
8952 gimple_seq cleanup = NULL, body = NULL;
8953 tree tmp_var;
8954 gimple call;
8956 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8957 call = gimple_build_call (x, 1, integer_zero_node);
8958 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8959 gimple_call_set_lhs (call, tmp_var);
8960 gimplify_seq_add_stmt (&cleanup, call);
8961 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
8962 call = gimple_build_call (x, 2,
8963 build_fold_addr_expr (current_function_decl),
8964 tmp_var);
8965 gimplify_seq_add_stmt (&cleanup, call);
8966 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
8968 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8969 call = gimple_build_call (x, 1, integer_zero_node);
8970 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8971 gimple_call_set_lhs (call, tmp_var);
8972 gimplify_seq_add_stmt (&body, call);
8973 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
8974 call = gimple_build_call (x, 2,
8975 build_fold_addr_expr (current_function_decl),
8976 tmp_var);
8977 gimplify_seq_add_stmt (&body, call);
8978 gimplify_seq_add_stmt (&body, tf);
8979 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
8980 /* Clear the block for BIND, since it is no longer directly inside
8981 the function, but within a try block. */
8982 gimple_bind_set_block (bind, NULL);
8984 /* Replace the current function body with the body
8985 wrapped in the try/finally TF. */
8986 seq = NULL;
8987 gimple_seq_add_stmt (&seq, new_bind);
8988 gimple_set_body (fndecl, seq);
8991 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8992 cfun->curr_properties = PROP_gimple_any;
8994 pop_cfun ();
8997 /* Return a dummy expression of type TYPE in order to keep going after an
8998 error. */
9000 static tree
9001 dummy_object (tree type)
9003 tree t = build_int_cst (build_pointer_type (type), 0);
9004 return build2 (MEM_REF, type, t, t);
9007 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
9008 builtin function, but a very special sort of operator. */
9010 enum gimplify_status
9011 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
9013 tree promoted_type, have_va_type;
9014 tree valist = TREE_OPERAND (*expr_p, 0);
9015 tree type = TREE_TYPE (*expr_p);
9016 tree t;
9017 location_t loc = EXPR_LOCATION (*expr_p);
9019 /* Verify that valist is of the proper type. */
9020 have_va_type = TREE_TYPE (valist);
9021 if (have_va_type == error_mark_node)
9022 return GS_ERROR;
9023 have_va_type = targetm.canonical_va_list_type (have_va_type);
9025 if (have_va_type == NULL_TREE)
9027 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
9028 return GS_ERROR;
9031 /* Generate a diagnostic for requesting data of a type that cannot
9032 be passed through `...' due to type promotion at the call site. */
9033 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
9034 != type)
9036 static bool gave_help;
9037 bool warned;
9039 /* Unfortunately, this is merely undefined, rather than a constraint
9040 violation, so we cannot make this an error. If this call is never
9041 executed, the program is still strictly conforming. */
9042 warned = warning_at (loc, 0,
9043 "%qT is promoted to %qT when passed through %<...%>",
9044 type, promoted_type);
9045 if (!gave_help && warned)
9047 gave_help = true;
9048 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
9049 promoted_type, type);
9052 /* We can, however, treat "undefined" any way we please.
9053 Call abort to encourage the user to fix the program. */
9054 if (warned)
9055 inform (loc, "if this code is reached, the program will abort");
9056 /* Before the abort, allow the evaluation of the va_list
9057 expression to exit or longjmp. */
9058 gimplify_and_add (valist, pre_p);
9059 t = build_call_expr_loc (loc,
9060 builtin_decl_implicit (BUILT_IN_TRAP), 0);
9061 gimplify_and_add (t, pre_p);
9063 /* This is dead code, but go ahead and finish so that the
9064 mode of the result comes out right. */
9065 *expr_p = dummy_object (type);
9066 return GS_ALL_DONE;
9068 else
9070 /* Make it easier for the backends by protecting the valist argument
9071 from multiple evaluations. */
9072 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
9074 /* For this case, the backends will be expecting a pointer to
9075 TREE_TYPE (abi), but it's possible we've
9076 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
9077 So fix it. */
9078 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
9080 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
9081 valist = fold_convert_loc (loc, p1,
9082 build_fold_addr_expr_loc (loc, valist));
9085 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
9087 else
9088 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
9090 if (!targetm.gimplify_va_arg_expr)
9091 /* FIXME: Once most targets are converted we should merely
9092 assert this is non-null. */
9093 return GS_ALL_DONE;
9095 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
9096 return GS_OK;
9100 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
9102 DST/SRC are the destination and source respectively. You can pass
9103 ungimplified trees in DST or SRC, in which case they will be
9104 converted to a gimple operand if necessary.
9106 This function returns the newly created GIMPLE_ASSIGN tuple. */
9108 gimple
9109 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
9111 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9112 gimplify_and_add (t, seq_p);
9113 ggc_free (t);
9114 return gimple_seq_last_stmt (*seq_p);
9117 inline hashval_t
9118 gimplify_hasher::hash (const value_type *p)
9120 tree t = p->val;
9121 return iterative_hash_expr (t, 0);
9124 inline bool
9125 gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
9127 tree t1 = p1->val;
9128 tree t2 = p2->val;
9129 enum tree_code code = TREE_CODE (t1);
9131 if (TREE_CODE (t2) != code
9132 || TREE_TYPE (t1) != TREE_TYPE (t2))
9133 return false;
9135 if (!operand_equal_p (t1, t2, 0))
9136 return false;
9138 #ifdef ENABLE_CHECKING
9139 /* Only allow them to compare equal if they also hash equal; otherwise
9140 results are nondeterminate, and we fail bootstrap comparison. */
9141 gcc_assert (hash (p1) == hash (p2));
9142 #endif
9144 return true;