Fix typo/copy'n'pasto.
[official-gcc.git] / gcc / gimplify.c
blob9788f4cb2321f82d4f6a8ca667da0c5590f89ad8
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2014 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tree.h"
27 #include "expr.h"
28 #include "pointer-set.h"
29 #include "hash-table.h"
30 #include "basic-block.h"
31 #include "tree-ssa-alias.h"
32 #include "internal-fn.h"
33 #include "gimple-fold.h"
34 #include "tree-eh.h"
35 #include "gimple-expr.h"
36 #include "is-a.h"
37 #include "gimple.h"
38 #include "gimplify.h"
39 #include "gimple-iterator.h"
40 #include "stringpool.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stor-layout.h"
44 #include "stmt.h"
45 #include "print-tree.h"
46 #include "tree-iterator.h"
47 #include "tree-inline.h"
48 #include "tree-pretty-print.h"
49 #include "langhooks.h"
50 #include "bitmap.h"
51 #include "gimple-ssa.h"
52 #include "cgraph.h"
53 #include "tree-cfg.h"
54 #include "tree-ssanames.h"
55 #include "tree-ssa.h"
56 #include "diagnostic-core.h"
57 #include "target.h"
58 #include "splay-tree.h"
59 #include "omp-low.h"
60 #include "gimple-low.h"
61 #include "cilk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
66 enum gimplify_omp_var_data
68 GOVD_SEEN = 1,
69 GOVD_EXPLICIT = 2,
70 GOVD_SHARED = 4,
71 GOVD_PRIVATE = 8,
72 GOVD_FIRSTPRIVATE = 16,
73 GOVD_LASTPRIVATE = 32,
74 GOVD_REDUCTION = 64,
75 GOVD_LOCAL = 128,
76 GOVD_MAP = 256,
77 GOVD_DEBUG_PRIVATE = 512,
78 GOVD_PRIVATE_OUTER_REF = 1024,
79 GOVD_LINEAR = 2048,
80 GOVD_ALIGNED = 4096,
82 /* Flags for GOVD_MAP. */
83 /* Don't copy back. */
84 GOVD_MAP_TO_ONLY = 8192,
85 /* Force a specific behavior (or else, a run-time error). */
86 GOVD_MAP_FORCE = 16384,
88 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
89 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
90 | GOVD_LOCAL)
94 enum omp_region_type
96 ORT_WORKSHARE = 0,
97 ORT_SIMD = 1,
98 ORT_PARALLEL = 2,
99 ORT_COMBINED_PARALLEL = 3,
100 ORT_TASK = 4,
101 ORT_UNTIED_TASK = 5,
102 ORT_TEAMS = 8,
103 ORT_TARGET = 16,
105 /* Flags for ORT_TARGET. */
106 /* Prepare this region for offloading. */
107 ORT_TARGET_OFFLOAD = 32,
108 /* Default to GOVD_MAP_FORCE for implicit mappings in this region. */
109 ORT_TARGET_MAP_FORCE = 64
112 /* Gimplify hashtable helper. */
114 struct gimplify_hasher : typed_free_remove <elt_t>
116 typedef elt_t value_type;
117 typedef elt_t compare_type;
118 static inline hashval_t hash (const value_type *);
119 static inline bool equal (const value_type *, const compare_type *);
122 struct gimplify_ctx
124 struct gimplify_ctx *prev_context;
126 vec<gimple> bind_expr_stack;
127 tree temps;
128 gimple_seq conditional_cleanups;
129 tree exit_label;
130 tree return_temp;
132 vec<tree> case_labels;
133 /* The formal temporary table. Should this be persistent? */
134 hash_table <gimplify_hasher> temp_htab;
136 int conditions;
137 bool save_stack;
138 bool into_ssa;
139 bool allow_rhs_cond_expr;
140 bool in_cleanup_point_expr;
143 struct gimplify_omp_ctx
145 struct gimplify_omp_ctx *outer_context;
146 splay_tree variables;
147 struct pointer_set_t *privatized_types;
148 location_t location;
149 enum omp_clause_default_kind default_kind;
150 enum omp_region_type region_type;
151 bool combined_loop;
154 static struct gimplify_ctx *gimplify_ctxp;
155 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
157 /* Forward declaration. */
158 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
160 /* Shorter alias name for the above function for use in gimplify.c
161 only. */
163 static inline void
164 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
166 gimple_seq_add_stmt_without_update (seq_p, gs);
169 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
170 NULL, a new sequence is allocated. This function is
171 similar to gimple_seq_add_seq, but does not scan the operands.
172 During gimplification, we need to manipulate statement sequences
173 before the def/use vectors have been constructed. */
175 static void
176 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
178 gimple_stmt_iterator si;
180 if (src == NULL)
181 return;
183 si = gsi_last (*dst_p);
184 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
188 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
189 and popping gimplify contexts. */
191 static struct gimplify_ctx *ctx_pool = NULL;
193 /* Return a gimplify context struct from the pool. */
195 static inline struct gimplify_ctx *
196 ctx_alloc (void)
198 struct gimplify_ctx * c = ctx_pool;
200 if (c)
201 ctx_pool = c->prev_context;
202 else
203 c = XNEW (struct gimplify_ctx);
205 memset (c, '\0', sizeof (*c));
206 return c;
209 /* Put gimplify context C back into the pool. */
211 static inline void
212 ctx_free (struct gimplify_ctx *c)
214 c->prev_context = ctx_pool;
215 ctx_pool = c;
218 /* Free allocated ctx stack memory. */
220 void
221 free_gimplify_stack (void)
223 struct gimplify_ctx *c;
225 while ((c = ctx_pool))
227 ctx_pool = c->prev_context;
228 free (c);
233 /* Set up a context for the gimplifier. */
235 void
236 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
238 struct gimplify_ctx *c = ctx_alloc ();
240 c->prev_context = gimplify_ctxp;
241 gimplify_ctxp = c;
242 gimplify_ctxp->into_ssa = in_ssa;
243 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
246 /* Tear down a context for the gimplifier. If BODY is non-null, then
247 put the temporaries into the outer BIND_EXPR. Otherwise, put them
248 in the local_decls.
250 BODY is not a sequence, but the first tuple in a sequence. */
252 void
253 pop_gimplify_context (gimple body)
255 struct gimplify_ctx *c = gimplify_ctxp;
257 gcc_assert (c
258 && (!c->bind_expr_stack.exists ()
259 || c->bind_expr_stack.is_empty ()));
260 c->bind_expr_stack.release ();
261 gimplify_ctxp = c->prev_context;
263 if (body)
264 declare_vars (c->temps, body, false);
265 else
266 record_vars (c->temps);
268 if (c->temp_htab.is_created ())
269 c->temp_htab.dispose ();
270 ctx_free (c);
273 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
275 static void
276 gimple_push_bind_expr (gimple gimple_bind)
278 gimplify_ctxp->bind_expr_stack.reserve (8);
279 gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
282 /* Pop the first element off the stack of bindings. */
284 static void
285 gimple_pop_bind_expr (void)
287 gimplify_ctxp->bind_expr_stack.pop ();
290 /* Return the first element of the stack of bindings. */
292 gimple
293 gimple_current_bind_expr (void)
295 return gimplify_ctxp->bind_expr_stack.last ();
298 /* Return the stack of bindings created during gimplification. */
300 vec<gimple>
301 gimple_bind_expr_stack (void)
303 return gimplify_ctxp->bind_expr_stack;
306 /* Return true iff there is a COND_EXPR between us and the innermost
307 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
309 static bool
310 gimple_conditional_context (void)
312 return gimplify_ctxp->conditions > 0;
315 /* Note that we've entered a COND_EXPR. */
317 static void
318 gimple_push_condition (void)
320 #ifdef ENABLE_GIMPLE_CHECKING
321 if (gimplify_ctxp->conditions == 0)
322 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
323 #endif
324 ++(gimplify_ctxp->conditions);
327 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
328 now, add any conditional cleanups we've seen to the prequeue. */
330 static void
331 gimple_pop_condition (gimple_seq *pre_p)
333 int conds = --(gimplify_ctxp->conditions);
335 gcc_assert (conds >= 0);
336 if (conds == 0)
338 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
339 gimplify_ctxp->conditional_cleanups = NULL;
343 /* A stable comparison routine for use with splay trees and DECLs. */
345 static int
346 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
348 tree a = (tree) xa;
349 tree b = (tree) xb;
351 return DECL_UID (a) - DECL_UID (b);
354 /* Create a new omp construct that deals with variable remapping. */
356 static struct gimplify_omp_ctx *
357 new_omp_context (enum omp_region_type region_type)
359 struct gimplify_omp_ctx *c;
361 c = XCNEW (struct gimplify_omp_ctx);
362 c->outer_context = gimplify_omp_ctxp;
363 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
364 c->privatized_types = pointer_set_create ();
365 c->location = input_location;
366 c->region_type = region_type;
367 if ((region_type & ORT_TASK) == 0)
368 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
369 else
370 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
372 return c;
375 /* Destroy an omp construct that deals with variable remapping. */
377 static void
378 delete_omp_context (struct gimplify_omp_ctx *c)
380 splay_tree_delete (c->variables);
381 pointer_set_destroy (c->privatized_types);
382 XDELETE (c);
385 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
386 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
388 /* Both gimplify the statement T and append it to *SEQ_P. This function
389 behaves exactly as gimplify_stmt, but you don't have to pass T as a
390 reference. */
392 void
393 gimplify_and_add (tree t, gimple_seq *seq_p)
395 gimplify_stmt (&t, seq_p);
398 /* Gimplify statement T into sequence *SEQ_P, and return the first
399 tuple in the sequence of generated tuples for this statement.
400 Return NULL if gimplifying T produced no tuples. */
402 static gimple
403 gimplify_and_return_first (tree t, gimple_seq *seq_p)
405 gimple_stmt_iterator last = gsi_last (*seq_p);
407 gimplify_and_add (t, seq_p);
409 if (!gsi_end_p (last))
411 gsi_next (&last);
412 return gsi_stmt (last);
414 else
415 return gimple_seq_first_stmt (*seq_p);
418 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
419 LHS, or for a call argument. */
421 static bool
422 is_gimple_mem_rhs (tree t)
424 /* If we're dealing with a renamable type, either source or dest must be
425 a renamed variable. */
426 if (is_gimple_reg_type (TREE_TYPE (t)))
427 return is_gimple_val (t);
428 else
429 return is_gimple_val (t) || is_gimple_lvalue (t);
432 /* Return true if T is a CALL_EXPR or an expression that can be
433 assigned to a temporary. Note that this predicate should only be
434 used during gimplification. See the rationale for this in
435 gimplify_modify_expr. */
437 static bool
438 is_gimple_reg_rhs_or_call (tree t)
440 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
441 || TREE_CODE (t) == CALL_EXPR);
444 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
445 this predicate should only be used during gimplification. See the
446 rationale for this in gimplify_modify_expr. */
448 static bool
449 is_gimple_mem_rhs_or_call (tree t)
451 /* If we're dealing with a renamable type, either source or dest must be
452 a renamed variable. */
453 if (is_gimple_reg_type (TREE_TYPE (t)))
454 return is_gimple_val (t);
455 else
456 return (is_gimple_val (t) || is_gimple_lvalue (t)
457 || TREE_CODE (t) == CALL_EXPR);
460 /* Create a temporary with a name derived from VAL. Subroutine of
461 lookup_tmp_var; nobody else should call this function. */
463 static inline tree
464 create_tmp_from_val (tree val, bool is_formal)
466 /* Drop all qualifiers and address-space information from the value type. */
467 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
468 tree var = create_tmp_var (type, get_name (val));
469 if (is_formal
470 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
471 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
472 DECL_GIMPLE_REG_P (var) = 1;
473 return var;
476 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
477 an existing expression temporary. */
479 static tree
480 lookup_tmp_var (tree val, bool is_formal)
482 tree ret;
484 /* If not optimizing, never really reuse a temporary. local-alloc
485 won't allocate any variable that is used in more than one basic
486 block, which means it will go into memory, causing much extra
487 work in reload and final and poorer code generation, outweighing
488 the extra memory allocation here. */
489 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
490 ret = create_tmp_from_val (val, is_formal);
491 else
493 elt_t elt, *elt_p;
494 elt_t **slot;
496 elt.val = val;
497 if (!gimplify_ctxp->temp_htab.is_created ())
498 gimplify_ctxp->temp_htab.create (1000);
499 slot = gimplify_ctxp->temp_htab.find_slot (&elt, INSERT);
500 if (*slot == NULL)
502 elt_p = XNEW (elt_t);
503 elt_p->val = val;
504 elt_p->temp = ret = create_tmp_from_val (val, is_formal);
505 *slot = elt_p;
507 else
509 elt_p = *slot;
510 ret = elt_p->temp;
514 return ret;
517 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
519 static tree
520 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
521 bool is_formal)
523 tree t, mod;
525 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
526 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
527 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
528 fb_rvalue);
530 if (gimplify_ctxp->into_ssa
531 && is_gimple_reg_type (TREE_TYPE (val)))
532 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
533 else
534 t = lookup_tmp_var (val, is_formal);
536 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
538 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
540 /* gimplify_modify_expr might want to reduce this further. */
541 gimplify_and_add (mod, pre_p);
542 ggc_free (mod);
544 return t;
547 /* Return a formal temporary variable initialized with VAL. PRE_P is as
548 in gimplify_expr. Only use this function if:
550 1) The value of the unfactored expression represented by VAL will not
551 change between the initialization and use of the temporary, and
552 2) The temporary will not be otherwise modified.
554 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
555 and #2 means it is inappropriate for && temps.
557 For other cases, use get_initialized_tmp_var instead. */
559 tree
560 get_formal_tmp_var (tree val, gimple_seq *pre_p)
562 return internal_get_tmp_var (val, pre_p, NULL, true);
565 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
566 are as in gimplify_expr. */
568 tree
569 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
571 return internal_get_tmp_var (val, pre_p, post_p, false);
574 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
575 generate debug info for them; otherwise don't. */
577 void
578 declare_vars (tree vars, gimple scope, bool debug_info)
580 tree last = vars;
581 if (last)
583 tree temps, block;
585 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
587 temps = nreverse (last);
589 block = gimple_bind_block (scope);
590 gcc_assert (!block || TREE_CODE (block) == BLOCK);
591 if (!block || !debug_info)
593 DECL_CHAIN (last) = gimple_bind_vars (scope);
594 gimple_bind_set_vars (scope, temps);
596 else
598 /* We need to attach the nodes both to the BIND_EXPR and to its
599 associated BLOCK for debugging purposes. The key point here
600 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
601 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
602 if (BLOCK_VARS (block))
603 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
604 else
606 gimple_bind_set_vars (scope,
607 chainon (gimple_bind_vars (scope), temps));
608 BLOCK_VARS (block) = temps;
614 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
615 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
616 no such upper bound can be obtained. */
618 static void
619 force_constant_size (tree var)
621 /* The only attempt we make is by querying the maximum size of objects
622 of the variable's type. */
624 HOST_WIDE_INT max_size;
626 gcc_assert (TREE_CODE (var) == VAR_DECL);
628 max_size = max_int_size_in_bytes (TREE_TYPE (var));
630 gcc_assert (max_size >= 0);
632 DECL_SIZE_UNIT (var)
633 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
634 DECL_SIZE (var)
635 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
638 /* Push the temporary variable TMP into the current binding. */
640 void
641 gimple_add_tmp_var (tree tmp)
643 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
645 /* Later processing assumes that the object size is constant, which might
646 not be true at this point. Force the use of a constant upper bound in
647 this case. */
648 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
649 force_constant_size (tmp);
651 DECL_CONTEXT (tmp) = current_function_decl;
652 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
654 if (gimplify_ctxp)
656 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
657 gimplify_ctxp->temps = tmp;
659 /* Mark temporaries local within the nearest enclosing parallel. */
660 if (gimplify_omp_ctxp)
662 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
663 while (ctx
664 && (ctx->region_type == ORT_WORKSHARE
665 || ctx->region_type == ORT_SIMD))
666 ctx = ctx->outer_context;
667 if (ctx)
668 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
671 else if (cfun)
672 record_vars (tmp);
673 else
675 gimple_seq body_seq;
677 /* This case is for nested functions. We need to expose the locals
678 they create. */
679 body_seq = gimple_body (current_function_decl);
680 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
686 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
687 nodes that are referenced more than once in GENERIC functions. This is
688 necessary because gimplification (translation into GIMPLE) is performed
689 by modifying tree nodes in-place, so gimplication of a shared node in a
690 first context could generate an invalid GIMPLE form in a second context.
692 This is achieved with a simple mark/copy/unmark algorithm that walks the
693 GENERIC representation top-down, marks nodes with TREE_VISITED the first
694 time it encounters them, duplicates them if they already have TREE_VISITED
695 set, and finally removes the TREE_VISITED marks it has set.
697 The algorithm works only at the function level, i.e. it generates a GENERIC
698 representation of a function with no nodes shared within the function when
699 passed a GENERIC function (except for nodes that are allowed to be shared).
701 At the global level, it is also necessary to unshare tree nodes that are
702 referenced in more than one function, for the same aforementioned reason.
703 This requires some cooperation from the front-end. There are 2 strategies:
705 1. Manual unsharing. The front-end needs to call unshare_expr on every
706 expression that might end up being shared across functions.
708 2. Deep unsharing. This is an extension of regular unsharing. Instead
709 of calling unshare_expr on expressions that might be shared across
710 functions, the front-end pre-marks them with TREE_VISITED. This will
711 ensure that they are unshared on the first reference within functions
712 when the regular unsharing algorithm runs. The counterpart is that
713 this algorithm must look deeper than for manual unsharing, which is
714 specified by LANG_HOOKS_DEEP_UNSHARING.
716 If there are only few specific cases of node sharing across functions, it is
717 probably easier for a front-end to unshare the expressions manually. On the
718 contrary, if the expressions generated at the global level are as widespread
719 as expressions generated within functions, deep unsharing is very likely the
720 way to go. */
722 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
723 These nodes model computations that must be done once. If we were to
724 unshare something like SAVE_EXPR(i++), the gimplification process would
725 create wrong code. However, if DATA is non-null, it must hold a pointer
726 set that is used to unshare the subtrees of these nodes. */
728 static tree
729 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
731 tree t = *tp;
732 enum tree_code code = TREE_CODE (t);
734 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
735 copy their subtrees if we can make sure to do it only once. */
736 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
738 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
740 else
741 *walk_subtrees = 0;
744 /* Stop at types, decls, constants like copy_tree_r. */
745 else if (TREE_CODE_CLASS (code) == tcc_type
746 || TREE_CODE_CLASS (code) == tcc_declaration
747 || TREE_CODE_CLASS (code) == tcc_constant
748 /* We can't do anything sensible with a BLOCK used as an
749 expression, but we also can't just die when we see it
750 because of non-expression uses. So we avert our eyes
751 and cross our fingers. Silly Java. */
752 || code == BLOCK)
753 *walk_subtrees = 0;
755 /* Cope with the statement expression extension. */
756 else if (code == STATEMENT_LIST)
759 /* Leave the bulk of the work to copy_tree_r itself. */
760 else
761 copy_tree_r (tp, walk_subtrees, NULL);
763 return NULL_TREE;
766 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
767 If *TP has been visited already, then *TP is deeply copied by calling
768 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
770 static tree
771 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
773 tree t = *tp;
774 enum tree_code code = TREE_CODE (t);
776 /* Skip types, decls, and constants. But we do want to look at their
777 types and the bounds of types. Mark them as visited so we properly
778 unmark their subtrees on the unmark pass. If we've already seen them,
779 don't look down further. */
780 if (TREE_CODE_CLASS (code) == tcc_type
781 || TREE_CODE_CLASS (code) == tcc_declaration
782 || TREE_CODE_CLASS (code) == tcc_constant)
784 if (TREE_VISITED (t))
785 *walk_subtrees = 0;
786 else
787 TREE_VISITED (t) = 1;
790 /* If this node has been visited already, unshare it and don't look
791 any deeper. */
792 else if (TREE_VISITED (t))
794 walk_tree (tp, mostly_copy_tree_r, data, NULL);
795 *walk_subtrees = 0;
798 /* Otherwise, mark the node as visited and keep looking. */
799 else
800 TREE_VISITED (t) = 1;
802 return NULL_TREE;
805 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
806 copy_if_shared_r callback unmodified. */
808 static inline void
809 copy_if_shared (tree *tp, void *data)
811 walk_tree (tp, copy_if_shared_r, data, NULL);
814 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
815 any nested functions. */
817 static void
818 unshare_body (tree fndecl)
820 struct cgraph_node *cgn = cgraph_get_node (fndecl);
821 /* If the language requires deep unsharing, we need a pointer set to make
822 sure we don't repeatedly unshare subtrees of unshareable nodes. */
823 struct pointer_set_t *visited
824 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
826 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
827 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
828 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
830 if (visited)
831 pointer_set_destroy (visited);
833 if (cgn)
834 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
835 unshare_body (cgn->decl);
838 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
839 Subtrees are walked until the first unvisited node is encountered. */
841 static tree
842 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
844 tree t = *tp;
846 /* If this node has been visited, unmark it and keep looking. */
847 if (TREE_VISITED (t))
848 TREE_VISITED (t) = 0;
850 /* Otherwise, don't look any deeper. */
851 else
852 *walk_subtrees = 0;
854 return NULL_TREE;
857 /* Unmark the visited trees rooted at *TP. */
859 static inline void
860 unmark_visited (tree *tp)
862 walk_tree (tp, unmark_visited_r, NULL, NULL);
865 /* Likewise, but mark all trees as not visited. */
867 static void
868 unvisit_body (tree fndecl)
870 struct cgraph_node *cgn = cgraph_get_node (fndecl);
872 unmark_visited (&DECL_SAVED_TREE (fndecl));
873 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
874 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
876 if (cgn)
877 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
878 unvisit_body (cgn->decl);
881 /* Unconditionally make an unshared copy of EXPR. This is used when using
882 stored expressions which span multiple functions, such as BINFO_VTABLE,
883 as the normal unsharing process can't tell that they're shared. */
885 tree
886 unshare_expr (tree expr)
888 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
889 return expr;
892 /* Worker for unshare_expr_without_location. */
894 static tree
895 prune_expr_location (tree *tp, int *walk_subtrees, void *)
897 if (EXPR_P (*tp))
898 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
899 else
900 *walk_subtrees = 0;
901 return NULL_TREE;
904 /* Similar to unshare_expr but also prune all expression locations
905 from EXPR. */
907 tree
908 unshare_expr_without_location (tree expr)
910 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
911 if (EXPR_P (expr))
912 walk_tree (&expr, prune_expr_location, NULL, NULL);
913 return expr;
916 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
917 contain statements and have a value. Assign its value to a temporary
918 and give it void_type_node. Return the temporary, or NULL_TREE if
919 WRAPPER was already void. */
921 tree
922 voidify_wrapper_expr (tree wrapper, tree temp)
924 tree type = TREE_TYPE (wrapper);
925 if (type && !VOID_TYPE_P (type))
927 tree *p;
929 /* Set p to point to the body of the wrapper. Loop until we find
930 something that isn't a wrapper. */
931 for (p = &wrapper; p && *p; )
933 switch (TREE_CODE (*p))
935 case BIND_EXPR:
936 TREE_SIDE_EFFECTS (*p) = 1;
937 TREE_TYPE (*p) = void_type_node;
938 /* For a BIND_EXPR, the body is operand 1. */
939 p = &BIND_EXPR_BODY (*p);
940 break;
942 case CLEANUP_POINT_EXPR:
943 case TRY_FINALLY_EXPR:
944 case TRY_CATCH_EXPR:
945 TREE_SIDE_EFFECTS (*p) = 1;
946 TREE_TYPE (*p) = void_type_node;
947 p = &TREE_OPERAND (*p, 0);
948 break;
950 case STATEMENT_LIST:
952 tree_stmt_iterator i = tsi_last (*p);
953 TREE_SIDE_EFFECTS (*p) = 1;
954 TREE_TYPE (*p) = void_type_node;
955 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
957 break;
959 case COMPOUND_EXPR:
960 /* Advance to the last statement. Set all container types to
961 void. */
962 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
964 TREE_SIDE_EFFECTS (*p) = 1;
965 TREE_TYPE (*p) = void_type_node;
967 break;
969 case TRANSACTION_EXPR:
970 TREE_SIDE_EFFECTS (*p) = 1;
971 TREE_TYPE (*p) = void_type_node;
972 p = &TRANSACTION_EXPR_BODY (*p);
973 break;
975 default:
976 /* Assume that any tree upon which voidify_wrapper_expr is
977 directly called is a wrapper, and that its body is op0. */
978 if (p == &wrapper)
980 TREE_SIDE_EFFECTS (*p) = 1;
981 TREE_TYPE (*p) = void_type_node;
982 p = &TREE_OPERAND (*p, 0);
983 break;
985 goto out;
989 out:
990 if (p == NULL || IS_EMPTY_STMT (*p))
991 temp = NULL_TREE;
992 else if (temp)
994 /* The wrapper is on the RHS of an assignment that we're pushing
995 down. */
996 gcc_assert (TREE_CODE (temp) == INIT_EXPR
997 || TREE_CODE (temp) == MODIFY_EXPR);
998 TREE_OPERAND (temp, 1) = *p;
999 *p = temp;
1001 else
1003 temp = create_tmp_var (type, "retval");
1004 *p = build2 (INIT_EXPR, type, temp, *p);
1007 return temp;
1010 return NULL_TREE;
1013 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1014 a temporary through which they communicate. */
1016 static void
1017 build_stack_save_restore (gimple *save, gimple *restore)
1019 tree tmp_var;
1021 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1022 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1023 gimple_call_set_lhs (*save, tmp_var);
1025 *restore
1026 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1027 1, tmp_var);
1030 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1032 static enum gimplify_status
1033 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1035 tree bind_expr = *expr_p;
1036 bool old_save_stack = gimplify_ctxp->save_stack;
1037 tree t;
1038 gimple gimple_bind;
1039 gimple_seq body, cleanup;
1040 gimple stack_save;
1042 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1044 /* Mark variables seen in this bind expr. */
1045 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1047 if (TREE_CODE (t) == VAR_DECL)
1049 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1051 /* Mark variable as local. */
1052 if (ctx && !DECL_EXTERNAL (t)
1053 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1054 || splay_tree_lookup (ctx->variables,
1055 (splay_tree_key) t) == NULL))
1057 if (ctx->region_type == ORT_SIMD
1058 && TREE_ADDRESSABLE (t)
1059 && !TREE_STATIC (t))
1060 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1061 else
1062 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1065 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1067 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1068 cfun->has_local_explicit_reg_vars = true;
1071 /* Preliminarily mark non-addressed complex variables as eligible
1072 for promotion to gimple registers. We'll transform their uses
1073 as we find them. */
1074 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1075 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1076 && !TREE_THIS_VOLATILE (t)
1077 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1078 && !needs_to_live_in_memory (t))
1079 DECL_GIMPLE_REG_P (t) = 1;
1082 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1083 BIND_EXPR_BLOCK (bind_expr));
1084 gimple_push_bind_expr (gimple_bind);
1086 gimplify_ctxp->save_stack = false;
1088 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1089 body = NULL;
1090 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1091 gimple_bind_set_body (gimple_bind, body);
1093 cleanup = NULL;
1094 stack_save = NULL;
1095 if (gimplify_ctxp->save_stack)
1097 gimple stack_restore;
1099 /* Save stack on entry and restore it on exit. Add a try_finally
1100 block to achieve this. */
1101 build_stack_save_restore (&stack_save, &stack_restore);
1103 gimplify_seq_add_stmt (&cleanup, stack_restore);
1106 /* Add clobbers for all variables that go out of scope. */
1107 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1109 if (TREE_CODE (t) == VAR_DECL
1110 && !is_global_var (t)
1111 && DECL_CONTEXT (t) == current_function_decl
1112 && !DECL_HARD_REGISTER (t)
1113 && !TREE_THIS_VOLATILE (t)
1114 && !DECL_HAS_VALUE_EXPR_P (t)
1115 /* Only care for variables that have to be in memory. Others
1116 will be rewritten into SSA names, hence moved to the top-level. */
1117 && !is_gimple_reg (t)
1118 && flag_stack_reuse != SR_NONE)
1120 tree clobber = build_constructor (TREE_TYPE (t),
1121 NULL);
1122 TREE_THIS_VOLATILE (clobber) = 1;
1123 gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
1127 if (cleanup)
1129 gimple gs;
1130 gimple_seq new_body;
1132 new_body = NULL;
1133 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1134 GIMPLE_TRY_FINALLY);
1136 if (stack_save)
1137 gimplify_seq_add_stmt (&new_body, stack_save);
1138 gimplify_seq_add_stmt (&new_body, gs);
1139 gimple_bind_set_body (gimple_bind, new_body);
1142 gimplify_ctxp->save_stack = old_save_stack;
1143 gimple_pop_bind_expr ();
1145 gimplify_seq_add_stmt (pre_p, gimple_bind);
1147 if (temp)
1149 *expr_p = temp;
1150 return GS_OK;
1153 *expr_p = NULL_TREE;
1154 return GS_ALL_DONE;
1157 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1158 GIMPLE value, it is assigned to a new temporary and the statement is
1159 re-written to return the temporary.
1161 PRE_P points to the sequence where side effects that must happen before
1162 STMT should be stored. */
1164 static enum gimplify_status
1165 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1167 gimple ret;
1168 tree ret_expr = TREE_OPERAND (stmt, 0);
1169 tree result_decl, result;
1171 if (ret_expr == error_mark_node)
1172 return GS_ERROR;
1174 /* Implicit _Cilk_sync must be inserted right before any return statement
1175 if there is a _Cilk_spawn in the function. If the user has provided a
1176 _Cilk_sync, the optimizer should remove this duplicate one. */
1177 if (fn_contains_cilk_spawn_p (cfun))
1179 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1180 gimplify_and_add (impl_sync, pre_p);
1183 if (!ret_expr
1184 || TREE_CODE (ret_expr) == RESULT_DECL
1185 || ret_expr == error_mark_node)
1187 gimple ret = gimple_build_return (ret_expr);
1188 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1189 gimplify_seq_add_stmt (pre_p, ret);
1190 return GS_ALL_DONE;
1193 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1194 result_decl = NULL_TREE;
1195 else
1197 result_decl = TREE_OPERAND (ret_expr, 0);
1199 /* See through a return by reference. */
1200 if (TREE_CODE (result_decl) == INDIRECT_REF)
1201 result_decl = TREE_OPERAND (result_decl, 0);
1203 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1204 || TREE_CODE (ret_expr) == INIT_EXPR)
1205 && TREE_CODE (result_decl) == RESULT_DECL);
1208 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1209 Recall that aggregate_value_p is FALSE for any aggregate type that is
1210 returned in registers. If we're returning values in registers, then
1211 we don't want to extend the lifetime of the RESULT_DECL, particularly
1212 across another call. In addition, for those aggregates for which
1213 hard_function_value generates a PARALLEL, we'll die during normal
1214 expansion of structure assignments; there's special code in expand_return
1215 to handle this case that does not exist in expand_expr. */
1216 if (!result_decl)
1217 result = NULL_TREE;
1218 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1220 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1222 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1223 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1224 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1225 should be effectively allocated by the caller, i.e. all calls to
1226 this function must be subject to the Return Slot Optimization. */
1227 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1228 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1230 result = result_decl;
1232 else if (gimplify_ctxp->return_temp)
1233 result = gimplify_ctxp->return_temp;
1234 else
1236 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1238 /* ??? With complex control flow (usually involving abnormal edges),
1239 we can wind up warning about an uninitialized value for this. Due
1240 to how this variable is constructed and initialized, this is never
1241 true. Give up and never warn. */
1242 TREE_NO_WARNING (result) = 1;
1244 gimplify_ctxp->return_temp = result;
1247 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1248 Then gimplify the whole thing. */
1249 if (result != result_decl)
1250 TREE_OPERAND (ret_expr, 0) = result;
1252 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1254 ret = gimple_build_return (result);
1255 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1256 gimplify_seq_add_stmt (pre_p, ret);
1258 return GS_ALL_DONE;
1261 /* Gimplify a variable-length array DECL. */
1263 static void
1264 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1266 /* This is a variable-sized decl. Simplify its size and mark it
1267 for deferred expansion. */
1268 tree t, addr, ptr_type;
1270 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1271 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1273 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1274 if (DECL_HAS_VALUE_EXPR_P (decl))
1275 return;
1277 /* All occurrences of this decl in final gimplified code will be
1278 replaced by indirection. Setting DECL_VALUE_EXPR does two
1279 things: First, it lets the rest of the gimplifier know what
1280 replacement to use. Second, it lets the debug info know
1281 where to find the value. */
1282 ptr_type = build_pointer_type (TREE_TYPE (decl));
1283 addr = create_tmp_var (ptr_type, get_name (decl));
1284 DECL_IGNORED_P (addr) = 0;
1285 t = build_fold_indirect_ref (addr);
1286 TREE_THIS_NOTRAP (t) = 1;
1287 SET_DECL_VALUE_EXPR (decl, t);
1288 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1290 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1291 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1292 size_int (DECL_ALIGN (decl)));
1293 /* The call has been built for a variable-sized object. */
1294 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1295 t = fold_convert (ptr_type, t);
1296 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1298 gimplify_and_add (t, seq_p);
1300 /* Indicate that we need to restore the stack level when the
1301 enclosing BIND_EXPR is exited. */
1302 gimplify_ctxp->save_stack = true;
1305 /* A helper function to be called via walk_tree. Mark all labels under *TP
1306 as being forced. To be called for DECL_INITIAL of static variables. */
1308 static tree
1309 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1311 if (TYPE_P (*tp))
1312 *walk_subtrees = 0;
1313 if (TREE_CODE (*tp) == LABEL_DECL)
1314 FORCED_LABEL (*tp) = 1;
1316 return NULL_TREE;
1319 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1320 and initialization explicit. */
1322 static enum gimplify_status
1323 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1325 tree stmt = *stmt_p;
1326 tree decl = DECL_EXPR_DECL (stmt);
1328 *stmt_p = NULL_TREE;
1330 if (TREE_TYPE (decl) == error_mark_node)
1331 return GS_ERROR;
1333 if ((TREE_CODE (decl) == TYPE_DECL
1334 || TREE_CODE (decl) == VAR_DECL)
1335 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1336 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1338 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1339 in case its size expressions contain problematic nodes like CALL_EXPR. */
1340 if (TREE_CODE (decl) == TYPE_DECL
1341 && DECL_ORIGINAL_TYPE (decl)
1342 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1343 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1345 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1347 tree init = DECL_INITIAL (decl);
1349 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1350 || (!TREE_STATIC (decl)
1351 && flag_stack_check == GENERIC_STACK_CHECK
1352 && compare_tree_int (DECL_SIZE_UNIT (decl),
1353 STACK_CHECK_MAX_VAR_SIZE) > 0))
1354 gimplify_vla_decl (decl, seq_p);
1356 /* Some front ends do not explicitly declare all anonymous
1357 artificial variables. We compensate here by declaring the
1358 variables, though it would be better if the front ends would
1359 explicitly declare them. */
1360 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1361 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1362 gimple_add_tmp_var (decl);
1364 if (init && init != error_mark_node)
1366 if (!TREE_STATIC (decl))
1368 DECL_INITIAL (decl) = NULL_TREE;
1369 init = build2 (INIT_EXPR, void_type_node, decl, init);
1370 gimplify_and_add (init, seq_p);
1371 ggc_free (init);
1373 else
1374 /* We must still examine initializers for static variables
1375 as they may contain a label address. */
1376 walk_tree (&init, force_labels_r, NULL, NULL);
1380 return GS_ALL_DONE;
1383 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1384 and replacing the LOOP_EXPR with goto, but if the loop contains an
1385 EXIT_EXPR, we need to append a label for it to jump to. */
1387 static enum gimplify_status
1388 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1390 tree saved_label = gimplify_ctxp->exit_label;
1391 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1393 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1395 gimplify_ctxp->exit_label = NULL_TREE;
1397 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1399 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1401 if (gimplify_ctxp->exit_label)
1402 gimplify_seq_add_stmt (pre_p,
1403 gimple_build_label (gimplify_ctxp->exit_label));
1405 gimplify_ctxp->exit_label = saved_label;
1407 *expr_p = NULL;
1408 return GS_ALL_DONE;
1411 /* Gimplify a statement list onto a sequence. These may be created either
1412 by an enlightened front-end, or by shortcut_cond_expr. */
1414 static enum gimplify_status
1415 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1417 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1419 tree_stmt_iterator i = tsi_start (*expr_p);
1421 while (!tsi_end_p (i))
1423 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1424 tsi_delink (&i);
1427 if (temp)
1429 *expr_p = temp;
1430 return GS_OK;
1433 return GS_ALL_DONE;
1437 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1438 branch to. */
1440 static enum gimplify_status
1441 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1443 tree switch_expr = *expr_p;
1444 gimple_seq switch_body_seq = NULL;
1445 enum gimplify_status ret;
1446 tree index_type = TREE_TYPE (switch_expr);
1447 if (index_type == NULL_TREE)
1448 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
1450 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1451 fb_rvalue);
1452 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1453 return ret;
1455 if (SWITCH_BODY (switch_expr))
1457 vec<tree> labels;
1458 vec<tree> saved_labels;
1459 tree default_case = NULL_TREE;
1460 gimple gimple_switch;
1462 /* If someone can be bothered to fill in the labels, they can
1463 be bothered to null out the body too. */
1464 gcc_assert (!SWITCH_LABELS (switch_expr));
1466 /* Save old labels, get new ones from body, then restore the old
1467 labels. Save all the things from the switch body to append after. */
1468 saved_labels = gimplify_ctxp->case_labels;
1469 gimplify_ctxp->case_labels.create (8);
1471 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1472 labels = gimplify_ctxp->case_labels;
1473 gimplify_ctxp->case_labels = saved_labels;
1475 preprocess_case_label_vec_for_gimple (labels, index_type,
1476 &default_case);
1478 if (!default_case)
1480 gimple new_default;
1482 default_case
1483 = build_case_label (NULL_TREE, NULL_TREE,
1484 create_artificial_label (UNKNOWN_LOCATION));
1485 new_default = gimple_build_label (CASE_LABEL (default_case));
1486 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1489 gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
1490 default_case, labels);
1491 gimplify_seq_add_stmt (pre_p, gimple_switch);
1492 gimplify_seq_add_seq (pre_p, switch_body_seq);
1493 labels.release ();
1495 else
1496 gcc_assert (SWITCH_LABELS (switch_expr));
1498 return GS_ALL_DONE;
1501 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1503 static enum gimplify_status
1504 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1506 struct gimplify_ctx *ctxp;
1507 gimple gimple_label;
1509 /* Invalid programs can play Duff's Device type games with, for example,
1510 #pragma omp parallel. At least in the C front end, we don't
1511 detect such invalid branches until after gimplification, in the
1512 diagnose_omp_blocks pass. */
1513 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1514 if (ctxp->case_labels.exists ())
1515 break;
1517 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1518 ctxp->case_labels.safe_push (*expr_p);
1519 gimplify_seq_add_stmt (pre_p, gimple_label);
1521 return GS_ALL_DONE;
1524 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1525 if necessary. */
1527 tree
1528 build_and_jump (tree *label_p)
1530 if (label_p == NULL)
1531 /* If there's nowhere to jump, just fall through. */
1532 return NULL_TREE;
1534 if (*label_p == NULL_TREE)
1536 tree label = create_artificial_label (UNKNOWN_LOCATION);
1537 *label_p = label;
1540 return build1 (GOTO_EXPR, void_type_node, *label_p);
1543 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1544 This also involves building a label to jump to and communicating it to
1545 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1547 static enum gimplify_status
1548 gimplify_exit_expr (tree *expr_p)
1550 tree cond = TREE_OPERAND (*expr_p, 0);
1551 tree expr;
1553 expr = build_and_jump (&gimplify_ctxp->exit_label);
1554 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1555 *expr_p = expr;
1557 return GS_OK;
1560 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1561 different from its canonical type, wrap the whole thing inside a
1562 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1563 type.
1565 The canonical type of a COMPONENT_REF is the type of the field being
1566 referenced--unless the field is a bit-field which can be read directly
1567 in a smaller mode, in which case the canonical type is the
1568 sign-appropriate type corresponding to that mode. */
1570 static void
1571 canonicalize_component_ref (tree *expr_p)
1573 tree expr = *expr_p;
1574 tree type;
1576 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1578 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1579 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1580 else
1581 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1583 /* One could argue that all the stuff below is not necessary for
1584 the non-bitfield case and declare it a FE error if type
1585 adjustment would be needed. */
1586 if (TREE_TYPE (expr) != type)
1588 #ifdef ENABLE_TYPES_CHECKING
1589 tree old_type = TREE_TYPE (expr);
1590 #endif
1591 int type_quals;
1593 /* We need to preserve qualifiers and propagate them from
1594 operand 0. */
1595 type_quals = TYPE_QUALS (type)
1596 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1597 if (TYPE_QUALS (type) != type_quals)
1598 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1600 /* Set the type of the COMPONENT_REF to the underlying type. */
1601 TREE_TYPE (expr) = type;
1603 #ifdef ENABLE_TYPES_CHECKING
1604 /* It is now a FE error, if the conversion from the canonical
1605 type to the original expression type is not useless. */
1606 gcc_assert (useless_type_conversion_p (old_type, type));
1607 #endif
1611 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1612 to foo, embed that change in the ADDR_EXPR by converting
1613 T array[U];
1614 (T *)&array
1616 &array[L]
1617 where L is the lower bound. For simplicity, only do this for constant
1618 lower bound.
1619 The constraint is that the type of &array[L] is trivially convertible
1620 to T *. */
1622 static void
1623 canonicalize_addr_expr (tree *expr_p)
1625 tree expr = *expr_p;
1626 tree addr_expr = TREE_OPERAND (expr, 0);
1627 tree datype, ddatype, pddatype;
1629 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1630 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1631 || TREE_CODE (addr_expr) != ADDR_EXPR)
1632 return;
1634 /* The addr_expr type should be a pointer to an array. */
1635 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1636 if (TREE_CODE (datype) != ARRAY_TYPE)
1637 return;
1639 /* The pointer to element type shall be trivially convertible to
1640 the expression pointer type. */
1641 ddatype = TREE_TYPE (datype);
1642 pddatype = build_pointer_type (ddatype);
1643 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1644 pddatype))
1645 return;
1647 /* The lower bound and element sizes must be constant. */
1648 if (!TYPE_SIZE_UNIT (ddatype)
1649 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1650 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1651 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1652 return;
1654 /* All checks succeeded. Build a new node to merge the cast. */
1655 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1656 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1657 NULL_TREE, NULL_TREE);
1658 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1660 /* We can have stripped a required restrict qualifier above. */
1661 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1662 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1665 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1666 underneath as appropriate. */
1668 static enum gimplify_status
1669 gimplify_conversion (tree *expr_p)
1671 location_t loc = EXPR_LOCATION (*expr_p);
1672 gcc_assert (CONVERT_EXPR_P (*expr_p));
1674 /* Then strip away all but the outermost conversion. */
1675 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1677 /* And remove the outermost conversion if it's useless. */
1678 if (tree_ssa_useless_type_conversion (*expr_p))
1679 *expr_p = TREE_OPERAND (*expr_p, 0);
1681 /* If we still have a conversion at the toplevel,
1682 then canonicalize some constructs. */
1683 if (CONVERT_EXPR_P (*expr_p))
1685 tree sub = TREE_OPERAND (*expr_p, 0);
1687 /* If a NOP conversion is changing the type of a COMPONENT_REF
1688 expression, then canonicalize its type now in order to expose more
1689 redundant conversions. */
1690 if (TREE_CODE (sub) == COMPONENT_REF)
1691 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1693 /* If a NOP conversion is changing a pointer to array of foo
1694 to a pointer to foo, embed that change in the ADDR_EXPR. */
1695 else if (TREE_CODE (sub) == ADDR_EXPR)
1696 canonicalize_addr_expr (expr_p);
1699 /* If we have a conversion to a non-register type force the
1700 use of a VIEW_CONVERT_EXPR instead. */
1701 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1702 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1703 TREE_OPERAND (*expr_p, 0));
1705 return GS_OK;
1708 /* Nonlocal VLAs seen in the current function. */
1709 static struct pointer_set_t *nonlocal_vlas;
1711 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1712 static tree nonlocal_vla_vars;
1714 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1715 DECL_VALUE_EXPR, and it's worth re-examining things. */
1717 static enum gimplify_status
1718 gimplify_var_or_parm_decl (tree *expr_p)
1720 tree decl = *expr_p;
1722 /* ??? If this is a local variable, and it has not been seen in any
1723 outer BIND_EXPR, then it's probably the result of a duplicate
1724 declaration, for which we've already issued an error. It would
1725 be really nice if the front end wouldn't leak these at all.
1726 Currently the only known culprit is C++ destructors, as seen
1727 in g++.old-deja/g++.jason/binding.C. */
1728 if (TREE_CODE (decl) == VAR_DECL
1729 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1730 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1731 && decl_function_context (decl) == current_function_decl)
1733 gcc_assert (seen_error ());
1734 return GS_ERROR;
1737 /* When within an OpenMP context, notice uses of variables. */
1738 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1739 return GS_ALL_DONE;
1741 /* If the decl is an alias for another expression, substitute it now. */
1742 if (DECL_HAS_VALUE_EXPR_P (decl))
1744 tree value_expr = DECL_VALUE_EXPR (decl);
1746 /* For referenced nonlocal VLAs add a decl for debugging purposes
1747 to the current function. */
1748 if (TREE_CODE (decl) == VAR_DECL
1749 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1750 && nonlocal_vlas != NULL
1751 && TREE_CODE (value_expr) == INDIRECT_REF
1752 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1753 && decl_function_context (decl) != current_function_decl)
1755 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1756 while (ctx
1757 && (ctx->region_type == ORT_WORKSHARE
1758 || ctx->region_type == ORT_SIMD))
1759 ctx = ctx->outer_context;
1760 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1762 tree copy = copy_node (decl);
1764 lang_hooks.dup_lang_specific_decl (copy);
1765 SET_DECL_RTL (copy, 0);
1766 TREE_USED (copy) = 1;
1767 DECL_CHAIN (copy) = nonlocal_vla_vars;
1768 nonlocal_vla_vars = copy;
1769 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1770 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1774 *expr_p = unshare_expr (value_expr);
1775 return GS_OK;
1778 return GS_ALL_DONE;
1781 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1783 static void
1784 recalculate_side_effects (tree t)
1786 enum tree_code code = TREE_CODE (t);
1787 int len = TREE_OPERAND_LENGTH (t);
1788 int i;
1790 switch (TREE_CODE_CLASS (code))
1792 case tcc_expression:
1793 switch (code)
1795 case INIT_EXPR:
1796 case MODIFY_EXPR:
1797 case VA_ARG_EXPR:
1798 case PREDECREMENT_EXPR:
1799 case PREINCREMENT_EXPR:
1800 case POSTDECREMENT_EXPR:
1801 case POSTINCREMENT_EXPR:
1802 /* All of these have side-effects, no matter what their
1803 operands are. */
1804 return;
1806 default:
1807 break;
1809 /* Fall through. */
1811 case tcc_comparison: /* a comparison expression */
1812 case tcc_unary: /* a unary arithmetic expression */
1813 case tcc_binary: /* a binary arithmetic expression */
1814 case tcc_reference: /* a reference */
1815 case tcc_vl_exp: /* a function call */
1816 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1817 for (i = 0; i < len; ++i)
1819 tree op = TREE_OPERAND (t, i);
1820 if (op && TREE_SIDE_EFFECTS (op))
1821 TREE_SIDE_EFFECTS (t) = 1;
1823 break;
1825 case tcc_constant:
1826 /* No side-effects. */
1827 return;
1829 default:
1830 gcc_unreachable ();
1834 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1835 node *EXPR_P.
1837 compound_lval
1838 : min_lval '[' val ']'
1839 | min_lval '.' ID
1840 | compound_lval '[' val ']'
1841 | compound_lval '.' ID
1843 This is not part of the original SIMPLE definition, which separates
1844 array and member references, but it seems reasonable to handle them
1845 together. Also, this way we don't run into problems with union
1846 aliasing; gcc requires that for accesses through a union to alias, the
1847 union reference must be explicit, which was not always the case when we
1848 were splitting up array and member refs.
1850 PRE_P points to the sequence where side effects that must happen before
1851 *EXPR_P should be stored.
1853 POST_P points to the sequence where side effects that must happen after
1854 *EXPR_P should be stored. */
1856 static enum gimplify_status
1857 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1858 fallback_t fallback)
1860 tree *p;
1861 enum gimplify_status ret = GS_ALL_DONE, tret;
1862 int i;
1863 location_t loc = EXPR_LOCATION (*expr_p);
1864 tree expr = *expr_p;
1866 /* Create a stack of the subexpressions so later we can walk them in
1867 order from inner to outer. */
1868 auto_vec<tree, 10> expr_stack;
1870 /* We can handle anything that get_inner_reference can deal with. */
1871 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1873 restart:
1874 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1875 if (TREE_CODE (*p) == INDIRECT_REF)
1876 *p = fold_indirect_ref_loc (loc, *p);
1878 if (handled_component_p (*p))
1880 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1881 additional COMPONENT_REFs. */
1882 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1883 && gimplify_var_or_parm_decl (p) == GS_OK)
1884 goto restart;
1885 else
1886 break;
1888 expr_stack.safe_push (*p);
1891 gcc_assert (expr_stack.length ());
1893 /* Now EXPR_STACK is a stack of pointers to all the refs we've
1894 walked through and P points to the innermost expression.
1896 Java requires that we elaborated nodes in source order. That
1897 means we must gimplify the inner expression followed by each of
1898 the indices, in order. But we can't gimplify the inner
1899 expression until we deal with any variable bounds, sizes, or
1900 positions in order to deal with PLACEHOLDER_EXPRs.
1902 So we do this in three steps. First we deal with the annotations
1903 for any variables in the components, then we gimplify the base,
1904 then we gimplify any indices, from left to right. */
1905 for (i = expr_stack.length () - 1; i >= 0; i--)
1907 tree t = expr_stack[i];
1909 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1911 /* Gimplify the low bound and element type size and put them into
1912 the ARRAY_REF. If these values are set, they have already been
1913 gimplified. */
1914 if (TREE_OPERAND (t, 2) == NULL_TREE)
1916 tree low = unshare_expr (array_ref_low_bound (t));
1917 if (!is_gimple_min_invariant (low))
1919 TREE_OPERAND (t, 2) = low;
1920 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1921 post_p, is_gimple_reg,
1922 fb_rvalue);
1923 ret = MIN (ret, tret);
1926 else
1928 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1929 is_gimple_reg, fb_rvalue);
1930 ret = MIN (ret, tret);
1933 if (TREE_OPERAND (t, 3) == NULL_TREE)
1935 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1936 tree elmt_size = unshare_expr (array_ref_element_size (t));
1937 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1939 /* Divide the element size by the alignment of the element
1940 type (above). */
1941 elmt_size
1942 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
1944 if (!is_gimple_min_invariant (elmt_size))
1946 TREE_OPERAND (t, 3) = elmt_size;
1947 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
1948 post_p, is_gimple_reg,
1949 fb_rvalue);
1950 ret = MIN (ret, tret);
1953 else
1955 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1956 is_gimple_reg, fb_rvalue);
1957 ret = MIN (ret, tret);
1960 else if (TREE_CODE (t) == COMPONENT_REF)
1962 /* Set the field offset into T and gimplify it. */
1963 if (TREE_OPERAND (t, 2) == NULL_TREE)
1965 tree offset = unshare_expr (component_ref_field_offset (t));
1966 tree field = TREE_OPERAND (t, 1);
1967 tree factor
1968 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1970 /* Divide the offset by its alignment. */
1971 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
1973 if (!is_gimple_min_invariant (offset))
1975 TREE_OPERAND (t, 2) = offset;
1976 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1977 post_p, is_gimple_reg,
1978 fb_rvalue);
1979 ret = MIN (ret, tret);
1982 else
1984 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1985 is_gimple_reg, fb_rvalue);
1986 ret = MIN (ret, tret);
1991 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1992 so as to match the min_lval predicate. Failure to do so may result
1993 in the creation of large aggregate temporaries. */
1994 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1995 fallback | fb_lvalue);
1996 ret = MIN (ret, tret);
1998 /* And finally, the indices and operands of ARRAY_REF. During this
1999 loop we also remove any useless conversions. */
2000 for (; expr_stack.length () > 0; )
2002 tree t = expr_stack.pop ();
2004 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2006 /* Gimplify the dimension. */
2007 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2009 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2010 is_gimple_val, fb_rvalue);
2011 ret = MIN (ret, tret);
2015 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2017 /* The innermost expression P may have originally had
2018 TREE_SIDE_EFFECTS set which would have caused all the outer
2019 expressions in *EXPR_P leading to P to also have had
2020 TREE_SIDE_EFFECTS set. */
2021 recalculate_side_effects (t);
2024 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2025 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2027 canonicalize_component_ref (expr_p);
2030 expr_stack.release ();
2032 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2034 return ret;
2037 /* Gimplify the self modifying expression pointed to by EXPR_P
2038 (++, --, +=, -=).
2040 PRE_P points to the list where side effects that must happen before
2041 *EXPR_P should be stored.
2043 POST_P points to the list where side effects that must happen after
2044 *EXPR_P should be stored.
2046 WANT_VALUE is nonzero iff we want to use the value of this expression
2047 in another expression.
2049 ARITH_TYPE is the type the computation should be performed in. */
2051 enum gimplify_status
2052 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2053 bool want_value, tree arith_type)
2055 enum tree_code code;
2056 tree lhs, lvalue, rhs, t1;
2057 gimple_seq post = NULL, *orig_post_p = post_p;
2058 bool postfix;
2059 enum tree_code arith_code;
2060 enum gimplify_status ret;
2061 location_t loc = EXPR_LOCATION (*expr_p);
2063 code = TREE_CODE (*expr_p);
2065 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2066 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2068 /* Prefix or postfix? */
2069 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2070 /* Faster to treat as prefix if result is not used. */
2071 postfix = want_value;
2072 else
2073 postfix = false;
2075 /* For postfix, make sure the inner expression's post side effects
2076 are executed after side effects from this expression. */
2077 if (postfix)
2078 post_p = &post;
2080 /* Add or subtract? */
2081 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2082 arith_code = PLUS_EXPR;
2083 else
2084 arith_code = MINUS_EXPR;
2086 /* Gimplify the LHS into a GIMPLE lvalue. */
2087 lvalue = TREE_OPERAND (*expr_p, 0);
2088 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2089 if (ret == GS_ERROR)
2090 return ret;
2092 /* Extract the operands to the arithmetic operation. */
2093 lhs = lvalue;
2094 rhs = TREE_OPERAND (*expr_p, 1);
2096 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2097 that as the result value and in the postqueue operation. */
2098 if (postfix)
2100 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2101 if (ret == GS_ERROR)
2102 return ret;
2104 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2107 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2108 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2110 rhs = convert_to_ptrofftype_loc (loc, rhs);
2111 if (arith_code == MINUS_EXPR)
2112 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2113 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2115 else
2116 t1 = fold_convert (TREE_TYPE (*expr_p),
2117 fold_build2 (arith_code, arith_type,
2118 fold_convert (arith_type, lhs),
2119 fold_convert (arith_type, rhs)));
2121 if (postfix)
2123 gimplify_assign (lvalue, t1, pre_p);
2124 gimplify_seq_add_seq (orig_post_p, post);
2125 *expr_p = lhs;
2126 return GS_ALL_DONE;
2128 else
2130 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2131 return GS_OK;
2135 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2137 static void
2138 maybe_with_size_expr (tree *expr_p)
2140 tree expr = *expr_p;
2141 tree type = TREE_TYPE (expr);
2142 tree size;
2144 /* If we've already wrapped this or the type is error_mark_node, we can't do
2145 anything. */
2146 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2147 || type == error_mark_node)
2148 return;
2150 /* If the size isn't known or is a constant, we have nothing to do. */
2151 size = TYPE_SIZE_UNIT (type);
2152 if (!size || TREE_CODE (size) == INTEGER_CST)
2153 return;
2155 /* Otherwise, make a WITH_SIZE_EXPR. */
2156 size = unshare_expr (size);
2157 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2158 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2161 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2162 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2163 the CALL_EXPR. */
2165 static enum gimplify_status
2166 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2168 bool (*test) (tree);
2169 fallback_t fb;
2171 /* In general, we allow lvalues for function arguments to avoid
2172 extra overhead of copying large aggregates out of even larger
2173 aggregates into temporaries only to copy the temporaries to
2174 the argument list. Make optimizers happy by pulling out to
2175 temporaries those types that fit in registers. */
2176 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2177 test = is_gimple_val, fb = fb_rvalue;
2178 else
2180 test = is_gimple_lvalue, fb = fb_either;
2181 /* Also strip a TARGET_EXPR that would force an extra copy. */
2182 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2184 tree init = TARGET_EXPR_INITIAL (*arg_p);
2185 if (init
2186 && !VOID_TYPE_P (TREE_TYPE (init)))
2187 *arg_p = init;
2191 /* If this is a variable sized type, we must remember the size. */
2192 maybe_with_size_expr (arg_p);
2194 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2195 /* Make sure arguments have the same location as the function call
2196 itself. */
2197 protected_set_expr_location (*arg_p, call_location);
2199 /* There is a sequence point before a function call. Side effects in
2200 the argument list must occur before the actual call. So, when
2201 gimplifying arguments, force gimplify_expr to use an internal
2202 post queue which is then appended to the end of PRE_P. */
2203 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2206 /* Don't fold inside offloading regsion: it can break code by adding decl
2207 references that weren't in the source. We'll do it during omplower pass
2208 instead. */
2210 static bool
2211 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2213 struct gimplify_omp_ctx *ctx;
2214 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2215 if (ctx->region_type & ORT_TARGET
2216 && ctx->region_type & ORT_TARGET_OFFLOAD)
2217 return false;
2218 return fold_stmt (gsi);
2221 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2222 WANT_VALUE is true if the result of the call is desired. */
2224 static enum gimplify_status
2225 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2227 tree fndecl, parms, p, fnptrtype;
2228 enum gimplify_status ret;
2229 int i, nargs;
2230 gimple call;
2231 bool builtin_va_start_p = FALSE;
2232 location_t loc = EXPR_LOCATION (*expr_p);
2234 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2236 /* For reliable diagnostics during inlining, it is necessary that
2237 every call_expr be annotated with file and line. */
2238 if (! EXPR_HAS_LOCATION (*expr_p))
2239 SET_EXPR_LOCATION (*expr_p, input_location);
2241 /* This may be a call to a builtin function.
2243 Builtin function calls may be transformed into different
2244 (and more efficient) builtin function calls under certain
2245 circumstances. Unfortunately, gimplification can muck things
2246 up enough that the builtin expanders are not aware that certain
2247 transformations are still valid.
2249 So we attempt transformation/gimplification of the call before
2250 we gimplify the CALL_EXPR. At this time we do not manage to
2251 transform all calls in the same manner as the expanders do, but
2252 we do transform most of them. */
2253 fndecl = get_callee_fndecl (*expr_p);
2254 if (fndecl
2255 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2256 switch (DECL_FUNCTION_CODE (fndecl))
2258 case BUILT_IN_VA_START:
2260 builtin_va_start_p = TRUE;
2261 if (call_expr_nargs (*expr_p) < 2)
2263 error ("too few arguments to function %<va_start%>");
2264 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2265 return GS_OK;
2268 if (fold_builtin_next_arg (*expr_p, true))
2270 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2271 return GS_OK;
2273 break;
2275 case BUILT_IN_LINE:
2277 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2278 *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2279 return GS_OK;
2281 case BUILT_IN_FILE:
2283 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2284 *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2285 return GS_OK;
2287 case BUILT_IN_FUNCTION:
2289 const char *function;
2290 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2291 *expr_p = build_string_literal (strlen (function) + 1, function);
2292 return GS_OK;
2294 default:
2297 if (fndecl && DECL_BUILT_IN (fndecl))
2299 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2300 if (new_tree && new_tree != *expr_p)
2302 /* There was a transformation of this call which computes the
2303 same value, but in a more efficient way. Return and try
2304 again. */
2305 *expr_p = new_tree;
2306 return GS_OK;
2310 /* Remember the original function pointer type. */
2311 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2313 /* There is a sequence point before the call, so any side effects in
2314 the calling expression must occur before the actual call. Force
2315 gimplify_expr to use an internal post queue. */
2316 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2317 is_gimple_call_addr, fb_rvalue);
2319 nargs = call_expr_nargs (*expr_p);
2321 /* Get argument types for verification. */
2322 fndecl = get_callee_fndecl (*expr_p);
2323 parms = NULL_TREE;
2324 if (fndecl)
2325 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2326 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2327 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2329 if (fndecl && DECL_ARGUMENTS (fndecl))
2330 p = DECL_ARGUMENTS (fndecl);
2331 else if (parms)
2332 p = parms;
2333 else
2334 p = NULL_TREE;
2335 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2338 /* If the last argument is __builtin_va_arg_pack () and it is not
2339 passed as a named argument, decrease the number of CALL_EXPR
2340 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2341 if (!p
2342 && i < nargs
2343 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2345 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2346 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2348 if (last_arg_fndecl
2349 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2350 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2351 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2353 tree call = *expr_p;
2355 --nargs;
2356 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2357 CALL_EXPR_FN (call),
2358 nargs, CALL_EXPR_ARGP (call));
2360 /* Copy all CALL_EXPR flags, location and block, except
2361 CALL_EXPR_VA_ARG_PACK flag. */
2362 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2363 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2364 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2365 = CALL_EXPR_RETURN_SLOT_OPT (call);
2366 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2367 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2369 /* Set CALL_EXPR_VA_ARG_PACK. */
2370 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2374 /* Finally, gimplify the function arguments. */
2375 if (nargs > 0)
2377 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2378 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2379 PUSH_ARGS_REVERSED ? i-- : i++)
2381 enum gimplify_status t;
2383 /* Avoid gimplifying the second argument to va_start, which needs to
2384 be the plain PARM_DECL. */
2385 if ((i != 1) || !builtin_va_start_p)
2387 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2388 EXPR_LOCATION (*expr_p));
2390 if (t == GS_ERROR)
2391 ret = GS_ERROR;
2396 /* Verify the function result. */
2397 if (want_value && fndecl
2398 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2400 error_at (loc, "using result of function returning %<void%>");
2401 ret = GS_ERROR;
2404 /* Try this again in case gimplification exposed something. */
2405 if (ret != GS_ERROR)
2407 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2409 if (new_tree && new_tree != *expr_p)
2411 /* There was a transformation of this call which computes the
2412 same value, but in a more efficient way. Return and try
2413 again. */
2414 *expr_p = new_tree;
2415 return GS_OK;
2418 else
2420 *expr_p = error_mark_node;
2421 return GS_ERROR;
2424 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2425 decl. This allows us to eliminate redundant or useless
2426 calls to "const" functions. */
2427 if (TREE_CODE (*expr_p) == CALL_EXPR)
2429 int flags = call_expr_flags (*expr_p);
2430 if (flags & (ECF_CONST | ECF_PURE)
2431 /* An infinite loop is considered a side effect. */
2432 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2433 TREE_SIDE_EFFECTS (*expr_p) = 0;
2436 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2437 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2438 form and delegate the creation of a GIMPLE_CALL to
2439 gimplify_modify_expr. This is always possible because when
2440 WANT_VALUE is true, the caller wants the result of this call into
2441 a temporary, which means that we will emit an INIT_EXPR in
2442 internal_get_tmp_var which will then be handled by
2443 gimplify_modify_expr. */
2444 if (!want_value)
2446 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2447 have to do is replicate it as a GIMPLE_CALL tuple. */
2448 gimple_stmt_iterator gsi;
2449 call = gimple_build_call_from_tree (*expr_p);
2450 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2451 notice_special_calls (call);
2452 gimplify_seq_add_stmt (pre_p, call);
2453 gsi = gsi_last (*pre_p);
2454 maybe_fold_stmt (&gsi);
2455 *expr_p = NULL_TREE;
2457 else
2458 /* Remember the original function type. */
2459 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2460 CALL_EXPR_FN (*expr_p));
2462 return ret;
2465 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2466 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2468 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2469 condition is true or false, respectively. If null, we should generate
2470 our own to skip over the evaluation of this specific expression.
2472 LOCUS is the source location of the COND_EXPR.
2474 This function is the tree equivalent of do_jump.
2476 shortcut_cond_r should only be called by shortcut_cond_expr. */
2478 static tree
2479 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2480 location_t locus)
2482 tree local_label = NULL_TREE;
2483 tree t, expr = NULL;
2485 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2486 retain the shortcut semantics. Just insert the gotos here;
2487 shortcut_cond_expr will append the real blocks later. */
2488 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2490 location_t new_locus;
2492 /* Turn if (a && b) into
2494 if (a); else goto no;
2495 if (b) goto yes; else goto no;
2496 (no:) */
2498 if (false_label_p == NULL)
2499 false_label_p = &local_label;
2501 /* Keep the original source location on the first 'if'. */
2502 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2503 append_to_statement_list (t, &expr);
2505 /* Set the source location of the && on the second 'if'. */
2506 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2507 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2508 new_locus);
2509 append_to_statement_list (t, &expr);
2511 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2513 location_t new_locus;
2515 /* Turn if (a || b) into
2517 if (a) goto yes;
2518 if (b) goto yes; else goto no;
2519 (yes:) */
2521 if (true_label_p == NULL)
2522 true_label_p = &local_label;
2524 /* Keep the original source location on the first 'if'. */
2525 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2526 append_to_statement_list (t, &expr);
2528 /* Set the source location of the || on the second 'if'. */
2529 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2530 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2531 new_locus);
2532 append_to_statement_list (t, &expr);
2534 else if (TREE_CODE (pred) == COND_EXPR
2535 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2536 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2538 location_t new_locus;
2540 /* As long as we're messing with gotos, turn if (a ? b : c) into
2541 if (a)
2542 if (b) goto yes; else goto no;
2543 else
2544 if (c) goto yes; else goto no;
2546 Don't do this if one of the arms has void type, which can happen
2547 in C++ when the arm is throw. */
2549 /* Keep the original source location on the first 'if'. Set the source
2550 location of the ? on the second 'if'. */
2551 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2552 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2553 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2554 false_label_p, locus),
2555 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2556 false_label_p, new_locus));
2558 else
2560 expr = build3 (COND_EXPR, void_type_node, pred,
2561 build_and_jump (true_label_p),
2562 build_and_jump (false_label_p));
2563 SET_EXPR_LOCATION (expr, locus);
2566 if (local_label)
2568 t = build1 (LABEL_EXPR, void_type_node, local_label);
2569 append_to_statement_list (t, &expr);
2572 return expr;
2575 /* Given a conditional expression EXPR with short-circuit boolean
2576 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2577 predicate apart into the equivalent sequence of conditionals. */
2579 static tree
2580 shortcut_cond_expr (tree expr)
2582 tree pred = TREE_OPERAND (expr, 0);
2583 tree then_ = TREE_OPERAND (expr, 1);
2584 tree else_ = TREE_OPERAND (expr, 2);
2585 tree true_label, false_label, end_label, t;
2586 tree *true_label_p;
2587 tree *false_label_p;
2588 bool emit_end, emit_false, jump_over_else;
2589 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2590 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2592 /* First do simple transformations. */
2593 if (!else_se)
2595 /* If there is no 'else', turn
2596 if (a && b) then c
2597 into
2598 if (a) if (b) then c. */
2599 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2601 /* Keep the original source location on the first 'if'. */
2602 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2603 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2604 /* Set the source location of the && on the second 'if'. */
2605 if (EXPR_HAS_LOCATION (pred))
2606 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2607 then_ = shortcut_cond_expr (expr);
2608 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2609 pred = TREE_OPERAND (pred, 0);
2610 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2611 SET_EXPR_LOCATION (expr, locus);
2615 if (!then_se)
2617 /* If there is no 'then', turn
2618 if (a || b); else d
2619 into
2620 if (a); else if (b); else d. */
2621 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2623 /* Keep the original source location on the first 'if'. */
2624 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2625 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2626 /* Set the source location of the || on the second 'if'. */
2627 if (EXPR_HAS_LOCATION (pred))
2628 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2629 else_ = shortcut_cond_expr (expr);
2630 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2631 pred = TREE_OPERAND (pred, 0);
2632 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2633 SET_EXPR_LOCATION (expr, locus);
2637 /* If we're done, great. */
2638 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2639 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2640 return expr;
2642 /* Otherwise we need to mess with gotos. Change
2643 if (a) c; else d;
2645 if (a); else goto no;
2646 c; goto end;
2647 no: d; end:
2648 and recursively gimplify the condition. */
2650 true_label = false_label = end_label = NULL_TREE;
2652 /* If our arms just jump somewhere, hijack those labels so we don't
2653 generate jumps to jumps. */
2655 if (then_
2656 && TREE_CODE (then_) == GOTO_EXPR
2657 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2659 true_label = GOTO_DESTINATION (then_);
2660 then_ = NULL;
2661 then_se = false;
2664 if (else_
2665 && TREE_CODE (else_) == GOTO_EXPR
2666 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2668 false_label = GOTO_DESTINATION (else_);
2669 else_ = NULL;
2670 else_se = false;
2673 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2674 if (true_label)
2675 true_label_p = &true_label;
2676 else
2677 true_label_p = NULL;
2679 /* The 'else' branch also needs a label if it contains interesting code. */
2680 if (false_label || else_se)
2681 false_label_p = &false_label;
2682 else
2683 false_label_p = NULL;
2685 /* If there was nothing else in our arms, just forward the label(s). */
2686 if (!then_se && !else_se)
2687 return shortcut_cond_r (pred, true_label_p, false_label_p,
2688 EXPR_LOC_OR_LOC (expr, input_location));
2690 /* If our last subexpression already has a terminal label, reuse it. */
2691 if (else_se)
2692 t = expr_last (else_);
2693 else if (then_se)
2694 t = expr_last (then_);
2695 else
2696 t = NULL;
2697 if (t && TREE_CODE (t) == LABEL_EXPR)
2698 end_label = LABEL_EXPR_LABEL (t);
2700 /* If we don't care about jumping to the 'else' branch, jump to the end
2701 if the condition is false. */
2702 if (!false_label_p)
2703 false_label_p = &end_label;
2705 /* We only want to emit these labels if we aren't hijacking them. */
2706 emit_end = (end_label == NULL_TREE);
2707 emit_false = (false_label == NULL_TREE);
2709 /* We only emit the jump over the else clause if we have to--if the
2710 then clause may fall through. Otherwise we can wind up with a
2711 useless jump and a useless label at the end of gimplified code,
2712 which will cause us to think that this conditional as a whole
2713 falls through even if it doesn't. If we then inline a function
2714 which ends with such a condition, that can cause us to issue an
2715 inappropriate warning about control reaching the end of a
2716 non-void function. */
2717 jump_over_else = block_may_fallthru (then_);
2719 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2720 EXPR_LOC_OR_LOC (expr, input_location));
2722 expr = NULL;
2723 append_to_statement_list (pred, &expr);
2725 append_to_statement_list (then_, &expr);
2726 if (else_se)
2728 if (jump_over_else)
2730 tree last = expr_last (expr);
2731 t = build_and_jump (&end_label);
2732 if (EXPR_HAS_LOCATION (last))
2733 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2734 append_to_statement_list (t, &expr);
2736 if (emit_false)
2738 t = build1 (LABEL_EXPR, void_type_node, false_label);
2739 append_to_statement_list (t, &expr);
2741 append_to_statement_list (else_, &expr);
2743 if (emit_end && end_label)
2745 t = build1 (LABEL_EXPR, void_type_node, end_label);
2746 append_to_statement_list (t, &expr);
2749 return expr;
2752 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2754 tree
2755 gimple_boolify (tree expr)
2757 tree type = TREE_TYPE (expr);
2758 location_t loc = EXPR_LOCATION (expr);
2760 if (TREE_CODE (expr) == NE_EXPR
2761 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2762 && integer_zerop (TREE_OPERAND (expr, 1)))
2764 tree call = TREE_OPERAND (expr, 0);
2765 tree fn = get_callee_fndecl (call);
2767 /* For __builtin_expect ((long) (x), y) recurse into x as well
2768 if x is truth_value_p. */
2769 if (fn
2770 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2771 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2772 && call_expr_nargs (call) == 2)
2774 tree arg = CALL_EXPR_ARG (call, 0);
2775 if (arg)
2777 if (TREE_CODE (arg) == NOP_EXPR
2778 && TREE_TYPE (arg) == TREE_TYPE (call))
2779 arg = TREE_OPERAND (arg, 0);
2780 if (truth_value_p (TREE_CODE (arg)))
2782 arg = gimple_boolify (arg);
2783 CALL_EXPR_ARG (call, 0)
2784 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2790 switch (TREE_CODE (expr))
2792 case TRUTH_AND_EXPR:
2793 case TRUTH_OR_EXPR:
2794 case TRUTH_XOR_EXPR:
2795 case TRUTH_ANDIF_EXPR:
2796 case TRUTH_ORIF_EXPR:
2797 /* Also boolify the arguments of truth exprs. */
2798 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2799 /* FALLTHRU */
2801 case TRUTH_NOT_EXPR:
2802 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2804 /* These expressions always produce boolean results. */
2805 if (TREE_CODE (type) != BOOLEAN_TYPE)
2806 TREE_TYPE (expr) = boolean_type_node;
2807 return expr;
2809 case ANNOTATE_EXPR:
2810 if ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1))
2811 == annot_expr_ivdep_kind)
2813 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2814 if (TREE_CODE (type) != BOOLEAN_TYPE)
2815 TREE_TYPE (expr) = boolean_type_node;
2816 return expr;
2818 /* FALLTHRU */
2820 default:
2821 if (COMPARISON_CLASS_P (expr))
2823 /* There expressions always prduce boolean results. */
2824 if (TREE_CODE (type) != BOOLEAN_TYPE)
2825 TREE_TYPE (expr) = boolean_type_node;
2826 return expr;
2828 /* Other expressions that get here must have boolean values, but
2829 might need to be converted to the appropriate mode. */
2830 if (TREE_CODE (type) == BOOLEAN_TYPE)
2831 return expr;
2832 return fold_convert_loc (loc, boolean_type_node, expr);
2836 /* Given a conditional expression *EXPR_P without side effects, gimplify
2837 its operands. New statements are inserted to PRE_P. */
2839 static enum gimplify_status
2840 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2842 tree expr = *expr_p, cond;
2843 enum gimplify_status ret, tret;
2844 enum tree_code code;
2846 cond = gimple_boolify (COND_EXPR_COND (expr));
2848 /* We need to handle && and || specially, as their gimplification
2849 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2850 code = TREE_CODE (cond);
2851 if (code == TRUTH_ANDIF_EXPR)
2852 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2853 else if (code == TRUTH_ORIF_EXPR)
2854 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2855 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2856 COND_EXPR_COND (*expr_p) = cond;
2858 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2859 is_gimple_val, fb_rvalue);
2860 ret = MIN (ret, tret);
2861 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2862 is_gimple_val, fb_rvalue);
2864 return MIN (ret, tret);
2867 /* Return true if evaluating EXPR could trap.
2868 EXPR is GENERIC, while tree_could_trap_p can be called
2869 only on GIMPLE. */
2871 static bool
2872 generic_expr_could_trap_p (tree expr)
2874 unsigned i, n;
2876 if (!expr || is_gimple_val (expr))
2877 return false;
2879 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2880 return true;
2882 n = TREE_OPERAND_LENGTH (expr);
2883 for (i = 0; i < n; i++)
2884 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2885 return true;
2887 return false;
2890 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2891 into
2893 if (p) if (p)
2894 t1 = a; a;
2895 else or else
2896 t1 = b; b;
2899 The second form is used when *EXPR_P is of type void.
2901 PRE_P points to the list where side effects that must happen before
2902 *EXPR_P should be stored. */
2904 static enum gimplify_status
2905 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2907 tree expr = *expr_p;
2908 tree type = TREE_TYPE (expr);
2909 location_t loc = EXPR_LOCATION (expr);
2910 tree tmp, arm1, arm2;
2911 enum gimplify_status ret;
2912 tree label_true, label_false, label_cont;
2913 bool have_then_clause_p, have_else_clause_p;
2914 gimple gimple_cond;
2915 enum tree_code pred_code;
2916 gimple_seq seq = NULL;
2918 /* If this COND_EXPR has a value, copy the values into a temporary within
2919 the arms. */
2920 if (!VOID_TYPE_P (type))
2922 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
2923 tree result;
2925 /* If either an rvalue is ok or we do not require an lvalue, create the
2926 temporary. But we cannot do that if the type is addressable. */
2927 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
2928 && !TREE_ADDRESSABLE (type))
2930 if (gimplify_ctxp->allow_rhs_cond_expr
2931 /* If either branch has side effects or could trap, it can't be
2932 evaluated unconditionally. */
2933 && !TREE_SIDE_EFFECTS (then_)
2934 && !generic_expr_could_trap_p (then_)
2935 && !TREE_SIDE_EFFECTS (else_)
2936 && !generic_expr_could_trap_p (else_))
2937 return gimplify_pure_cond_expr (expr_p, pre_p);
2939 tmp = create_tmp_var (type, "iftmp");
2940 result = tmp;
2943 /* Otherwise, only create and copy references to the values. */
2944 else
2946 type = build_pointer_type (type);
2948 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2949 then_ = build_fold_addr_expr_loc (loc, then_);
2951 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2952 else_ = build_fold_addr_expr_loc (loc, else_);
2954 expr
2955 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
2957 tmp = create_tmp_var (type, "iftmp");
2958 result = build_simple_mem_ref_loc (loc, tmp);
2961 /* Build the new then clause, `tmp = then_;'. But don't build the
2962 assignment if the value is void; in C++ it can be if it's a throw. */
2963 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2964 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
2966 /* Similarly, build the new else clause, `tmp = else_;'. */
2967 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2968 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
2970 TREE_TYPE (expr) = void_type_node;
2971 recalculate_side_effects (expr);
2973 /* Move the COND_EXPR to the prequeue. */
2974 gimplify_stmt (&expr, pre_p);
2976 *expr_p = result;
2977 return GS_ALL_DONE;
2980 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
2981 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
2982 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
2983 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
2985 /* Make sure the condition has BOOLEAN_TYPE. */
2986 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2988 /* Break apart && and || conditions. */
2989 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2990 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2992 expr = shortcut_cond_expr (expr);
2994 if (expr != *expr_p)
2996 *expr_p = expr;
2998 /* We can't rely on gimplify_expr to re-gimplify the expanded
2999 form properly, as cleanups might cause the target labels to be
3000 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3001 set up a conditional context. */
3002 gimple_push_condition ();
3003 gimplify_stmt (expr_p, &seq);
3004 gimple_pop_condition (pre_p);
3005 gimple_seq_add_seq (pre_p, seq);
3007 return GS_ALL_DONE;
3011 /* Now do the normal gimplification. */
3013 /* Gimplify condition. */
3014 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3015 fb_rvalue);
3016 if (ret == GS_ERROR)
3017 return GS_ERROR;
3018 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3020 gimple_push_condition ();
3022 have_then_clause_p = have_else_clause_p = false;
3023 if (TREE_OPERAND (expr, 1) != NULL
3024 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3025 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3026 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3027 == current_function_decl)
3028 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3029 have different locations, otherwise we end up with incorrect
3030 location information on the branches. */
3031 && (optimize
3032 || !EXPR_HAS_LOCATION (expr)
3033 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3034 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3036 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3037 have_then_clause_p = true;
3039 else
3040 label_true = create_artificial_label (UNKNOWN_LOCATION);
3041 if (TREE_OPERAND (expr, 2) != NULL
3042 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3043 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3044 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3045 == current_function_decl)
3046 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3047 have different locations, otherwise we end up with incorrect
3048 location information on the branches. */
3049 && (optimize
3050 || !EXPR_HAS_LOCATION (expr)
3051 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3052 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3054 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3055 have_else_clause_p = true;
3057 else
3058 label_false = create_artificial_label (UNKNOWN_LOCATION);
3060 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3061 &arm2);
3063 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3064 label_false);
3066 gimplify_seq_add_stmt (&seq, gimple_cond);
3067 label_cont = NULL_TREE;
3068 if (!have_then_clause_p)
3070 /* For if (...) {} else { code; } put label_true after
3071 the else block. */
3072 if (TREE_OPERAND (expr, 1) == NULL_TREE
3073 && !have_else_clause_p
3074 && TREE_OPERAND (expr, 2) != NULL_TREE)
3075 label_cont = label_true;
3076 else
3078 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3079 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3080 /* For if (...) { code; } else {} or
3081 if (...) { code; } else goto label; or
3082 if (...) { code; return; } else { ... }
3083 label_cont isn't needed. */
3084 if (!have_else_clause_p
3085 && TREE_OPERAND (expr, 2) != NULL_TREE
3086 && gimple_seq_may_fallthru (seq))
3088 gimple g;
3089 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3091 g = gimple_build_goto (label_cont);
3093 /* GIMPLE_COND's are very low level; they have embedded
3094 gotos. This particular embedded goto should not be marked
3095 with the location of the original COND_EXPR, as it would
3096 correspond to the COND_EXPR's condition, not the ELSE or the
3097 THEN arms. To avoid marking it with the wrong location, flag
3098 it as "no location". */
3099 gimple_set_do_not_emit_location (g);
3101 gimplify_seq_add_stmt (&seq, g);
3105 if (!have_else_clause_p)
3107 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3108 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3110 if (label_cont)
3111 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3113 gimple_pop_condition (pre_p);
3114 gimple_seq_add_seq (pre_p, seq);
3116 if (ret == GS_ERROR)
3117 ; /* Do nothing. */
3118 else if (have_then_clause_p || have_else_clause_p)
3119 ret = GS_ALL_DONE;
3120 else
3122 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3123 expr = TREE_OPERAND (expr, 0);
3124 gimplify_stmt (&expr, pre_p);
3127 *expr_p = NULL;
3128 return ret;
3131 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3132 to be marked addressable.
3134 We cannot rely on such an expression being directly markable if a temporary
3135 has been created by the gimplification. In this case, we create another
3136 temporary and initialize it with a copy, which will become a store after we
3137 mark it addressable. This can happen if the front-end passed us something
3138 that it could not mark addressable yet, like a Fortran pass-by-reference
3139 parameter (int) floatvar. */
3141 static void
3142 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3144 while (handled_component_p (*expr_p))
3145 expr_p = &TREE_OPERAND (*expr_p, 0);
3146 if (is_gimple_reg (*expr_p))
3147 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3150 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3151 a call to __builtin_memcpy. */
3153 static enum gimplify_status
3154 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3155 gimple_seq *seq_p)
3157 tree t, to, to_ptr, from, from_ptr;
3158 gimple gs;
3159 location_t loc = EXPR_LOCATION (*expr_p);
3161 to = TREE_OPERAND (*expr_p, 0);
3162 from = TREE_OPERAND (*expr_p, 1);
3164 /* Mark the RHS addressable. Beware that it may not be possible to do so
3165 directly if a temporary has been created by the gimplification. */
3166 prepare_gimple_addressable (&from, seq_p);
3168 mark_addressable (from);
3169 from_ptr = build_fold_addr_expr_loc (loc, from);
3170 gimplify_arg (&from_ptr, seq_p, loc);
3172 mark_addressable (to);
3173 to_ptr = build_fold_addr_expr_loc (loc, to);
3174 gimplify_arg (&to_ptr, seq_p, loc);
3176 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3178 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3180 if (want_value)
3182 /* tmp = memcpy() */
3183 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3184 gimple_call_set_lhs (gs, t);
3185 gimplify_seq_add_stmt (seq_p, gs);
3187 *expr_p = build_simple_mem_ref (t);
3188 return GS_ALL_DONE;
3191 gimplify_seq_add_stmt (seq_p, gs);
3192 *expr_p = NULL;
3193 return GS_ALL_DONE;
3196 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3197 a call to __builtin_memset. In this case we know that the RHS is
3198 a CONSTRUCTOR with an empty element list. */
3200 static enum gimplify_status
3201 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3202 gimple_seq *seq_p)
3204 tree t, from, to, to_ptr;
3205 gimple gs;
3206 location_t loc = EXPR_LOCATION (*expr_p);
3208 /* Assert our assumptions, to abort instead of producing wrong code
3209 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3210 not be immediately exposed. */
3211 from = TREE_OPERAND (*expr_p, 1);
3212 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3213 from = TREE_OPERAND (from, 0);
3215 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3216 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3218 /* Now proceed. */
3219 to = TREE_OPERAND (*expr_p, 0);
3221 to_ptr = build_fold_addr_expr_loc (loc, to);
3222 gimplify_arg (&to_ptr, seq_p, loc);
3223 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3225 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3227 if (want_value)
3229 /* tmp = memset() */
3230 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3231 gimple_call_set_lhs (gs, t);
3232 gimplify_seq_add_stmt (seq_p, gs);
3234 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3235 return GS_ALL_DONE;
3238 gimplify_seq_add_stmt (seq_p, gs);
3239 *expr_p = NULL;
3240 return GS_ALL_DONE;
3243 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3244 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3245 assignment. Return non-null if we detect a potential overlap. */
3247 struct gimplify_init_ctor_preeval_data
3249 /* The base decl of the lhs object. May be NULL, in which case we
3250 have to assume the lhs is indirect. */
3251 tree lhs_base_decl;
3253 /* The alias set of the lhs object. */
3254 alias_set_type lhs_alias_set;
3257 static tree
3258 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3260 struct gimplify_init_ctor_preeval_data *data
3261 = (struct gimplify_init_ctor_preeval_data *) xdata;
3262 tree t = *tp;
3264 /* If we find the base object, obviously we have overlap. */
3265 if (data->lhs_base_decl == t)
3266 return t;
3268 /* If the constructor component is indirect, determine if we have a
3269 potential overlap with the lhs. The only bits of information we
3270 have to go on at this point are addressability and alias sets. */
3271 if ((INDIRECT_REF_P (t)
3272 || TREE_CODE (t) == MEM_REF)
3273 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3274 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3275 return t;
3277 /* If the constructor component is a call, determine if it can hide a
3278 potential overlap with the lhs through an INDIRECT_REF like above.
3279 ??? Ugh - this is completely broken. In fact this whole analysis
3280 doesn't look conservative. */
3281 if (TREE_CODE (t) == CALL_EXPR)
3283 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3285 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3286 if (POINTER_TYPE_P (TREE_VALUE (type))
3287 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3288 && alias_sets_conflict_p (data->lhs_alias_set,
3289 get_alias_set
3290 (TREE_TYPE (TREE_VALUE (type)))))
3291 return t;
3294 if (IS_TYPE_OR_DECL_P (t))
3295 *walk_subtrees = 0;
3296 return NULL;
3299 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3300 force values that overlap with the lhs (as described by *DATA)
3301 into temporaries. */
3303 static void
3304 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3305 struct gimplify_init_ctor_preeval_data *data)
3307 enum gimplify_status one;
3309 /* If the value is constant, then there's nothing to pre-evaluate. */
3310 if (TREE_CONSTANT (*expr_p))
3312 /* Ensure it does not have side effects, it might contain a reference to
3313 the object we're initializing. */
3314 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3315 return;
3318 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3319 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3320 return;
3322 /* Recurse for nested constructors. */
3323 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3325 unsigned HOST_WIDE_INT ix;
3326 constructor_elt *ce;
3327 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3329 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3330 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3332 return;
3335 /* If this is a variable sized type, we must remember the size. */
3336 maybe_with_size_expr (expr_p);
3338 /* Gimplify the constructor element to something appropriate for the rhs
3339 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3340 the gimplifier will consider this a store to memory. Doing this
3341 gimplification now means that we won't have to deal with complicated
3342 language-specific trees, nor trees like SAVE_EXPR that can induce
3343 exponential search behavior. */
3344 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3345 if (one == GS_ERROR)
3347 *expr_p = NULL;
3348 return;
3351 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3352 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3353 always be true for all scalars, since is_gimple_mem_rhs insists on a
3354 temporary variable for them. */
3355 if (DECL_P (*expr_p))
3356 return;
3358 /* If this is of variable size, we have no choice but to assume it doesn't
3359 overlap since we can't make a temporary for it. */
3360 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3361 return;
3363 /* Otherwise, we must search for overlap ... */
3364 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3365 return;
3367 /* ... and if found, force the value into a temporary. */
3368 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3371 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3372 a RANGE_EXPR in a CONSTRUCTOR for an array.
3374 var = lower;
3375 loop_entry:
3376 object[var] = value;
3377 if (var == upper)
3378 goto loop_exit;
3379 var = var + 1;
3380 goto loop_entry;
3381 loop_exit:
3383 We increment var _after_ the loop exit check because we might otherwise
3384 fail if upper == TYPE_MAX_VALUE (type for upper).
3386 Note that we never have to deal with SAVE_EXPRs here, because this has
3387 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3389 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3390 gimple_seq *, bool);
3392 static void
3393 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3394 tree value, tree array_elt_type,
3395 gimple_seq *pre_p, bool cleared)
3397 tree loop_entry_label, loop_exit_label, fall_thru_label;
3398 tree var, var_type, cref, tmp;
3400 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3401 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3402 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3404 /* Create and initialize the index variable. */
3405 var_type = TREE_TYPE (upper);
3406 var = create_tmp_var (var_type, NULL);
3407 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3409 /* Add the loop entry label. */
3410 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3412 /* Build the reference. */
3413 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3414 var, NULL_TREE, NULL_TREE);
3416 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3417 the store. Otherwise just assign value to the reference. */
3419 if (TREE_CODE (value) == CONSTRUCTOR)
3420 /* NB we might have to call ourself recursively through
3421 gimplify_init_ctor_eval if the value is a constructor. */
3422 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3423 pre_p, cleared);
3424 else
3425 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3427 /* We exit the loop when the index var is equal to the upper bound. */
3428 gimplify_seq_add_stmt (pre_p,
3429 gimple_build_cond (EQ_EXPR, var, upper,
3430 loop_exit_label, fall_thru_label));
3432 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3434 /* Otherwise, increment the index var... */
3435 tmp = build2 (PLUS_EXPR, var_type, var,
3436 fold_convert (var_type, integer_one_node));
3437 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3439 /* ...and jump back to the loop entry. */
3440 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3442 /* Add the loop exit label. */
3443 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3446 /* Return true if FDECL is accessing a field that is zero sized. */
3448 static bool
3449 zero_sized_field_decl (const_tree fdecl)
3451 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3452 && integer_zerop (DECL_SIZE (fdecl)))
3453 return true;
3454 return false;
3457 /* Return true if TYPE is zero sized. */
3459 static bool
3460 zero_sized_type (const_tree type)
3462 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3463 && integer_zerop (TYPE_SIZE (type)))
3464 return true;
3465 return false;
3468 /* A subroutine of gimplify_init_constructor. Generate individual
3469 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3470 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3471 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3472 zeroed first. */
3474 static void
3475 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3476 gimple_seq *pre_p, bool cleared)
3478 tree array_elt_type = NULL;
3479 unsigned HOST_WIDE_INT ix;
3480 tree purpose, value;
3482 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3483 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3485 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3487 tree cref;
3489 /* NULL values are created above for gimplification errors. */
3490 if (value == NULL)
3491 continue;
3493 if (cleared && initializer_zerop (value))
3494 continue;
3496 /* ??? Here's to hoping the front end fills in all of the indices,
3497 so we don't have to figure out what's missing ourselves. */
3498 gcc_assert (purpose);
3500 /* Skip zero-sized fields, unless value has side-effects. This can
3501 happen with calls to functions returning a zero-sized type, which
3502 we shouldn't discard. As a number of downstream passes don't
3503 expect sets of zero-sized fields, we rely on the gimplification of
3504 the MODIFY_EXPR we make below to drop the assignment statement. */
3505 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3506 continue;
3508 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3509 whole range. */
3510 if (TREE_CODE (purpose) == RANGE_EXPR)
3512 tree lower = TREE_OPERAND (purpose, 0);
3513 tree upper = TREE_OPERAND (purpose, 1);
3515 /* If the lower bound is equal to upper, just treat it as if
3516 upper was the index. */
3517 if (simple_cst_equal (lower, upper))
3518 purpose = upper;
3519 else
3521 gimplify_init_ctor_eval_range (object, lower, upper, value,
3522 array_elt_type, pre_p, cleared);
3523 continue;
3527 if (array_elt_type)
3529 /* Do not use bitsizetype for ARRAY_REF indices. */
3530 if (TYPE_DOMAIN (TREE_TYPE (object)))
3531 purpose
3532 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3533 purpose);
3534 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3535 purpose, NULL_TREE, NULL_TREE);
3537 else
3539 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3540 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3541 unshare_expr (object), purpose, NULL_TREE);
3544 if (TREE_CODE (value) == CONSTRUCTOR
3545 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3546 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3547 pre_p, cleared);
3548 else
3550 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3551 gimplify_and_add (init, pre_p);
3552 ggc_free (init);
3557 /* Return the appropriate RHS predicate for this LHS. */
3559 gimple_predicate
3560 rhs_predicate_for (tree lhs)
3562 if (is_gimple_reg (lhs))
3563 return is_gimple_reg_rhs_or_call;
3564 else
3565 return is_gimple_mem_rhs_or_call;
3568 /* Gimplify a C99 compound literal expression. This just means adding
3569 the DECL_EXPR before the current statement and using its anonymous
3570 decl instead. */
3572 static enum gimplify_status
3573 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3574 bool (*gimple_test_f) (tree),
3575 fallback_t fallback)
3577 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3578 tree decl = DECL_EXPR_DECL (decl_s);
3579 tree init = DECL_INITIAL (decl);
3580 /* Mark the decl as addressable if the compound literal
3581 expression is addressable now, otherwise it is marked too late
3582 after we gimplify the initialization expression. */
3583 if (TREE_ADDRESSABLE (*expr_p))
3584 TREE_ADDRESSABLE (decl) = 1;
3585 /* Otherwise, if we don't need an lvalue and have a literal directly
3586 substitute it. Check if it matches the gimple predicate, as
3587 otherwise we'd generate a new temporary, and we can as well just
3588 use the decl we already have. */
3589 else if (!TREE_ADDRESSABLE (decl)
3590 && init
3591 && (fallback & fb_lvalue) == 0
3592 && gimple_test_f (init))
3594 *expr_p = init;
3595 return GS_OK;
3598 /* Preliminarily mark non-addressed complex variables as eligible
3599 for promotion to gimple registers. We'll transform their uses
3600 as we find them. */
3601 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3602 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3603 && !TREE_THIS_VOLATILE (decl)
3604 && !needs_to_live_in_memory (decl))
3605 DECL_GIMPLE_REG_P (decl) = 1;
3607 /* If the decl is not addressable, then it is being used in some
3608 expression or on the right hand side of a statement, and it can
3609 be put into a readonly data section. */
3610 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3611 TREE_READONLY (decl) = 1;
3613 /* This decl isn't mentioned in the enclosing block, so add it to the
3614 list of temps. FIXME it seems a bit of a kludge to say that
3615 anonymous artificial vars aren't pushed, but everything else is. */
3616 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3617 gimple_add_tmp_var (decl);
3619 gimplify_and_add (decl_s, pre_p);
3620 *expr_p = decl;
3621 return GS_OK;
3624 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3625 return a new CONSTRUCTOR if something changed. */
3627 static tree
3628 optimize_compound_literals_in_ctor (tree orig_ctor)
3630 tree ctor = orig_ctor;
3631 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3632 unsigned int idx, num = vec_safe_length (elts);
3634 for (idx = 0; idx < num; idx++)
3636 tree value = (*elts)[idx].value;
3637 tree newval = value;
3638 if (TREE_CODE (value) == CONSTRUCTOR)
3639 newval = optimize_compound_literals_in_ctor (value);
3640 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3642 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3643 tree decl = DECL_EXPR_DECL (decl_s);
3644 tree init = DECL_INITIAL (decl);
3646 if (!TREE_ADDRESSABLE (value)
3647 && !TREE_ADDRESSABLE (decl)
3648 && init
3649 && TREE_CODE (init) == CONSTRUCTOR)
3650 newval = optimize_compound_literals_in_ctor (init);
3652 if (newval == value)
3653 continue;
3655 if (ctor == orig_ctor)
3657 ctor = copy_node (orig_ctor);
3658 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3659 elts = CONSTRUCTOR_ELTS (ctor);
3661 (*elts)[idx].value = newval;
3663 return ctor;
3666 /* A subroutine of gimplify_modify_expr. Break out elements of a
3667 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3669 Note that we still need to clear any elements that don't have explicit
3670 initializers, so if not all elements are initialized we keep the
3671 original MODIFY_EXPR, we just remove all of the constructor elements.
3673 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3674 GS_ERROR if we would have to create a temporary when gimplifying
3675 this constructor. Otherwise, return GS_OK.
3677 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3679 static enum gimplify_status
3680 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3681 bool want_value, bool notify_temp_creation)
3683 tree object, ctor, type;
3684 enum gimplify_status ret;
3685 vec<constructor_elt, va_gc> *elts;
3687 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3689 if (!notify_temp_creation)
3691 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3692 is_gimple_lvalue, fb_lvalue);
3693 if (ret == GS_ERROR)
3694 return ret;
3697 object = TREE_OPERAND (*expr_p, 0);
3698 ctor = TREE_OPERAND (*expr_p, 1) =
3699 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3700 type = TREE_TYPE (ctor);
3701 elts = CONSTRUCTOR_ELTS (ctor);
3702 ret = GS_ALL_DONE;
3704 switch (TREE_CODE (type))
3706 case RECORD_TYPE:
3707 case UNION_TYPE:
3708 case QUAL_UNION_TYPE:
3709 case ARRAY_TYPE:
3711 struct gimplify_init_ctor_preeval_data preeval_data;
3712 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3713 bool cleared, complete_p, valid_const_initializer;
3715 /* Aggregate types must lower constructors to initialization of
3716 individual elements. The exception is that a CONSTRUCTOR node
3717 with no elements indicates zero-initialization of the whole. */
3718 if (vec_safe_is_empty (elts))
3720 if (notify_temp_creation)
3721 return GS_OK;
3722 break;
3725 /* Fetch information about the constructor to direct later processing.
3726 We might want to make static versions of it in various cases, and
3727 can only do so if it known to be a valid constant initializer. */
3728 valid_const_initializer
3729 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3730 &num_ctor_elements, &complete_p);
3732 /* If a const aggregate variable is being initialized, then it
3733 should never be a lose to promote the variable to be static. */
3734 if (valid_const_initializer
3735 && num_nonzero_elements > 1
3736 && TREE_READONLY (object)
3737 && TREE_CODE (object) == VAR_DECL
3738 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3740 if (notify_temp_creation)
3741 return GS_ERROR;
3742 DECL_INITIAL (object) = ctor;
3743 TREE_STATIC (object) = 1;
3744 if (!DECL_NAME (object))
3745 DECL_NAME (object) = create_tmp_var_name ("C");
3746 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3748 /* ??? C++ doesn't automatically append a .<number> to the
3749 assembler name, and even when it does, it looks at FE private
3750 data structures to figure out what that number should be,
3751 which are not set for this variable. I suppose this is
3752 important for local statics for inline functions, which aren't
3753 "local" in the object file sense. So in order to get a unique
3754 TU-local symbol, we must invoke the lhd version now. */
3755 lhd_set_decl_assembler_name (object);
3757 *expr_p = NULL_TREE;
3758 break;
3761 /* If there are "lots" of initialized elements, even discounting
3762 those that are not address constants (and thus *must* be
3763 computed at runtime), then partition the constructor into
3764 constant and non-constant parts. Block copy the constant
3765 parts in, then generate code for the non-constant parts. */
3766 /* TODO. There's code in cp/typeck.c to do this. */
3768 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3769 /* store_constructor will ignore the clearing of variable-sized
3770 objects. Initializers for such objects must explicitly set
3771 every field that needs to be set. */
3772 cleared = false;
3773 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3774 /* If the constructor isn't complete, clear the whole object
3775 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3777 ??? This ought not to be needed. For any element not present
3778 in the initializer, we should simply set them to zero. Except
3779 we'd need to *find* the elements that are not present, and that
3780 requires trickery to avoid quadratic compile-time behavior in
3781 large cases or excessive memory use in small cases. */
3782 cleared = true;
3783 else if (num_ctor_elements - num_nonzero_elements
3784 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3785 && num_nonzero_elements < num_ctor_elements / 4)
3786 /* If there are "lots" of zeros, it's more efficient to clear
3787 the memory and then set the nonzero elements. */
3788 cleared = true;
3789 else
3790 cleared = false;
3792 /* If there are "lots" of initialized elements, and all of them
3793 are valid address constants, then the entire initializer can
3794 be dropped to memory, and then memcpy'd out. Don't do this
3795 for sparse arrays, though, as it's more efficient to follow
3796 the standard CONSTRUCTOR behavior of memset followed by
3797 individual element initialization. Also don't do this for small
3798 all-zero initializers (which aren't big enough to merit
3799 clearing), and don't try to make bitwise copies of
3800 TREE_ADDRESSABLE types. */
3801 if (valid_const_initializer
3802 && !(cleared || num_nonzero_elements == 0)
3803 && !TREE_ADDRESSABLE (type))
3805 HOST_WIDE_INT size = int_size_in_bytes (type);
3806 unsigned int align;
3808 /* ??? We can still get unbounded array types, at least
3809 from the C++ front end. This seems wrong, but attempt
3810 to work around it for now. */
3811 if (size < 0)
3813 size = int_size_in_bytes (TREE_TYPE (object));
3814 if (size >= 0)
3815 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3818 /* Find the maximum alignment we can assume for the object. */
3819 /* ??? Make use of DECL_OFFSET_ALIGN. */
3820 if (DECL_P (object))
3821 align = DECL_ALIGN (object);
3822 else
3823 align = TYPE_ALIGN (type);
3825 /* Do a block move either if the size is so small as to make
3826 each individual move a sub-unit move on average, or if it
3827 is so large as to make individual moves inefficient. */
3828 if (size > 0
3829 && num_nonzero_elements > 1
3830 && (size < num_nonzero_elements
3831 || !can_move_by_pieces (size, align)))
3833 if (notify_temp_creation)
3834 return GS_ERROR;
3836 walk_tree (&ctor, force_labels_r, NULL, NULL);
3837 ctor = tree_output_constant_def (ctor);
3838 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3839 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3840 TREE_OPERAND (*expr_p, 1) = ctor;
3842 /* This is no longer an assignment of a CONSTRUCTOR, but
3843 we still may have processing to do on the LHS. So
3844 pretend we didn't do anything here to let that happen. */
3845 return GS_UNHANDLED;
3849 /* If the target is volatile, we have non-zero elements and more than
3850 one field to assign, initialize the target from a temporary. */
3851 if (TREE_THIS_VOLATILE (object)
3852 && !TREE_ADDRESSABLE (type)
3853 && num_nonzero_elements > 0
3854 && vec_safe_length (elts) > 1)
3856 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3857 TREE_OPERAND (*expr_p, 0) = temp;
3858 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3859 *expr_p,
3860 build2 (MODIFY_EXPR, void_type_node,
3861 object, temp));
3862 return GS_OK;
3865 if (notify_temp_creation)
3866 return GS_OK;
3868 /* If there are nonzero elements and if needed, pre-evaluate to capture
3869 elements overlapping with the lhs into temporaries. We must do this
3870 before clearing to fetch the values before they are zeroed-out. */
3871 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3873 preeval_data.lhs_base_decl = get_base_address (object);
3874 if (!DECL_P (preeval_data.lhs_base_decl))
3875 preeval_data.lhs_base_decl = NULL;
3876 preeval_data.lhs_alias_set = get_alias_set (object);
3878 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3879 pre_p, post_p, &preeval_data);
3882 if (cleared)
3884 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3885 Note that we still have to gimplify, in order to handle the
3886 case of variable sized types. Avoid shared tree structures. */
3887 CONSTRUCTOR_ELTS (ctor) = NULL;
3888 TREE_SIDE_EFFECTS (ctor) = 0;
3889 object = unshare_expr (object);
3890 gimplify_stmt (expr_p, pre_p);
3893 /* If we have not block cleared the object, or if there are nonzero
3894 elements in the constructor, add assignments to the individual
3895 scalar fields of the object. */
3896 if (!cleared || num_nonzero_elements > 0)
3897 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3899 *expr_p = NULL_TREE;
3901 break;
3903 case COMPLEX_TYPE:
3905 tree r, i;
3907 if (notify_temp_creation)
3908 return GS_OK;
3910 /* Extract the real and imaginary parts out of the ctor. */
3911 gcc_assert (elts->length () == 2);
3912 r = (*elts)[0].value;
3913 i = (*elts)[1].value;
3914 if (r == NULL || i == NULL)
3916 tree zero = build_zero_cst (TREE_TYPE (type));
3917 if (r == NULL)
3918 r = zero;
3919 if (i == NULL)
3920 i = zero;
3923 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3924 represent creation of a complex value. */
3925 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3927 ctor = build_complex (type, r, i);
3928 TREE_OPERAND (*expr_p, 1) = ctor;
3930 else
3932 ctor = build2 (COMPLEX_EXPR, type, r, i);
3933 TREE_OPERAND (*expr_p, 1) = ctor;
3934 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3935 pre_p,
3936 post_p,
3937 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3938 fb_rvalue);
3941 break;
3943 case VECTOR_TYPE:
3945 unsigned HOST_WIDE_INT ix;
3946 constructor_elt *ce;
3948 if (notify_temp_creation)
3949 return GS_OK;
3951 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3952 if (TREE_CONSTANT (ctor))
3954 bool constant_p = true;
3955 tree value;
3957 /* Even when ctor is constant, it might contain non-*_CST
3958 elements, such as addresses or trapping values like
3959 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3960 in VECTOR_CST nodes. */
3961 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3962 if (!CONSTANT_CLASS_P (value))
3964 constant_p = false;
3965 break;
3968 if (constant_p)
3970 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3971 break;
3974 /* Don't reduce an initializer constant even if we can't
3975 make a VECTOR_CST. It won't do anything for us, and it'll
3976 prevent us from representing it as a single constant. */
3977 if (initializer_constant_valid_p (ctor, type))
3978 break;
3980 TREE_CONSTANT (ctor) = 0;
3983 /* Vector types use CONSTRUCTOR all the way through gimple
3984 compilation as a general initializer. */
3985 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
3987 enum gimplify_status tret;
3988 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3989 fb_rvalue);
3990 if (tret == GS_ERROR)
3991 ret = GS_ERROR;
3993 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3994 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3996 break;
3998 default:
3999 /* So how did we get a CONSTRUCTOR for a scalar type? */
4000 gcc_unreachable ();
4003 if (ret == GS_ERROR)
4004 return GS_ERROR;
4005 else if (want_value)
4007 *expr_p = object;
4008 return GS_OK;
4010 else
4012 /* If we have gimplified both sides of the initializer but have
4013 not emitted an assignment, do so now. */
4014 if (*expr_p)
4016 tree lhs = TREE_OPERAND (*expr_p, 0);
4017 tree rhs = TREE_OPERAND (*expr_p, 1);
4018 gimple init = gimple_build_assign (lhs, rhs);
4019 gimplify_seq_add_stmt (pre_p, init);
4020 *expr_p = NULL;
4023 return GS_ALL_DONE;
4027 /* Given a pointer value OP0, return a simplified version of an
4028 indirection through OP0, or NULL_TREE if no simplification is
4029 possible. This may only be applied to a rhs of an expression.
4030 Note that the resulting type may be different from the type pointed
4031 to in the sense that it is still compatible from the langhooks
4032 point of view. */
4034 static tree
4035 gimple_fold_indirect_ref_rhs (tree t)
4037 return gimple_fold_indirect_ref (t);
4040 /* Subroutine of gimplify_modify_expr to do simplifications of
4041 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4042 something changes. */
4044 static enum gimplify_status
4045 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4046 gimple_seq *pre_p, gimple_seq *post_p,
4047 bool want_value)
4049 enum gimplify_status ret = GS_UNHANDLED;
4050 bool changed;
4054 changed = false;
4055 switch (TREE_CODE (*from_p))
4057 case VAR_DECL:
4058 /* If we're assigning from a read-only variable initialized with
4059 a constructor, do the direct assignment from the constructor,
4060 but only if neither source nor target are volatile since this
4061 latter assignment might end up being done on a per-field basis. */
4062 if (DECL_INITIAL (*from_p)
4063 && TREE_READONLY (*from_p)
4064 && !TREE_THIS_VOLATILE (*from_p)
4065 && !TREE_THIS_VOLATILE (*to_p)
4066 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4068 tree old_from = *from_p;
4069 enum gimplify_status subret;
4071 /* Move the constructor into the RHS. */
4072 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4074 /* Let's see if gimplify_init_constructor will need to put
4075 it in memory. */
4076 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4077 false, true);
4078 if (subret == GS_ERROR)
4080 /* If so, revert the change. */
4081 *from_p = old_from;
4083 else
4085 ret = GS_OK;
4086 changed = true;
4089 break;
4090 case INDIRECT_REF:
4092 /* If we have code like
4094 *(const A*)(A*)&x
4096 where the type of "x" is a (possibly cv-qualified variant
4097 of "A"), treat the entire expression as identical to "x".
4098 This kind of code arises in C++ when an object is bound
4099 to a const reference, and if "x" is a TARGET_EXPR we want
4100 to take advantage of the optimization below. */
4101 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4102 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4103 if (t)
4105 if (TREE_THIS_VOLATILE (t) != volatile_p)
4107 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4108 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4109 build_fold_addr_expr (t));
4110 if (REFERENCE_CLASS_P (t))
4111 TREE_THIS_VOLATILE (t) = volatile_p;
4113 *from_p = t;
4114 ret = GS_OK;
4115 changed = true;
4117 break;
4120 case TARGET_EXPR:
4122 /* If we are initializing something from a TARGET_EXPR, strip the
4123 TARGET_EXPR and initialize it directly, if possible. This can't
4124 be done if the initializer is void, since that implies that the
4125 temporary is set in some non-trivial way.
4127 ??? What about code that pulls out the temp and uses it
4128 elsewhere? I think that such code never uses the TARGET_EXPR as
4129 an initializer. If I'm wrong, we'll die because the temp won't
4130 have any RTL. In that case, I guess we'll need to replace
4131 references somehow. */
4132 tree init = TARGET_EXPR_INITIAL (*from_p);
4134 if (init
4135 && !VOID_TYPE_P (TREE_TYPE (init)))
4137 *from_p = init;
4138 ret = GS_OK;
4139 changed = true;
4142 break;
4144 case COMPOUND_EXPR:
4145 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4146 caught. */
4147 gimplify_compound_expr (from_p, pre_p, true);
4148 ret = GS_OK;
4149 changed = true;
4150 break;
4152 case CONSTRUCTOR:
4153 /* If we already made some changes, let the front end have a
4154 crack at this before we break it down. */
4155 if (ret != GS_UNHANDLED)
4156 break;
4157 /* If we're initializing from a CONSTRUCTOR, break this into
4158 individual MODIFY_EXPRs. */
4159 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4160 false);
4162 case COND_EXPR:
4163 /* If we're assigning to a non-register type, push the assignment
4164 down into the branches. This is mandatory for ADDRESSABLE types,
4165 since we cannot generate temporaries for such, but it saves a
4166 copy in other cases as well. */
4167 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4169 /* This code should mirror the code in gimplify_cond_expr. */
4170 enum tree_code code = TREE_CODE (*expr_p);
4171 tree cond = *from_p;
4172 tree result = *to_p;
4174 ret = gimplify_expr (&result, pre_p, post_p,
4175 is_gimple_lvalue, fb_lvalue);
4176 if (ret != GS_ERROR)
4177 ret = GS_OK;
4179 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4180 TREE_OPERAND (cond, 1)
4181 = build2 (code, void_type_node, result,
4182 TREE_OPERAND (cond, 1));
4183 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4184 TREE_OPERAND (cond, 2)
4185 = build2 (code, void_type_node, unshare_expr (result),
4186 TREE_OPERAND (cond, 2));
4188 TREE_TYPE (cond) = void_type_node;
4189 recalculate_side_effects (cond);
4191 if (want_value)
4193 gimplify_and_add (cond, pre_p);
4194 *expr_p = unshare_expr (result);
4196 else
4197 *expr_p = cond;
4198 return ret;
4200 break;
4202 case CALL_EXPR:
4203 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4204 return slot so that we don't generate a temporary. */
4205 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4206 && aggregate_value_p (*from_p, *from_p))
4208 bool use_target;
4210 if (!(rhs_predicate_for (*to_p))(*from_p))
4211 /* If we need a temporary, *to_p isn't accurate. */
4212 use_target = false;
4213 /* It's OK to use the return slot directly unless it's an NRV. */
4214 else if (TREE_CODE (*to_p) == RESULT_DECL
4215 && DECL_NAME (*to_p) == NULL_TREE
4216 && needs_to_live_in_memory (*to_p))
4217 use_target = true;
4218 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4219 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4220 /* Don't force regs into memory. */
4221 use_target = false;
4222 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4223 /* It's OK to use the target directly if it's being
4224 initialized. */
4225 use_target = true;
4226 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4227 /* Always use the target and thus RSO for variable-sized types.
4228 GIMPLE cannot deal with a variable-sized assignment
4229 embedded in a call statement. */
4230 use_target = true;
4231 else if (TREE_CODE (*to_p) != SSA_NAME
4232 && (!is_gimple_variable (*to_p)
4233 || needs_to_live_in_memory (*to_p)))
4234 /* Don't use the original target if it's already addressable;
4235 if its address escapes, and the called function uses the
4236 NRV optimization, a conforming program could see *to_p
4237 change before the called function returns; see c++/19317.
4238 When optimizing, the return_slot pass marks more functions
4239 as safe after we have escape info. */
4240 use_target = false;
4241 else
4242 use_target = true;
4244 if (use_target)
4246 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4247 mark_addressable (*to_p);
4250 break;
4252 case WITH_SIZE_EXPR:
4253 /* Likewise for calls that return an aggregate of non-constant size,
4254 since we would not be able to generate a temporary at all. */
4255 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4257 *from_p = TREE_OPERAND (*from_p, 0);
4258 /* We don't change ret in this case because the
4259 WITH_SIZE_EXPR might have been added in
4260 gimplify_modify_expr, so returning GS_OK would lead to an
4261 infinite loop. */
4262 changed = true;
4264 break;
4266 /* If we're initializing from a container, push the initialization
4267 inside it. */
4268 case CLEANUP_POINT_EXPR:
4269 case BIND_EXPR:
4270 case STATEMENT_LIST:
4272 tree wrap = *from_p;
4273 tree t;
4275 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4276 fb_lvalue);
4277 if (ret != GS_ERROR)
4278 ret = GS_OK;
4280 t = voidify_wrapper_expr (wrap, *expr_p);
4281 gcc_assert (t == *expr_p);
4283 if (want_value)
4285 gimplify_and_add (wrap, pre_p);
4286 *expr_p = unshare_expr (*to_p);
4288 else
4289 *expr_p = wrap;
4290 return GS_OK;
4293 case COMPOUND_LITERAL_EXPR:
4295 tree complit = TREE_OPERAND (*expr_p, 1);
4296 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4297 tree decl = DECL_EXPR_DECL (decl_s);
4298 tree init = DECL_INITIAL (decl);
4300 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4301 into struct T x = { 0, 1, 2 } if the address of the
4302 compound literal has never been taken. */
4303 if (!TREE_ADDRESSABLE (complit)
4304 && !TREE_ADDRESSABLE (decl)
4305 && init)
4307 *expr_p = copy_node (*expr_p);
4308 TREE_OPERAND (*expr_p, 1) = init;
4309 return GS_OK;
4313 default:
4314 break;
4317 while (changed);
4319 return ret;
4323 /* Return true if T looks like a valid GIMPLE statement. */
4325 static bool
4326 is_gimple_stmt (tree t)
4328 const enum tree_code code = TREE_CODE (t);
4330 switch (code)
4332 case NOP_EXPR:
4333 /* The only valid NOP_EXPR is the empty statement. */
4334 return IS_EMPTY_STMT (t);
4336 case BIND_EXPR:
4337 case COND_EXPR:
4338 /* These are only valid if they're void. */
4339 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4341 case SWITCH_EXPR:
4342 case GOTO_EXPR:
4343 case RETURN_EXPR:
4344 case LABEL_EXPR:
4345 case CASE_LABEL_EXPR:
4346 case TRY_CATCH_EXPR:
4347 case TRY_FINALLY_EXPR:
4348 case EH_FILTER_EXPR:
4349 case CATCH_EXPR:
4350 case ASM_EXPR:
4351 case STATEMENT_LIST:
4352 case OACC_PARALLEL:
4353 case OACC_KERNELS:
4354 case OACC_DATA:
4355 case OACC_HOST_DATA:
4356 case OACC_DECLARE:
4357 case OACC_UPDATE:
4358 case OACC_ENTER_DATA:
4359 case OACC_EXIT_DATA:
4360 case OACC_WAIT:
4361 case OACC_CACHE:
4362 case OMP_PARALLEL:
4363 case OMP_FOR:
4364 case OMP_SIMD:
4365 case CILK_SIMD:
4366 case OMP_DISTRIBUTE:
4367 case OACC_LOOP:
4368 case OMP_SECTIONS:
4369 case OMP_SECTION:
4370 case OMP_SINGLE:
4371 case OMP_MASTER:
4372 case OMP_TASKGROUP:
4373 case OMP_ORDERED:
4374 case OMP_CRITICAL:
4375 case OMP_TASK:
4376 /* These are always void. */
4377 return true;
4379 case CALL_EXPR:
4380 case MODIFY_EXPR:
4381 case PREDICT_EXPR:
4382 /* These are valid regardless of their type. */
4383 return true;
4385 default:
4386 return false;
4391 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4392 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4393 DECL_GIMPLE_REG_P set.
4395 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4396 other, unmodified part of the complex object just before the total store.
4397 As a consequence, if the object is still uninitialized, an undefined value
4398 will be loaded into a register, which may result in a spurious exception
4399 if the register is floating-point and the value happens to be a signaling
4400 NaN for example. Then the fully-fledged complex operations lowering pass
4401 followed by a DCE pass are necessary in order to fix things up. */
4403 static enum gimplify_status
4404 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4405 bool want_value)
4407 enum tree_code code, ocode;
4408 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4410 lhs = TREE_OPERAND (*expr_p, 0);
4411 rhs = TREE_OPERAND (*expr_p, 1);
4412 code = TREE_CODE (lhs);
4413 lhs = TREE_OPERAND (lhs, 0);
4415 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4416 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4417 TREE_NO_WARNING (other) = 1;
4418 other = get_formal_tmp_var (other, pre_p);
4420 realpart = code == REALPART_EXPR ? rhs : other;
4421 imagpart = code == REALPART_EXPR ? other : rhs;
4423 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4424 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4425 else
4426 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4428 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4429 *expr_p = (want_value) ? rhs : NULL_TREE;
4431 return GS_ALL_DONE;
4434 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4436 modify_expr
4437 : varname '=' rhs
4438 | '*' ID '=' rhs
4440 PRE_P points to the list where side effects that must happen before
4441 *EXPR_P should be stored.
4443 POST_P points to the list where side effects that must happen after
4444 *EXPR_P should be stored.
4446 WANT_VALUE is nonzero iff we want to use the value of this expression
4447 in another expression. */
4449 static enum gimplify_status
4450 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4451 bool want_value)
4453 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4454 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4455 enum gimplify_status ret = GS_UNHANDLED;
4456 gimple assign;
4457 location_t loc = EXPR_LOCATION (*expr_p);
4458 gimple_stmt_iterator gsi;
4460 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4461 || TREE_CODE (*expr_p) == INIT_EXPR);
4463 /* Trying to simplify a clobber using normal logic doesn't work,
4464 so handle it here. */
4465 if (TREE_CLOBBER_P (*from_p))
4467 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4468 if (ret == GS_ERROR)
4469 return ret;
4470 gcc_assert (!want_value
4471 && (TREE_CODE (*to_p) == VAR_DECL
4472 || TREE_CODE (*to_p) == MEM_REF));
4473 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4474 *expr_p = NULL;
4475 return GS_ALL_DONE;
4478 /* Insert pointer conversions required by the middle-end that are not
4479 required by the frontend. This fixes middle-end type checking for
4480 for example gcc.dg/redecl-6.c. */
4481 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4483 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4484 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4485 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4488 /* See if any simplifications can be done based on what the RHS is. */
4489 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4490 want_value);
4491 if (ret != GS_UNHANDLED)
4492 return ret;
4494 /* For zero sized types only gimplify the left hand side and right hand
4495 side as statements and throw away the assignment. Do this after
4496 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4497 types properly. */
4498 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4500 gimplify_stmt (from_p, pre_p);
4501 gimplify_stmt (to_p, pre_p);
4502 *expr_p = NULL_TREE;
4503 return GS_ALL_DONE;
4506 /* If the value being copied is of variable width, compute the length
4507 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4508 before gimplifying any of the operands so that we can resolve any
4509 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4510 the size of the expression to be copied, not of the destination, so
4511 that is what we must do here. */
4512 maybe_with_size_expr (from_p);
4514 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4515 if (ret == GS_ERROR)
4516 return ret;
4518 /* As a special case, we have to temporarily allow for assignments
4519 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4520 a toplevel statement, when gimplifying the GENERIC expression
4521 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4522 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4524 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4525 prevent gimplify_expr from trying to create a new temporary for
4526 foo's LHS, we tell it that it should only gimplify until it
4527 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4528 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4529 and all we need to do here is set 'a' to be its LHS. */
4530 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4531 fb_rvalue);
4532 if (ret == GS_ERROR)
4533 return ret;
4535 /* Now see if the above changed *from_p to something we handle specially. */
4536 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4537 want_value);
4538 if (ret != GS_UNHANDLED)
4539 return ret;
4541 /* If we've got a variable sized assignment between two lvalues (i.e. does
4542 not involve a call), then we can make things a bit more straightforward
4543 by converting the assignment to memcpy or memset. */
4544 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4546 tree from = TREE_OPERAND (*from_p, 0);
4547 tree size = TREE_OPERAND (*from_p, 1);
4549 if (TREE_CODE (from) == CONSTRUCTOR)
4550 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4552 if (is_gimple_addressable (from))
4554 *from_p = from;
4555 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4556 pre_p);
4560 /* Transform partial stores to non-addressable complex variables into
4561 total stores. This allows us to use real instead of virtual operands
4562 for these variables, which improves optimization. */
4563 if ((TREE_CODE (*to_p) == REALPART_EXPR
4564 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4565 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4566 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4568 /* Try to alleviate the effects of the gimplification creating artificial
4569 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4570 if (!gimplify_ctxp->into_ssa
4571 && TREE_CODE (*from_p) == VAR_DECL
4572 && DECL_IGNORED_P (*from_p)
4573 && DECL_P (*to_p)
4574 && !DECL_IGNORED_P (*to_p))
4576 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4577 DECL_NAME (*from_p)
4578 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4579 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4580 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4583 if (want_value && TREE_THIS_VOLATILE (*to_p))
4584 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4586 if (TREE_CODE (*from_p) == CALL_EXPR)
4588 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4589 instead of a GIMPLE_ASSIGN. */
4590 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4591 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4592 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4593 assign = gimple_build_call_from_tree (*from_p);
4594 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4595 notice_special_calls (assign);
4596 if (!gimple_call_noreturn_p (assign))
4597 gimple_call_set_lhs (assign, *to_p);
4599 else
4601 assign = gimple_build_assign (*to_p, *from_p);
4602 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4605 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4607 /* We should have got an SSA name from the start. */
4608 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4611 gimplify_seq_add_stmt (pre_p, assign);
4612 gsi = gsi_last (*pre_p);
4613 maybe_fold_stmt (&gsi);
4615 if (want_value)
4617 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4618 return GS_OK;
4620 else
4621 *expr_p = NULL;
4623 return GS_ALL_DONE;
4626 /* Gimplify a comparison between two variable-sized objects. Do this
4627 with a call to BUILT_IN_MEMCMP. */
4629 static enum gimplify_status
4630 gimplify_variable_sized_compare (tree *expr_p)
4632 location_t loc = EXPR_LOCATION (*expr_p);
4633 tree op0 = TREE_OPERAND (*expr_p, 0);
4634 tree op1 = TREE_OPERAND (*expr_p, 1);
4635 tree t, arg, dest, src, expr;
4637 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4638 arg = unshare_expr (arg);
4639 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4640 src = build_fold_addr_expr_loc (loc, op1);
4641 dest = build_fold_addr_expr_loc (loc, op0);
4642 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4643 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4645 expr
4646 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4647 SET_EXPR_LOCATION (expr, loc);
4648 *expr_p = expr;
4650 return GS_OK;
4653 /* Gimplify a comparison between two aggregate objects of integral scalar
4654 mode as a comparison between the bitwise equivalent scalar values. */
4656 static enum gimplify_status
4657 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4659 location_t loc = EXPR_LOCATION (*expr_p);
4660 tree op0 = TREE_OPERAND (*expr_p, 0);
4661 tree op1 = TREE_OPERAND (*expr_p, 1);
4663 tree type = TREE_TYPE (op0);
4664 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4666 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4667 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4669 *expr_p
4670 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4672 return GS_OK;
4675 /* Gimplify an expression sequence. This function gimplifies each
4676 expression and rewrites the original expression with the last
4677 expression of the sequence in GIMPLE form.
4679 PRE_P points to the list where the side effects for all the
4680 expressions in the sequence will be emitted.
4682 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4684 static enum gimplify_status
4685 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4687 tree t = *expr_p;
4691 tree *sub_p = &TREE_OPERAND (t, 0);
4693 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4694 gimplify_compound_expr (sub_p, pre_p, false);
4695 else
4696 gimplify_stmt (sub_p, pre_p);
4698 t = TREE_OPERAND (t, 1);
4700 while (TREE_CODE (t) == COMPOUND_EXPR);
4702 *expr_p = t;
4703 if (want_value)
4704 return GS_OK;
4705 else
4707 gimplify_stmt (expr_p, pre_p);
4708 return GS_ALL_DONE;
4712 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4713 gimplify. After gimplification, EXPR_P will point to a new temporary
4714 that holds the original value of the SAVE_EXPR node.
4716 PRE_P points to the list where side effects that must happen before
4717 *EXPR_P should be stored. */
4719 static enum gimplify_status
4720 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4722 enum gimplify_status ret = GS_ALL_DONE;
4723 tree val;
4725 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4726 val = TREE_OPERAND (*expr_p, 0);
4728 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4729 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4731 /* The operand may be a void-valued expression such as SAVE_EXPRs
4732 generated by the Java frontend for class initialization. It is
4733 being executed only for its side-effects. */
4734 if (TREE_TYPE (val) == void_type_node)
4736 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4737 is_gimple_stmt, fb_none);
4738 val = NULL;
4740 else
4741 val = get_initialized_tmp_var (val, pre_p, post_p);
4743 TREE_OPERAND (*expr_p, 0) = val;
4744 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4747 *expr_p = val;
4749 return ret;
4752 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4754 unary_expr
4755 : ...
4756 | '&' varname
4759 PRE_P points to the list where side effects that must happen before
4760 *EXPR_P should be stored.
4762 POST_P points to the list where side effects that must happen after
4763 *EXPR_P should be stored. */
4765 static enum gimplify_status
4766 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4768 tree expr = *expr_p;
4769 tree op0 = TREE_OPERAND (expr, 0);
4770 enum gimplify_status ret;
4771 location_t loc = EXPR_LOCATION (*expr_p);
4773 switch (TREE_CODE (op0))
4775 case INDIRECT_REF:
4776 do_indirect_ref:
4777 /* Check if we are dealing with an expression of the form '&*ptr'.
4778 While the front end folds away '&*ptr' into 'ptr', these
4779 expressions may be generated internally by the compiler (e.g.,
4780 builtins like __builtin_va_end). */
4781 /* Caution: the silent array decomposition semantics we allow for
4782 ADDR_EXPR means we can't always discard the pair. */
4783 /* Gimplification of the ADDR_EXPR operand may drop
4784 cv-qualification conversions, so make sure we add them if
4785 needed. */
4787 tree op00 = TREE_OPERAND (op0, 0);
4788 tree t_expr = TREE_TYPE (expr);
4789 tree t_op00 = TREE_TYPE (op00);
4791 if (!useless_type_conversion_p (t_expr, t_op00))
4792 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4793 *expr_p = op00;
4794 ret = GS_OK;
4796 break;
4798 case VIEW_CONVERT_EXPR:
4799 /* Take the address of our operand and then convert it to the type of
4800 this ADDR_EXPR.
4802 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4803 all clear. The impact of this transformation is even less clear. */
4805 /* If the operand is a useless conversion, look through it. Doing so
4806 guarantees that the ADDR_EXPR and its operand will remain of the
4807 same type. */
4808 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4809 op0 = TREE_OPERAND (op0, 0);
4811 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4812 build_fold_addr_expr_loc (loc,
4813 TREE_OPERAND (op0, 0)));
4814 ret = GS_OK;
4815 break;
4817 default:
4818 /* We use fb_either here because the C frontend sometimes takes
4819 the address of a call that returns a struct; see
4820 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4821 the implied temporary explicit. */
4823 /* Make the operand addressable. */
4824 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4825 is_gimple_addressable, fb_either);
4826 if (ret == GS_ERROR)
4827 break;
4829 /* Then mark it. Beware that it may not be possible to do so directly
4830 if a temporary has been created by the gimplification. */
4831 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4833 op0 = TREE_OPERAND (expr, 0);
4835 /* For various reasons, the gimplification of the expression
4836 may have made a new INDIRECT_REF. */
4837 if (TREE_CODE (op0) == INDIRECT_REF)
4838 goto do_indirect_ref;
4840 mark_addressable (TREE_OPERAND (expr, 0));
4842 /* The FEs may end up building ADDR_EXPRs early on a decl with
4843 an incomplete type. Re-build ADDR_EXPRs in canonical form
4844 here. */
4845 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4846 *expr_p = build_fold_addr_expr (op0);
4848 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4849 recompute_tree_invariant_for_addr_expr (*expr_p);
4851 /* If we re-built the ADDR_EXPR add a conversion to the original type
4852 if required. */
4853 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4854 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4856 break;
4859 return ret;
4862 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4863 value; output operands should be a gimple lvalue. */
4865 static enum gimplify_status
4866 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4868 tree expr;
4869 int noutputs;
4870 const char **oconstraints;
4871 int i;
4872 tree link;
4873 const char *constraint;
4874 bool allows_mem, allows_reg, is_inout;
4875 enum gimplify_status ret, tret;
4876 gimple stmt;
4877 vec<tree, va_gc> *inputs;
4878 vec<tree, va_gc> *outputs;
4879 vec<tree, va_gc> *clobbers;
4880 vec<tree, va_gc> *labels;
4881 tree link_next;
4883 expr = *expr_p;
4884 noutputs = list_length (ASM_OUTPUTS (expr));
4885 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4887 inputs = NULL;
4888 outputs = NULL;
4889 clobbers = NULL;
4890 labels = NULL;
4892 ret = GS_ALL_DONE;
4893 link_next = NULL_TREE;
4894 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4896 bool ok;
4897 size_t constraint_len;
4899 link_next = TREE_CHAIN (link);
4901 oconstraints[i]
4902 = constraint
4903 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4904 constraint_len = strlen (constraint);
4905 if (constraint_len == 0)
4906 continue;
4908 ok = parse_output_constraint (&constraint, i, 0, 0,
4909 &allows_mem, &allows_reg, &is_inout);
4910 if (!ok)
4912 ret = GS_ERROR;
4913 is_inout = false;
4916 if (!allows_reg && allows_mem)
4917 mark_addressable (TREE_VALUE (link));
4919 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4920 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4921 fb_lvalue | fb_mayfail);
4922 if (tret == GS_ERROR)
4924 error ("invalid lvalue in asm output %d", i);
4925 ret = tret;
4928 vec_safe_push (outputs, link);
4929 TREE_CHAIN (link) = NULL_TREE;
4931 if (is_inout)
4933 /* An input/output operand. To give the optimizers more
4934 flexibility, split it into separate input and output
4935 operands. */
4936 tree input;
4937 char buf[10];
4939 /* Turn the in/out constraint into an output constraint. */
4940 char *p = xstrdup (constraint);
4941 p[0] = '=';
4942 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4944 /* And add a matching input constraint. */
4945 if (allows_reg)
4947 sprintf (buf, "%d", i);
4949 /* If there are multiple alternatives in the constraint,
4950 handle each of them individually. Those that allow register
4951 will be replaced with operand number, the others will stay
4952 unchanged. */
4953 if (strchr (p, ',') != NULL)
4955 size_t len = 0, buflen = strlen (buf);
4956 char *beg, *end, *str, *dst;
4958 for (beg = p + 1;;)
4960 end = strchr (beg, ',');
4961 if (end == NULL)
4962 end = strchr (beg, '\0');
4963 if ((size_t) (end - beg) < buflen)
4964 len += buflen + 1;
4965 else
4966 len += end - beg + 1;
4967 if (*end)
4968 beg = end + 1;
4969 else
4970 break;
4973 str = (char *) alloca (len);
4974 for (beg = p + 1, dst = str;;)
4976 const char *tem;
4977 bool mem_p, reg_p, inout_p;
4979 end = strchr (beg, ',');
4980 if (end)
4981 *end = '\0';
4982 beg[-1] = '=';
4983 tem = beg - 1;
4984 parse_output_constraint (&tem, i, 0, 0,
4985 &mem_p, &reg_p, &inout_p);
4986 if (dst != str)
4987 *dst++ = ',';
4988 if (reg_p)
4990 memcpy (dst, buf, buflen);
4991 dst += buflen;
4993 else
4995 if (end)
4996 len = end - beg;
4997 else
4998 len = strlen (beg);
4999 memcpy (dst, beg, len);
5000 dst += len;
5002 if (end)
5003 beg = end + 1;
5004 else
5005 break;
5007 *dst = '\0';
5008 input = build_string (dst - str, str);
5010 else
5011 input = build_string (strlen (buf), buf);
5013 else
5014 input = build_string (constraint_len - 1, constraint + 1);
5016 free (p);
5018 input = build_tree_list (build_tree_list (NULL_TREE, input),
5019 unshare_expr (TREE_VALUE (link)));
5020 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5024 link_next = NULL_TREE;
5025 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5027 link_next = TREE_CHAIN (link);
5028 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5029 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5030 oconstraints, &allows_mem, &allows_reg);
5032 /* If we can't make copies, we can only accept memory. */
5033 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5035 if (allows_mem)
5036 allows_reg = 0;
5037 else
5039 error ("impossible constraint in %<asm%>");
5040 error ("non-memory input %d must stay in memory", i);
5041 return GS_ERROR;
5045 /* If the operand is a memory input, it should be an lvalue. */
5046 if (!allows_reg && allows_mem)
5048 tree inputv = TREE_VALUE (link);
5049 STRIP_NOPS (inputv);
5050 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5051 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5052 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5053 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5054 TREE_VALUE (link) = error_mark_node;
5055 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5056 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5057 mark_addressable (TREE_VALUE (link));
5058 if (tret == GS_ERROR)
5060 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5061 input_location = EXPR_LOCATION (TREE_VALUE (link));
5062 error ("memory input %d is not directly addressable", i);
5063 ret = tret;
5066 else
5068 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5069 is_gimple_asm_val, fb_rvalue);
5070 if (tret == GS_ERROR)
5071 ret = tret;
5074 TREE_CHAIN (link) = NULL_TREE;
5075 vec_safe_push (inputs, link);
5078 link_next = NULL_TREE;
5079 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5081 link_next = TREE_CHAIN (link);
5082 TREE_CHAIN (link) = NULL_TREE;
5083 vec_safe_push (clobbers, link);
5086 link_next = NULL_TREE;
5087 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5089 link_next = TREE_CHAIN (link);
5090 TREE_CHAIN (link) = NULL_TREE;
5091 vec_safe_push (labels, link);
5094 /* Do not add ASMs with errors to the gimple IL stream. */
5095 if (ret != GS_ERROR)
5097 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5098 inputs, outputs, clobbers, labels);
5100 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5101 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5103 gimplify_seq_add_stmt (pre_p, stmt);
5106 return ret;
5109 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5110 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5111 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5112 return to this function.
5114 FIXME should we complexify the prequeue handling instead? Or use flags
5115 for all the cleanups and let the optimizer tighten them up? The current
5116 code seems pretty fragile; it will break on a cleanup within any
5117 non-conditional nesting. But any such nesting would be broken, anyway;
5118 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5119 and continues out of it. We can do that at the RTL level, though, so
5120 having an optimizer to tighten up try/finally regions would be a Good
5121 Thing. */
5123 static enum gimplify_status
5124 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5126 gimple_stmt_iterator iter;
5127 gimple_seq body_sequence = NULL;
5129 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5131 /* We only care about the number of conditions between the innermost
5132 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5133 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5134 int old_conds = gimplify_ctxp->conditions;
5135 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5136 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5137 gimplify_ctxp->conditions = 0;
5138 gimplify_ctxp->conditional_cleanups = NULL;
5139 gimplify_ctxp->in_cleanup_point_expr = true;
5141 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5143 gimplify_ctxp->conditions = old_conds;
5144 gimplify_ctxp->conditional_cleanups = old_cleanups;
5145 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5147 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5149 gimple wce = gsi_stmt (iter);
5151 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5153 if (gsi_one_before_end_p (iter))
5155 /* Note that gsi_insert_seq_before and gsi_remove do not
5156 scan operands, unlike some other sequence mutators. */
5157 if (!gimple_wce_cleanup_eh_only (wce))
5158 gsi_insert_seq_before_without_update (&iter,
5159 gimple_wce_cleanup (wce),
5160 GSI_SAME_STMT);
5161 gsi_remove (&iter, true);
5162 break;
5164 else
5166 gimple_statement_try *gtry;
5167 gimple_seq seq;
5168 enum gimple_try_flags kind;
5170 if (gimple_wce_cleanup_eh_only (wce))
5171 kind = GIMPLE_TRY_CATCH;
5172 else
5173 kind = GIMPLE_TRY_FINALLY;
5174 seq = gsi_split_seq_after (iter);
5176 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5177 /* Do not use gsi_replace here, as it may scan operands.
5178 We want to do a simple structural modification only. */
5179 gsi_set_stmt (&iter, gtry);
5180 iter = gsi_start (gtry->eval);
5183 else
5184 gsi_next (&iter);
5187 gimplify_seq_add_seq (pre_p, body_sequence);
5188 if (temp)
5190 *expr_p = temp;
5191 return GS_OK;
5193 else
5195 *expr_p = NULL;
5196 return GS_ALL_DONE;
5200 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5201 is the cleanup action required. EH_ONLY is true if the cleanup should
5202 only be executed if an exception is thrown, not on normal exit. */
5204 static void
5205 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5207 gimple wce;
5208 gimple_seq cleanup_stmts = NULL;
5210 /* Errors can result in improperly nested cleanups. Which results in
5211 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5212 if (seen_error ())
5213 return;
5215 if (gimple_conditional_context ())
5217 /* If we're in a conditional context, this is more complex. We only
5218 want to run the cleanup if we actually ran the initialization that
5219 necessitates it, but we want to run it after the end of the
5220 conditional context. So we wrap the try/finally around the
5221 condition and use a flag to determine whether or not to actually
5222 run the destructor. Thus
5224 test ? f(A()) : 0
5226 becomes (approximately)
5228 flag = 0;
5229 try {
5230 if (test) { A::A(temp); flag = 1; val = f(temp); }
5231 else { val = 0; }
5232 } finally {
5233 if (flag) A::~A(temp);
5237 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5238 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5239 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5241 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5242 gimplify_stmt (&cleanup, &cleanup_stmts);
5243 wce = gimple_build_wce (cleanup_stmts);
5245 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5246 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5247 gimplify_seq_add_stmt (pre_p, ftrue);
5249 /* Because of this manipulation, and the EH edges that jump
5250 threading cannot redirect, the temporary (VAR) will appear
5251 to be used uninitialized. Don't warn. */
5252 TREE_NO_WARNING (var) = 1;
5254 else
5256 gimplify_stmt (&cleanup, &cleanup_stmts);
5257 wce = gimple_build_wce (cleanup_stmts);
5258 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5259 gimplify_seq_add_stmt (pre_p, wce);
5263 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5265 static enum gimplify_status
5266 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5268 tree targ = *expr_p;
5269 tree temp = TARGET_EXPR_SLOT (targ);
5270 tree init = TARGET_EXPR_INITIAL (targ);
5271 enum gimplify_status ret;
5273 if (init)
5275 tree cleanup = NULL_TREE;
5277 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5278 to the temps list. Handle also variable length TARGET_EXPRs. */
5279 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5281 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5282 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5283 gimplify_vla_decl (temp, pre_p);
5285 else
5286 gimple_add_tmp_var (temp);
5288 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5289 expression is supposed to initialize the slot. */
5290 if (VOID_TYPE_P (TREE_TYPE (init)))
5291 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5292 else
5294 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5295 init = init_expr;
5296 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5297 init = NULL;
5298 ggc_free (init_expr);
5300 if (ret == GS_ERROR)
5302 /* PR c++/28266 Make sure this is expanded only once. */
5303 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5304 return GS_ERROR;
5306 if (init)
5307 gimplify_and_add (init, pre_p);
5309 /* If needed, push the cleanup for the temp. */
5310 if (TARGET_EXPR_CLEANUP (targ))
5312 if (CLEANUP_EH_ONLY (targ))
5313 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5314 CLEANUP_EH_ONLY (targ), pre_p);
5315 else
5316 cleanup = TARGET_EXPR_CLEANUP (targ);
5319 /* Add a clobber for the temporary going out of scope, like
5320 gimplify_bind_expr. */
5321 if (gimplify_ctxp->in_cleanup_point_expr
5322 && needs_to_live_in_memory (temp)
5323 && flag_stack_reuse == SR_ALL)
5325 tree clobber = build_constructor (TREE_TYPE (temp),
5326 NULL);
5327 TREE_THIS_VOLATILE (clobber) = true;
5328 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5329 if (cleanup)
5330 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5331 clobber);
5332 else
5333 cleanup = clobber;
5336 if (cleanup)
5337 gimple_push_cleanup (temp, cleanup, false, pre_p);
5339 /* Only expand this once. */
5340 TREE_OPERAND (targ, 3) = init;
5341 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5343 else
5344 /* We should have expanded this before. */
5345 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5347 *expr_p = temp;
5348 return GS_OK;
5351 /* Gimplification of expression trees. */
5353 /* Gimplify an expression which appears at statement context. The
5354 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5355 NULL, a new sequence is allocated.
5357 Return true if we actually added a statement to the queue. */
5359 bool
5360 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5362 gimple_seq_node last;
5364 last = gimple_seq_last (*seq_p);
5365 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5366 return last != gimple_seq_last (*seq_p);
5369 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5370 to CTX. If entries already exist, force them to be some flavor of private.
5371 If there is no enclosing parallel, do nothing. */
5373 void
5374 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5376 splay_tree_node n;
5378 if (decl == NULL || !DECL_P (decl))
5379 return;
5383 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5384 if (n != NULL)
5386 if (n->value & GOVD_SHARED)
5387 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5388 else if (n->value & GOVD_MAP)
5389 n->value |= GOVD_MAP_TO_ONLY;
5390 else
5391 return;
5393 else if (ctx->region_type & ORT_TARGET)
5395 if (ctx->region_type & ORT_TARGET_OFFLOAD)
5396 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5398 else if (ctx->region_type != ORT_WORKSHARE
5399 && ctx->region_type != ORT_SIMD)
5400 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5402 ctx = ctx->outer_context;
5404 while (ctx);
5407 /* Similarly for each of the type sizes of TYPE. */
5409 static void
5410 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5412 if (type == NULL || type == error_mark_node)
5413 return;
5414 type = TYPE_MAIN_VARIANT (type);
5416 if (pointer_set_insert (ctx->privatized_types, type))
5417 return;
5419 switch (TREE_CODE (type))
5421 case INTEGER_TYPE:
5422 case ENUMERAL_TYPE:
5423 case BOOLEAN_TYPE:
5424 case REAL_TYPE:
5425 case FIXED_POINT_TYPE:
5426 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5427 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5428 break;
5430 case ARRAY_TYPE:
5431 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5432 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5433 break;
5435 case RECORD_TYPE:
5436 case UNION_TYPE:
5437 case QUAL_UNION_TYPE:
5439 tree field;
5440 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5441 if (TREE_CODE (field) == FIELD_DECL)
5443 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5444 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5447 break;
5449 case POINTER_TYPE:
5450 case REFERENCE_TYPE:
5451 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5452 break;
5454 default:
5455 break;
5458 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5459 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5460 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5463 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5465 static void
5466 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5468 splay_tree_node n;
5469 unsigned int nflags;
5470 tree t;
5472 if (error_operand_p (decl))
5473 return;
5475 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5476 there are constructors involved somewhere. */
5477 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5478 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5479 flags |= GOVD_SEEN;
5481 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5482 if (n != NULL && n->value != GOVD_ALIGNED)
5484 /* We shouldn't be re-adding the decl with the same data
5485 sharing class. */
5486 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5487 /* The only combination of data sharing classes we should see is
5488 FIRSTPRIVATE and LASTPRIVATE. */
5489 nflags = n->value | flags;
5490 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5491 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5492 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5493 n->value = nflags;
5494 return;
5497 /* When adding a variable-sized variable, we have to handle all sorts
5498 of additional bits of data: the pointer replacement variable, and
5499 the parameters of the type. */
5500 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5502 /* Add the pointer replacement variable as PRIVATE if the variable
5503 replacement is private, else FIRSTPRIVATE since we'll need the
5504 address of the original variable either for SHARED, or for the
5505 copy into or out of the context. */
5506 if (!(flags & GOVD_LOCAL))
5508 if (flags & GOVD_MAP)
5510 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
5511 #if 0
5512 /* Not sure if this is actually needed; haven't found a case
5513 where this would change anything; TODO. */
5514 if (flags & GOVD_MAP_FORCE)
5515 nflags |= OMP_CLAUSE_MAP_FORCE;
5516 #endif
5518 else if (flags & GOVD_PRIVATE)
5519 nflags = GOVD_PRIVATE;
5520 else
5521 nflags = GOVD_FIRSTPRIVATE;
5522 nflags |= flags & GOVD_SEEN;
5523 t = DECL_VALUE_EXPR (decl);
5524 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5525 t = TREE_OPERAND (t, 0);
5526 gcc_assert (DECL_P (t));
5527 omp_add_variable (ctx, t, nflags);
5530 /* Add all of the variable and type parameters (which should have
5531 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5532 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5533 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5534 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5536 /* The variable-sized variable itself is never SHARED, only some form
5537 of PRIVATE. The sharing would take place via the pointer variable
5538 which we remapped above. */
5539 if (flags & GOVD_SHARED)
5540 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5541 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5543 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5544 alloca statement we generate for the variable, so make sure it
5545 is available. This isn't automatically needed for the SHARED
5546 case, since we won't be allocating local storage then.
5547 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5548 in this case omp_notice_variable will be called later
5549 on when it is gimplified. */
5550 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5551 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5552 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5554 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5555 && lang_hooks.decls.omp_privatize_by_reference (decl))
5557 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5559 /* Similar to the direct variable sized case above, we'll need the
5560 size of references being privatized. */
5561 if ((flags & GOVD_SHARED) == 0)
5563 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5564 if (TREE_CODE (t) != INTEGER_CST)
5565 omp_notice_variable (ctx, t, true);
5569 if (n != NULL)
5570 n->value |= flags;
5571 else
5572 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5575 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5576 This just prints out diagnostics about threadprivate variable uses
5577 in untied tasks. If DECL2 is non-NULL, prevent this warning
5578 on that variable. */
5580 static bool
5581 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5582 tree decl2)
5584 splay_tree_node n;
5585 struct gimplify_omp_ctx *octx;
5587 for (octx = ctx; octx; octx = octx->outer_context)
5588 if ((octx->region_type & ORT_TARGET)
5589 && (octx->region_type & ORT_TARGET_OFFLOAD))
5591 gcc_assert (!(octx->region_type & ORT_TARGET_MAP_FORCE));
5593 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5594 if (n == NULL)
5596 error ("threadprivate variable %qE used in target region",
5597 DECL_NAME (decl));
5598 error_at (octx->location, "enclosing target region");
5599 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5601 if (decl2)
5602 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5605 if (ctx->region_type != ORT_UNTIED_TASK)
5606 return false;
5607 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5608 if (n == NULL)
5610 error ("threadprivate variable %qE used in untied task",
5611 DECL_NAME (decl));
5612 error_at (ctx->location, "enclosing task");
5613 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5615 if (decl2)
5616 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5617 return false;
5620 /* Record the fact that DECL was used within the OpenMP context CTX.
5621 IN_CODE is true when real code uses DECL, and false when we should
5622 merely emit default(none) errors. Return true if DECL is going to
5623 be remapped and thus DECL shouldn't be gimplified into its
5624 DECL_VALUE_EXPR (if any). */
5626 static bool
5627 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5629 splay_tree_node n;
5630 unsigned flags = in_code ? GOVD_SEEN : 0;
5631 bool ret = false, shared;
5633 if (error_operand_p (decl))
5634 return false;
5636 /* Threadprivate variables are predetermined. */
5637 if (is_global_var (decl))
5639 if (DECL_THREAD_LOCAL_P (decl))
5640 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5642 if (DECL_HAS_VALUE_EXPR_P (decl))
5644 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5646 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5647 return omp_notice_threadprivate_variable (ctx, decl, value);
5651 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5652 if ((ctx->region_type & ORT_TARGET)
5653 && (ctx->region_type & ORT_TARGET_OFFLOAD))
5655 unsigned map_force;
5656 if (ctx->region_type & ORT_TARGET_MAP_FORCE)
5657 map_force = GOVD_MAP_FORCE;
5658 else
5659 map_force = 0;
5660 if (n == NULL)
5662 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5664 error ("%qD referenced in target region does not have "
5665 "a mappable type", decl);
5666 omp_add_variable (ctx, decl, GOVD_MAP | map_force | GOVD_EXPLICIT | flags);
5668 else
5669 omp_add_variable (ctx, decl, GOVD_MAP | map_force | flags);
5671 else
5673 #if 0
5674 /* The following fails for:
5676 int l = 10;
5677 float c[l];
5678 #pragma acc parallel copy(c[2:4])
5680 #pragma acc parallel
5682 int t = sizeof c;
5686 ..., which we currently don't have to care about (nesting
5687 disabled), but eventually will have to; TODO. */
5688 if ((n->value & GOVD_MAP) && !(n->value & GOVD_EXPLICIT))
5689 gcc_assert ((n->value & GOVD_MAP_FORCE) == map_force);
5690 #endif
5692 n->value |= flags;
5694 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5695 goto do_outer;
5698 if (n == NULL)
5700 enum omp_clause_default_kind default_kind, kind;
5701 struct gimplify_omp_ctx *octx;
5703 if (ctx->region_type == ORT_WORKSHARE
5704 || ctx->region_type == ORT_SIMD
5705 || ((ctx->region_type & ORT_TARGET)
5706 && !(ctx->region_type & ORT_TARGET_OFFLOAD)))
5707 goto do_outer;
5709 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5710 remapped firstprivate instead of shared. To some extent this is
5711 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5712 default_kind = ctx->default_kind;
5713 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5714 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5715 default_kind = kind;
5717 switch (default_kind)
5719 case OMP_CLAUSE_DEFAULT_NONE:
5720 if ((ctx->region_type & ORT_TASK) != 0)
5722 error ("%qE not specified in enclosing task",
5723 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5724 error_at (ctx->location, "enclosing task");
5726 else if (ctx->region_type == ORT_TEAMS)
5728 error ("%qE not specified in enclosing teams construct",
5729 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5730 error_at (ctx->location, "enclosing teams construct");
5732 else
5734 error ("%qE not specified in enclosing parallel",
5735 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5736 error_at (ctx->location, "enclosing parallel");
5738 /* FALLTHRU */
5739 case OMP_CLAUSE_DEFAULT_SHARED:
5740 flags |= GOVD_SHARED;
5741 break;
5742 case OMP_CLAUSE_DEFAULT_PRIVATE:
5743 flags |= GOVD_PRIVATE;
5744 break;
5745 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5746 flags |= GOVD_FIRSTPRIVATE;
5747 break;
5748 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5749 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5750 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5751 if (ctx->outer_context)
5752 omp_notice_variable (ctx->outer_context, decl, in_code);
5753 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5755 splay_tree_node n2;
5757 if (octx->region_type & ORT_TARGET)
5758 continue;
5759 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5760 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5762 flags |= GOVD_FIRSTPRIVATE;
5763 break;
5765 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5766 break;
5768 if (flags & GOVD_FIRSTPRIVATE)
5769 break;
5770 if (octx == NULL
5771 && (TREE_CODE (decl) == PARM_DECL
5772 || (!is_global_var (decl)
5773 && DECL_CONTEXT (decl) == current_function_decl)))
5775 flags |= GOVD_FIRSTPRIVATE;
5776 break;
5778 flags |= GOVD_SHARED;
5779 break;
5780 default:
5781 gcc_unreachable ();
5784 if ((flags & GOVD_PRIVATE)
5785 && lang_hooks.decls.omp_private_outer_ref (decl))
5786 flags |= GOVD_PRIVATE_OUTER_REF;
5788 omp_add_variable (ctx, decl, flags);
5790 shared = (flags & GOVD_SHARED) != 0;
5791 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5792 goto do_outer;
5795 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5796 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5797 && DECL_SIZE (decl)
5798 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5800 splay_tree_node n2;
5801 tree t = DECL_VALUE_EXPR (decl);
5802 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5803 t = TREE_OPERAND (t, 0);
5804 gcc_assert (DECL_P (t));
5805 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5806 n2->value |= GOVD_SEEN;
5809 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5810 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5812 /* If nothing changed, there's nothing left to do. */
5813 if ((n->value & flags) == flags)
5814 return ret;
5815 flags |= n->value;
5816 n->value = flags;
5818 do_outer:
5819 /* If the variable is private in the current context, then we don't
5820 need to propagate anything to an outer context. */
5821 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5822 return ret;
5823 if (ctx->outer_context
5824 && omp_notice_variable (ctx->outer_context, decl, in_code))
5825 return true;
5826 return ret;
5829 /* Verify that DECL is private within CTX. If there's specific information
5830 to the contrary in the innermost scope, generate an error. */
5832 static bool
5833 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, bool simd)
5835 splay_tree_node n;
5837 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5838 if (n != NULL)
5840 if (n->value & GOVD_SHARED)
5842 if (ctx == gimplify_omp_ctxp)
5844 if (simd)
5845 error ("iteration variable %qE is predetermined linear",
5846 DECL_NAME (decl));
5847 else
5848 error ("iteration variable %qE should be private",
5849 DECL_NAME (decl));
5850 n->value = GOVD_PRIVATE;
5851 return true;
5853 else
5854 return false;
5856 else if ((n->value & GOVD_EXPLICIT) != 0
5857 && (ctx == gimplify_omp_ctxp
5858 || (ctx->region_type == ORT_COMBINED_PARALLEL
5859 && gimplify_omp_ctxp->outer_context == ctx)))
5861 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5862 error ("iteration variable %qE should not be firstprivate",
5863 DECL_NAME (decl));
5864 else if ((n->value & GOVD_REDUCTION) != 0)
5865 error ("iteration variable %qE should not be reduction",
5866 DECL_NAME (decl));
5867 else if (simd && (n->value & GOVD_LASTPRIVATE) != 0)
5868 error ("iteration variable %qE should not be lastprivate",
5869 DECL_NAME (decl));
5870 else if (simd && (n->value & GOVD_PRIVATE) != 0)
5871 error ("iteration variable %qE should not be private",
5872 DECL_NAME (decl));
5873 else if (simd && (n->value & GOVD_LINEAR) != 0)
5874 error ("iteration variable %qE is predetermined linear",
5875 DECL_NAME (decl));
5877 return (ctx == gimplify_omp_ctxp
5878 || (ctx->region_type == ORT_COMBINED_PARALLEL
5879 && gimplify_omp_ctxp->outer_context == ctx));
5882 if (ctx->region_type != ORT_WORKSHARE
5883 && ctx->region_type != ORT_SIMD)
5884 return false;
5885 else if (ctx->outer_context)
5886 return omp_is_private (ctx->outer_context, decl, simd);
5887 return false;
5890 /* Return true if DECL is private within a parallel region
5891 that binds to the current construct's context or in parallel
5892 region's REDUCTION clause. */
5894 static bool
5895 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
5897 splay_tree_node n;
5901 ctx = ctx->outer_context;
5902 if (ctx == NULL)
5903 return !(is_global_var (decl)
5904 /* References might be private, but might be shared too,
5905 when checking for copyprivate, assume they might be
5906 private, otherwise assume they might be shared. */
5907 || (!copyprivate
5908 && lang_hooks.decls.omp_privatize_by_reference (decl)));
5910 if (ctx->region_type & ORT_TARGET)
5911 continue;
5913 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5914 if (n != NULL)
5915 return (n->value & GOVD_SHARED) == 0;
5917 while (ctx->region_type == ORT_WORKSHARE
5918 || ctx->region_type == ORT_SIMD);
5919 return false;
5922 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5923 and previous omp contexts. */
5925 static void
5926 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5927 enum omp_region_type region_type)
5929 struct gimplify_omp_ctx *ctx, *outer_ctx;
5930 tree c;
5932 ctx = new_omp_context (region_type);
5933 outer_ctx = ctx->outer_context;
5935 while ((c = *list_p) != NULL)
5937 bool remove = false;
5938 bool notice_outer = true;
5939 const char *check_non_private = NULL;
5940 unsigned int flags;
5941 tree decl;
5943 switch (OMP_CLAUSE_CODE (c))
5945 case OMP_CLAUSE_PRIVATE:
5946 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5947 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5949 flags |= GOVD_PRIVATE_OUTER_REF;
5950 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5952 else
5953 notice_outer = false;
5954 goto do_add;
5955 case OMP_CLAUSE_SHARED:
5956 flags = GOVD_SHARED | GOVD_EXPLICIT;
5957 goto do_add;
5958 case OMP_CLAUSE_FIRSTPRIVATE:
5959 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5960 check_non_private = "firstprivate";
5961 goto do_add;
5962 case OMP_CLAUSE_LASTPRIVATE:
5963 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5964 check_non_private = "lastprivate";
5965 goto do_add;
5966 case OMP_CLAUSE_REDUCTION:
5967 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5968 check_non_private = "reduction";
5969 goto do_add;
5970 case OMP_CLAUSE_LINEAR:
5971 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
5972 is_gimple_val, fb_rvalue) == GS_ERROR)
5974 remove = true;
5975 break;
5977 flags = GOVD_LINEAR | GOVD_EXPLICIT;
5978 goto do_add;
5980 case OMP_CLAUSE_MAP:
5981 switch (OMP_CLAUSE_MAP_KIND (c))
5983 case OMP_CLAUSE_MAP_FORCE_PRESENT:
5984 case OMP_CLAUSE_MAP_FORCE_DEALLOC:
5985 case OMP_CLAUSE_MAP_FORCE_DEVICEPTR:
5986 input_location = OMP_CLAUSE_LOCATION (c);
5987 /* TODO. */
5988 sorry ("data clause not yet implemented");
5989 remove = true;
5990 break;
5991 default:
5992 break;
5994 if (OMP_CLAUSE_SIZE (c)
5995 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
5996 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
5998 remove = true;
5999 break;
6001 decl = OMP_CLAUSE_DECL (c);
6002 if (!DECL_P (decl))
6004 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6005 NULL, is_gimple_lvalue, fb_lvalue)
6006 == GS_ERROR)
6008 remove = true;
6009 break;
6011 break;
6013 flags = GOVD_MAP | GOVD_EXPLICIT;
6014 goto do_add;
6016 case OMP_CLAUSE_DEPEND:
6017 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
6019 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
6020 NULL, is_gimple_val, fb_rvalue);
6021 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
6023 if (error_operand_p (OMP_CLAUSE_DECL (c)))
6025 remove = true;
6026 break;
6028 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
6029 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
6030 is_gimple_val, fb_rvalue) == GS_ERROR)
6032 remove = true;
6033 break;
6035 break;
6037 case OMP_CLAUSE_TO:
6038 case OMP_CLAUSE_FROM:
6039 if (OMP_CLAUSE_SIZE (c)
6040 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6041 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6043 remove = true;
6044 break;
6046 decl = OMP_CLAUSE_DECL (c);
6047 if (error_operand_p (decl))
6049 remove = true;
6050 break;
6052 if (!DECL_P (decl))
6054 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6055 NULL, is_gimple_lvalue, fb_lvalue)
6056 == GS_ERROR)
6058 remove = true;
6059 break;
6061 break;
6063 goto do_notice;
6065 do_add:
6066 decl = OMP_CLAUSE_DECL (c);
6067 if (error_operand_p (decl))
6069 remove = true;
6070 break;
6072 omp_add_variable (ctx, decl, flags);
6073 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6074 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6076 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
6077 GOVD_LOCAL | GOVD_SEEN);
6078 gimplify_omp_ctxp = ctx;
6079 push_gimplify_context ();
6081 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6082 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6084 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6085 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6086 pop_gimplify_context
6087 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
6088 push_gimplify_context ();
6089 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6090 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6091 pop_gimplify_context
6092 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6093 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6094 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6096 gimplify_omp_ctxp = outer_ctx;
6098 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6099 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6101 gimplify_omp_ctxp = ctx;
6102 push_gimplify_context ();
6103 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6105 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6106 NULL, NULL);
6107 TREE_SIDE_EFFECTS (bind) = 1;
6108 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6109 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6111 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6112 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6113 pop_gimplify_context
6114 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6115 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6117 gimplify_omp_ctxp = outer_ctx;
6119 if (notice_outer)
6120 goto do_notice;
6121 break;
6123 case OMP_CLAUSE_COPYIN:
6124 case OMP_CLAUSE_COPYPRIVATE:
6125 decl = OMP_CLAUSE_DECL (c);
6126 if (error_operand_p (decl))
6128 remove = true;
6129 break;
6131 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
6132 && !remove
6133 && !omp_check_private (ctx, decl, true))
6135 remove = true;
6136 if (is_global_var (decl))
6138 if (DECL_THREAD_LOCAL_P (decl))
6139 remove = false;
6140 else if (DECL_HAS_VALUE_EXPR_P (decl))
6142 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6144 if (value
6145 && DECL_P (value)
6146 && DECL_THREAD_LOCAL_P (value))
6147 remove = false;
6150 if (remove)
6151 error_at (OMP_CLAUSE_LOCATION (c),
6152 "copyprivate variable %qE is not threadprivate"
6153 " or private in outer context", DECL_NAME (decl));
6155 do_notice:
6156 if (outer_ctx)
6157 omp_notice_variable (outer_ctx, decl, true);
6158 if (check_non_private
6159 && region_type == ORT_WORKSHARE
6160 && omp_check_private (ctx, decl, false))
6162 error ("%s variable %qE is private in outer context",
6163 check_non_private, DECL_NAME (decl));
6164 remove = true;
6166 break;
6168 case OMP_CLAUSE_FINAL:
6169 case OMP_CLAUSE_IF:
6170 OMP_CLAUSE_OPERAND (c, 0)
6171 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6172 /* Fall through. */
6174 case OMP_CLAUSE_SCHEDULE:
6175 case OMP_CLAUSE_NUM_THREADS:
6176 case OMP_CLAUSE_NUM_TEAMS:
6177 case OMP_CLAUSE_THREAD_LIMIT:
6178 case OMP_CLAUSE_DIST_SCHEDULE:
6179 case OMP_CLAUSE_DEVICE:
6180 case OMP_CLAUSE_NUM_GANGS:
6181 case OMP_CLAUSE_NUM_WORKERS:
6182 case OMP_CLAUSE_VECTOR_LENGTH:
6183 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6184 is_gimple_val, fb_rvalue) == GS_ERROR)
6185 remove = true;
6186 break;
6188 case OMP_CLAUSE_HOST:
6189 case OMP_CLAUSE_OACC_DEVICE:
6190 case OMP_CLAUSE_DEVICE_RESIDENT:
6191 case OMP_CLAUSE_USE_DEVICE:
6192 case OMP_CLAUSE_GANG:
6193 case OMP_CLAUSE_ASYNC:
6194 case OMP_CLAUSE_WAIT:
6195 case OMP_NO_CLAUSE_CACHE:
6196 case OMP_CLAUSE_INDEPENDENT:
6197 case OMP_CLAUSE_WORKER:
6198 case OMP_CLAUSE_VECTOR:
6199 remove = true;
6200 break;
6202 case OMP_CLAUSE_NOWAIT:
6203 case OMP_CLAUSE_ORDERED:
6204 case OMP_CLAUSE_UNTIED:
6205 case OMP_CLAUSE_COLLAPSE:
6206 case OMP_CLAUSE_MERGEABLE:
6207 case OMP_CLAUSE_PROC_BIND:
6208 case OMP_CLAUSE_SAFELEN:
6209 break;
6211 case OMP_CLAUSE_ALIGNED:
6212 decl = OMP_CLAUSE_DECL (c);
6213 if (error_operand_p (decl))
6215 remove = true;
6216 break;
6218 if (!is_global_var (decl)
6219 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6220 omp_add_variable (ctx, decl, GOVD_ALIGNED);
6221 break;
6223 case OMP_CLAUSE_DEFAULT:
6224 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6225 break;
6227 default:
6228 gcc_unreachable ();
6231 if (remove)
6232 *list_p = OMP_CLAUSE_CHAIN (c);
6233 else
6234 list_p = &OMP_CLAUSE_CHAIN (c);
6237 gimplify_omp_ctxp = ctx;
6240 /* For all variables that were not actually used within the context,
6241 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
6243 static int
6244 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6246 tree *list_p = (tree *) data;
6247 tree decl = (tree) n->key;
6248 unsigned flags = n->value;
6249 enum omp_clause_code code;
6250 tree clause;
6251 bool private_debug;
6253 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6254 return 0;
6255 if ((flags & GOVD_SEEN) == 0)
6256 return 0;
6257 if (flags & GOVD_DEBUG_PRIVATE)
6259 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6260 private_debug = true;
6262 else if (flags & GOVD_MAP)
6263 private_debug = false;
6264 else
6265 private_debug
6266 = lang_hooks.decls.omp_private_debug_clause (decl,
6267 !!(flags & GOVD_SHARED));
6268 if (private_debug)
6269 code = OMP_CLAUSE_PRIVATE;
6270 else if (flags & GOVD_MAP)
6271 code = OMP_CLAUSE_MAP;
6272 else if (flags & GOVD_SHARED)
6274 if (is_global_var (decl))
6276 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6277 while (ctx != NULL)
6279 splay_tree_node on
6280 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6281 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6282 | GOVD_PRIVATE | GOVD_REDUCTION
6283 | GOVD_LINEAR)) != 0)
6284 break;
6285 ctx = ctx->outer_context;
6287 if (ctx == NULL)
6288 return 0;
6290 code = OMP_CLAUSE_SHARED;
6292 else if (flags & GOVD_PRIVATE)
6293 code = OMP_CLAUSE_PRIVATE;
6294 else if (flags & GOVD_FIRSTPRIVATE)
6295 code = OMP_CLAUSE_FIRSTPRIVATE;
6296 else if (flags & GOVD_LASTPRIVATE)
6297 code = OMP_CLAUSE_LASTPRIVATE;
6298 else if (flags & GOVD_ALIGNED)
6299 return 0;
6300 else
6301 gcc_unreachable ();
6303 clause = build_omp_clause (input_location, code);
6304 OMP_CLAUSE_DECL (clause) = decl;
6305 OMP_CLAUSE_CHAIN (clause) = *list_p;
6306 if (private_debug)
6307 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6308 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6309 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6310 else if (code == OMP_CLAUSE_MAP)
6312 unsigned map_kind;
6313 map_kind = (flags & GOVD_MAP_TO_ONLY
6314 ? OMP_CLAUSE_MAP_TO
6315 : OMP_CLAUSE_MAP_TOFROM);
6316 if (flags & GOVD_MAP_FORCE)
6317 map_kind |= OMP_CLAUSE_MAP_FORCE;
6318 OMP_CLAUSE_MAP_KIND (clause) = (enum omp_clause_map_kind) map_kind;
6320 if (DECL_SIZE (decl)
6321 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6323 tree decl2 = DECL_VALUE_EXPR (decl);
6324 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6325 decl2 = TREE_OPERAND (decl2, 0);
6326 gcc_assert (DECL_P (decl2));
6327 tree mem = build_simple_mem_ref (decl2);
6328 OMP_CLAUSE_DECL (clause) = mem;
6329 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6330 if (gimplify_omp_ctxp->outer_context)
6332 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6333 omp_notice_variable (ctx, decl2, true);
6334 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6336 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6337 OMP_CLAUSE_MAP);
6338 OMP_CLAUSE_DECL (nc) = decl;
6339 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6340 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6341 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6342 OMP_CLAUSE_CHAIN (clause) = nc;
6345 *list_p = clause;
6346 lang_hooks.decls.omp_finish_clause (clause);
6348 return 0;
6351 static void
6352 gimplify_adjust_omp_clauses (tree *list_p)
6354 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6355 tree c, decl;
6357 while ((c = *list_p) != NULL)
6359 splay_tree_node n;
6360 bool remove = false;
6362 switch (OMP_CLAUSE_CODE (c))
6364 case OMP_CLAUSE_PRIVATE:
6365 case OMP_CLAUSE_SHARED:
6366 case OMP_CLAUSE_FIRSTPRIVATE:
6367 case OMP_CLAUSE_LINEAR:
6368 decl = OMP_CLAUSE_DECL (c);
6369 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6370 remove = !(n->value & GOVD_SEEN);
6371 if (! remove)
6373 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6374 if ((n->value & GOVD_DEBUG_PRIVATE)
6375 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6377 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6378 || ((n->value & GOVD_DATA_SHARE_CLASS)
6379 == GOVD_PRIVATE));
6380 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6381 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6383 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6384 && ctx->outer_context
6385 && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6386 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6387 && !is_global_var (decl))
6389 if (ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
6391 n = splay_tree_lookup (ctx->outer_context->variables,
6392 (splay_tree_key) decl);
6393 if (n == NULL
6394 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
6396 int flags = OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6397 ? GOVD_LASTPRIVATE : GOVD_SHARED;
6398 if (n == NULL)
6399 omp_add_variable (ctx->outer_context, decl,
6400 flags | GOVD_SEEN);
6401 else
6402 n->value |= flags | GOVD_SEEN;
6405 else
6406 omp_notice_variable (ctx->outer_context, decl, true);
6409 break;
6411 case OMP_CLAUSE_LASTPRIVATE:
6412 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6413 accurately reflect the presence of a FIRSTPRIVATE clause. */
6414 decl = OMP_CLAUSE_DECL (c);
6415 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6416 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6417 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6418 break;
6420 case OMP_CLAUSE_ALIGNED:
6421 decl = OMP_CLAUSE_DECL (c);
6422 if (!is_global_var (decl))
6424 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6425 remove = n == NULL || !(n->value & GOVD_SEEN);
6426 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6428 struct gimplify_omp_ctx *octx;
6429 if (n != NULL
6430 && (n->value & (GOVD_DATA_SHARE_CLASS
6431 & ~GOVD_FIRSTPRIVATE)))
6432 remove = true;
6433 else
6434 for (octx = ctx->outer_context; octx;
6435 octx = octx->outer_context)
6437 n = splay_tree_lookup (octx->variables,
6438 (splay_tree_key) decl);
6439 if (n == NULL)
6440 continue;
6441 if (n->value & GOVD_LOCAL)
6442 break;
6443 /* We have to avoid assigning a shared variable
6444 to itself when trying to add
6445 __builtin_assume_aligned. */
6446 if (n->value & GOVD_SHARED)
6448 remove = true;
6449 break;
6454 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6456 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6457 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6458 remove = true;
6460 break;
6462 case OMP_CLAUSE_MAP:
6463 decl = OMP_CLAUSE_DECL (c);
6464 if (!DECL_P (decl))
6465 break;
6466 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6467 if ((ctx->region_type & ORT_TARGET)
6468 && (ctx->region_type & ORT_TARGET_OFFLOAD)
6469 && !(n->value & GOVD_SEEN))
6470 remove = true;
6471 else if (DECL_SIZE (decl)
6472 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6473 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
6475 tree decl2 = DECL_VALUE_EXPR (decl);
6476 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6477 decl2 = TREE_OPERAND (decl2, 0);
6478 gcc_assert (DECL_P (decl2));
6479 tree mem = build_simple_mem_ref (decl2);
6480 OMP_CLAUSE_DECL (c) = mem;
6481 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6482 if (ctx->outer_context)
6484 omp_notice_variable (ctx->outer_context, decl2, true);
6485 omp_notice_variable (ctx->outer_context,
6486 OMP_CLAUSE_SIZE (c), true);
6488 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6489 OMP_CLAUSE_MAP);
6490 OMP_CLAUSE_DECL (nc) = decl;
6491 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6492 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6493 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6494 OMP_CLAUSE_CHAIN (c) = nc;
6495 c = nc;
6497 break;
6499 case OMP_CLAUSE_TO:
6500 case OMP_CLAUSE_FROM:
6501 decl = OMP_CLAUSE_DECL (c);
6502 if (!DECL_P (decl))
6503 break;
6504 if (DECL_SIZE (decl)
6505 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6507 tree decl2 = DECL_VALUE_EXPR (decl);
6508 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6509 decl2 = TREE_OPERAND (decl2, 0);
6510 gcc_assert (DECL_P (decl2));
6511 tree mem = build_simple_mem_ref (decl2);
6512 OMP_CLAUSE_DECL (c) = mem;
6513 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6514 if (ctx->outer_context)
6516 omp_notice_variable (ctx->outer_context, decl2, true);
6517 omp_notice_variable (ctx->outer_context,
6518 OMP_CLAUSE_SIZE (c), true);
6521 break;
6523 case OMP_CLAUSE_REDUCTION:
6524 case OMP_CLAUSE_COPYIN:
6525 case OMP_CLAUSE_COPYPRIVATE:
6526 case OMP_CLAUSE_IF:
6527 case OMP_CLAUSE_NUM_THREADS:
6528 case OMP_CLAUSE_NUM_TEAMS:
6529 case OMP_CLAUSE_THREAD_LIMIT:
6530 case OMP_CLAUSE_DIST_SCHEDULE:
6531 case OMP_CLAUSE_DEVICE:
6532 case OMP_CLAUSE_SCHEDULE:
6533 case OMP_CLAUSE_NOWAIT:
6534 case OMP_CLAUSE_ORDERED:
6535 case OMP_CLAUSE_DEFAULT:
6536 case OMP_CLAUSE_UNTIED:
6537 case OMP_CLAUSE_COLLAPSE:
6538 case OMP_CLAUSE_FINAL:
6539 case OMP_CLAUSE_MERGEABLE:
6540 case OMP_CLAUSE_PROC_BIND:
6541 case OMP_CLAUSE_SAFELEN:
6542 case OMP_CLAUSE_DEPEND:
6543 case OMP_CLAUSE_NUM_GANGS:
6544 case OMP_CLAUSE_NUM_WORKERS:
6545 case OMP_CLAUSE_VECTOR_LENGTH:
6546 break;
6548 case OMP_CLAUSE_HOST:
6549 case OMP_CLAUSE_OACC_DEVICE:
6550 case OMP_CLAUSE_DEVICE_RESIDENT:
6551 case OMP_CLAUSE_USE_DEVICE:
6552 case OMP_CLAUSE_GANG:
6553 case OMP_CLAUSE_ASYNC:
6554 case OMP_CLAUSE_WAIT:
6555 case OMP_NO_CLAUSE_CACHE:
6556 case OMP_CLAUSE_INDEPENDENT:
6557 case OMP_CLAUSE_WORKER:
6558 case OMP_CLAUSE_VECTOR:
6559 default:
6560 gcc_unreachable ();
6563 if (remove)
6564 *list_p = OMP_CLAUSE_CHAIN (c);
6565 else
6566 list_p = &OMP_CLAUSE_CHAIN (c);
6569 /* Add in any implicit data sharing. */
6570 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
6572 gimplify_omp_ctxp = ctx->outer_context;
6573 delete_omp_context (ctx);
6576 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6577 gimplification of the body, as well as scanning the body for used
6578 variables. We need to do this scan now, because variable-sized
6579 decls will be decomposed during gimplification. */
6581 static void
6582 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6584 tree expr = *expr_p;
6585 gimple g;
6586 gimple_seq body = NULL;
6588 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6589 OMP_PARALLEL_COMBINED (expr)
6590 ? ORT_COMBINED_PARALLEL
6591 : ORT_PARALLEL);
6593 push_gimplify_context ();
6595 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6596 if (gimple_code (g) == GIMPLE_BIND)
6597 pop_gimplify_context (g);
6598 else
6599 pop_gimplify_context (NULL);
6601 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6603 g = gimple_build_omp_parallel (body,
6604 OMP_PARALLEL_CLAUSES (expr),
6605 NULL_TREE, NULL_TREE);
6606 if (OMP_PARALLEL_COMBINED (expr))
6607 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6608 gimplify_seq_add_stmt (pre_p, g);
6609 *expr_p = NULL_TREE;
6612 /* Gimplify the contents of an OMP_TASK statement. This involves
6613 gimplification of the body, as well as scanning the body for used
6614 variables. We need to do this scan now, because variable-sized
6615 decls will be decomposed during gimplification. */
6617 static void
6618 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6620 tree expr = *expr_p;
6621 gimple g;
6622 gimple_seq body = NULL;
6624 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6625 find_omp_clause (OMP_TASK_CLAUSES (expr),
6626 OMP_CLAUSE_UNTIED)
6627 ? ORT_UNTIED_TASK : ORT_TASK);
6629 push_gimplify_context ();
6631 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6632 if (gimple_code (g) == GIMPLE_BIND)
6633 pop_gimplify_context (g);
6634 else
6635 pop_gimplify_context (NULL);
6637 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6639 g = gimple_build_omp_task (body,
6640 OMP_TASK_CLAUSES (expr),
6641 NULL_TREE, NULL_TREE,
6642 NULL_TREE, NULL_TREE, NULL_TREE);
6643 gimplify_seq_add_stmt (pre_p, g);
6644 *expr_p = NULL_TREE;
6647 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6648 with non-NULL OMP_FOR_INIT. */
6650 static tree
6651 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6653 *walk_subtrees = 0;
6654 switch (TREE_CODE (*tp))
6656 case OMP_FOR:
6657 *walk_subtrees = 1;
6658 /* FALLTHRU */
6659 case OMP_SIMD:
6660 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6661 return *tp;
6662 break;
6663 case BIND_EXPR:
6664 case STATEMENT_LIST:
6665 case OMP_PARALLEL:
6666 *walk_subtrees = 1;
6667 break;
6668 default:
6669 break;
6671 return NULL_TREE;
6674 /* Gimplify the gross structure of an OMP_FOR statement. */
6676 static enum gimplify_status
6677 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6679 tree for_stmt, orig_for_stmt, decl, var, t;
6680 enum gimplify_status ret = GS_ALL_DONE;
6681 enum gimplify_status tret;
6682 gimple gfor;
6683 gimple_seq for_body, for_pre_body;
6684 int i;
6685 bool simd;
6686 enum gimplify_omp_var_data govd_private;
6687 enum omp_region_type ort;
6688 bitmap has_decl_expr = NULL;
6690 orig_for_stmt = for_stmt = *expr_p;
6692 switch (TREE_CODE (for_stmt))
6694 case OMP_FOR:
6695 case OMP_DISTRIBUTE:
6696 simd = false;
6697 govd_private = GOVD_PRIVATE;
6698 ort = ORT_WORKSHARE;
6699 break;
6700 case OACC_LOOP:
6701 simd = false;
6702 govd_private = /* TODO */ GOVD_LOCAL;
6703 ort = /* TODO */ ORT_WORKSHARE;
6704 break;
6705 case OMP_SIMD:
6706 case CILK_SIMD:
6707 simd = true;
6708 govd_private = GOVD_PRIVATE;
6709 ort = ORT_SIMD;
6710 break;
6711 default:
6712 gcc_unreachable ();
6715 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort);
6717 /* Handle OMP_FOR_INIT. */
6718 for_pre_body = NULL;
6719 if (simd && OMP_FOR_PRE_BODY (for_stmt))
6721 has_decl_expr = BITMAP_ALLOC (NULL);
6722 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6723 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
6724 == VAR_DECL)
6726 t = OMP_FOR_PRE_BODY (for_stmt);
6727 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6729 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6731 tree_stmt_iterator si;
6732 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6733 tsi_next (&si))
6735 t = tsi_stmt (si);
6736 if (TREE_CODE (t) == DECL_EXPR
6737 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6738 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6742 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6743 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6745 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6747 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
6748 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6749 NULL, NULL);
6750 gcc_assert (for_stmt != NULL_TREE);
6751 gimplify_omp_ctxp->combined_loop = true;
6754 for_body = NULL;
6755 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6756 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6757 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6758 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6759 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6761 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6762 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6763 decl = TREE_OPERAND (t, 0);
6764 gcc_assert (DECL_P (decl));
6765 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6766 || POINTER_TYPE_P (TREE_TYPE (decl)));
6768 /* Make sure the iteration variable is some kind of private. */
6769 tree c = NULL_TREE;
6770 if (orig_for_stmt != for_stmt)
6771 /* Do this only on innermost construct for combined ones. */;
6772 else if (simd)
6774 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6775 (splay_tree_key)decl);
6776 omp_is_private (gimplify_omp_ctxp, decl, simd);
6777 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6778 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6779 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6781 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6782 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6783 if (has_decl_expr
6784 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6785 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6786 OMP_CLAUSE_DECL (c) = decl;
6787 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6788 OMP_FOR_CLAUSES (for_stmt) = c;
6789 omp_add_variable (gimplify_omp_ctxp, decl,
6790 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6792 else
6794 gcc_assert (govd_private == GOVD_PRIVATE);
6795 bool lastprivate
6796 = (!has_decl_expr
6797 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6798 c = build_omp_clause (input_location,
6799 lastprivate ? OMP_CLAUSE_LASTPRIVATE
6800 : OMP_CLAUSE_PRIVATE);
6801 OMP_CLAUSE_DECL (c) = decl;
6802 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6803 omp_add_variable (gimplify_omp_ctxp, decl,
6804 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6805 | GOVD_SEEN);
6806 c = NULL_TREE;
6809 else if (omp_is_private (gimplify_omp_ctxp, decl, simd))
6810 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6811 else
6812 omp_add_variable (gimplify_omp_ctxp, decl, govd_private | GOVD_SEEN);
6814 /* If DECL is not a gimple register, create a temporary variable to act
6815 as an iteration counter. This is valid, since DECL cannot be
6816 modified in the body of the loop. */
6817 if (orig_for_stmt != for_stmt)
6818 var = decl;
6819 else if (!is_gimple_reg (decl))
6821 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6822 TREE_OPERAND (t, 0) = var;
6824 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6826 omp_add_variable (gimplify_omp_ctxp, var, govd_private | GOVD_SEEN);
6828 else
6829 var = decl;
6831 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6832 is_gimple_val, fb_rvalue);
6833 ret = MIN (ret, tret);
6834 if (ret == GS_ERROR)
6835 return ret;
6837 /* Handle OMP_FOR_COND. */
6838 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6839 gcc_assert (COMPARISON_CLASS_P (t));
6840 gcc_assert (TREE_OPERAND (t, 0) == decl);
6842 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6843 is_gimple_val, fb_rvalue);
6844 ret = MIN (ret, tret);
6846 /* Handle OMP_FOR_INCR. */
6847 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6848 switch (TREE_CODE (t))
6850 case PREINCREMENT_EXPR:
6851 case POSTINCREMENT_EXPR:
6853 tree decl = TREE_OPERAND (t, 0);
6854 // c_omp_for_incr_canonicalize_ptr() should have been
6855 // called to massage things appropriately.
6856 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
6858 if (orig_for_stmt != for_stmt)
6859 break;
6860 t = build_int_cst (TREE_TYPE (decl), 1);
6861 if (c)
6862 OMP_CLAUSE_LINEAR_STEP (c) = t;
6863 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6864 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6865 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6866 break;
6869 case PREDECREMENT_EXPR:
6870 case POSTDECREMENT_EXPR:
6871 if (orig_for_stmt != for_stmt)
6872 break;
6873 t = build_int_cst (TREE_TYPE (decl), -1);
6874 if (c)
6875 OMP_CLAUSE_LINEAR_STEP (c) = t;
6876 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6877 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6878 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6879 break;
6881 case MODIFY_EXPR:
6882 gcc_assert (TREE_OPERAND (t, 0) == decl);
6883 TREE_OPERAND (t, 0) = var;
6885 t = TREE_OPERAND (t, 1);
6886 switch (TREE_CODE (t))
6888 case PLUS_EXPR:
6889 if (TREE_OPERAND (t, 1) == decl)
6891 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6892 TREE_OPERAND (t, 0) = var;
6893 break;
6896 /* Fallthru. */
6897 case MINUS_EXPR:
6898 case POINTER_PLUS_EXPR:
6899 gcc_assert (TREE_OPERAND (t, 0) == decl);
6900 TREE_OPERAND (t, 0) = var;
6901 break;
6902 default:
6903 gcc_unreachable ();
6906 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6907 is_gimple_val, fb_rvalue);
6908 ret = MIN (ret, tret);
6909 if (c)
6911 OMP_CLAUSE_LINEAR_STEP (c) = TREE_OPERAND (t, 1);
6912 if (TREE_CODE (t) == MINUS_EXPR)
6914 t = TREE_OPERAND (t, 1);
6915 OMP_CLAUSE_LINEAR_STEP (c)
6916 = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
6917 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
6918 &for_pre_body, NULL,
6919 is_gimple_val, fb_rvalue);
6920 ret = MIN (ret, tret);
6923 break;
6925 default:
6926 gcc_unreachable ();
6929 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6930 && orig_for_stmt == for_stmt)
6932 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6933 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6934 && OMP_CLAUSE_DECL (c) == decl
6935 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6937 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6938 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6939 gcc_assert (TREE_OPERAND (t, 0) == var);
6940 t = TREE_OPERAND (t, 1);
6941 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6942 || TREE_CODE (t) == MINUS_EXPR
6943 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6944 gcc_assert (TREE_OPERAND (t, 0) == var);
6945 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6946 TREE_OPERAND (t, 1));
6947 gimplify_assign (decl, t,
6948 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6953 BITMAP_FREE (has_decl_expr);
6955 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
6957 if (orig_for_stmt != for_stmt)
6958 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6960 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6961 decl = TREE_OPERAND (t, 0);
6962 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6963 omp_add_variable (gimplify_omp_ctxp, var, govd_private | GOVD_SEEN);
6964 TREE_OPERAND (t, 0) = var;
6965 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6966 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
6967 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
6970 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt));
6972 int kind;
6973 switch (TREE_CODE (orig_for_stmt))
6975 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
6976 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
6977 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
6978 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
6979 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
6980 default:
6981 gcc_unreachable ();
6983 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
6984 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6985 for_pre_body);
6986 if (orig_for_stmt != for_stmt)
6987 gimple_omp_for_set_combined_p (gfor, true);
6988 if (gimplify_omp_ctxp
6989 && (gimplify_omp_ctxp->combined_loop
6990 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
6991 && gimplify_omp_ctxp->outer_context
6992 && gimplify_omp_ctxp->outer_context->combined_loop)))
6994 gimple_omp_for_set_combined_into_p (gfor, true);
6995 if (gimplify_omp_ctxp->combined_loop)
6996 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
6997 else
6998 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
7001 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7003 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7004 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
7005 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
7006 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7007 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
7008 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
7009 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7010 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
7013 gimplify_seq_add_stmt (pre_p, gfor);
7014 if (ret != GS_ALL_DONE)
7015 return GS_ERROR;
7016 *expr_p = NULL_TREE;
7017 return GS_ALL_DONE;
7020 /* Gimplify the gross structure of several OpenACC or OpenMP constructs. */
7022 static void
7023 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
7025 tree expr = *expr_p;
7026 gimple stmt;
7027 gimple_seq body = NULL;
7028 enum omp_region_type ort;
7030 switch (TREE_CODE (expr))
7032 case OACC_DATA:
7033 ort = (enum omp_region_type) (ORT_TARGET
7034 | ORT_TARGET_MAP_FORCE);
7035 break;
7036 case OACC_KERNELS:
7037 case OACC_PARALLEL:
7038 ort = (enum omp_region_type) (ORT_TARGET
7039 | ORT_TARGET_OFFLOAD
7040 | ORT_TARGET_MAP_FORCE);
7041 break;
7042 case OMP_SECTIONS:
7043 case OMP_SINGLE:
7044 ort = ORT_WORKSHARE;
7045 break;
7046 case OMP_TARGET:
7047 ort = (enum omp_region_type) (ORT_TARGET | ORT_TARGET_OFFLOAD);
7048 break;
7049 case OMP_TARGET_DATA:
7050 ort = ORT_TARGET;
7051 break;
7052 case OMP_TEAMS:
7053 ort = ORT_TEAMS;
7054 break;
7055 default:
7056 gcc_unreachable ();
7058 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
7059 if (ort & ORT_TARGET)
7061 push_gimplify_context ();
7062 gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
7063 if (gimple_code (g) == GIMPLE_BIND)
7064 pop_gimplify_context (g);
7065 else
7066 pop_gimplify_context (NULL);
7067 if (!(ort & ORT_TARGET_OFFLOAD))
7069 enum built_in_function end_ix;
7070 switch (TREE_CODE (expr))
7072 case OACC_DATA:
7073 end_ix = BUILT_IN_GOACC_DATA_END;
7074 break;
7075 case OMP_TARGET_DATA:
7076 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
7077 break;
7078 default:
7079 gcc_unreachable ();
7081 tree fn = builtin_decl_explicit (end_ix);
7082 g = gimple_build_call (fn, 0);
7083 gimple_seq cleanup = NULL;
7084 gimple_seq_add_stmt (&cleanup, g);
7085 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7086 body = NULL;
7087 gimple_seq_add_stmt (&body, g);
7090 else
7091 gimplify_and_add (OMP_BODY (expr), &body);
7092 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
7094 switch (TREE_CODE (expr))
7096 case OACC_DATA:
7097 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
7098 OACC_DATA_CLAUSES (expr));
7099 break;
7100 case OACC_KERNELS:
7101 stmt = gimple_build_oacc_kernels (body, OACC_KERNELS_CLAUSES (expr));
7102 break;
7103 case OACC_PARALLEL:
7104 stmt = gimple_build_oacc_parallel (body, OACC_PARALLEL_CLAUSES (expr));
7105 break;
7106 case OMP_SECTIONS:
7107 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
7108 break;
7109 case OMP_SINGLE:
7110 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
7111 break;
7112 case OMP_TARGET:
7113 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
7114 OMP_CLAUSES (expr));
7115 break;
7116 case OMP_TARGET_DATA:
7117 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
7118 OMP_CLAUSES (expr));
7119 break;
7120 case OMP_TEAMS:
7121 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
7122 break;
7123 default:
7124 gcc_unreachable ();
7127 gimplify_seq_add_stmt (pre_p, stmt);
7128 *expr_p = NULL_TREE;
7131 /* Gimplify the gross structure of OpenMP target update construct. */
7133 static void
7134 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
7136 tree expr = *expr_p;
7137 gimple stmt;
7139 gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
7140 ORT_WORKSHARE);
7141 gimplify_adjust_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr));
7142 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
7143 OMP_TARGET_UPDATE_CLAUSES (expr));
7145 gimplify_seq_add_stmt (pre_p, stmt);
7146 *expr_p = NULL_TREE;
7149 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
7150 stabilized the lhs of the atomic operation as *ADDR. Return true if
7151 EXPR is this stabilized form. */
7153 static bool
7154 goa_lhs_expr_p (tree expr, tree addr)
7156 /* Also include casts to other type variants. The C front end is fond
7157 of adding these for e.g. volatile variables. This is like
7158 STRIP_TYPE_NOPS but includes the main variant lookup. */
7159 STRIP_USELESS_TYPE_CONVERSION (expr);
7161 if (TREE_CODE (expr) == INDIRECT_REF)
7163 expr = TREE_OPERAND (expr, 0);
7164 while (expr != addr
7165 && (CONVERT_EXPR_P (expr)
7166 || TREE_CODE (expr) == NON_LVALUE_EXPR)
7167 && TREE_CODE (expr) == TREE_CODE (addr)
7168 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
7170 expr = TREE_OPERAND (expr, 0);
7171 addr = TREE_OPERAND (addr, 0);
7173 if (expr == addr)
7174 return true;
7175 return (TREE_CODE (addr) == ADDR_EXPR
7176 && TREE_CODE (expr) == ADDR_EXPR
7177 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
7179 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
7180 return true;
7181 return false;
7184 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
7185 expression does not involve the lhs, evaluate it into a temporary.
7186 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
7187 or -1 if an error was encountered. */
7189 static int
7190 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
7191 tree lhs_var)
7193 tree expr = *expr_p;
7194 int saw_lhs;
7196 if (goa_lhs_expr_p (expr, lhs_addr))
7198 *expr_p = lhs_var;
7199 return 1;
7201 if (is_gimple_val (expr))
7202 return 0;
7204 saw_lhs = 0;
7205 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
7207 case tcc_binary:
7208 case tcc_comparison:
7209 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
7210 lhs_var);
7211 case tcc_unary:
7212 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
7213 lhs_var);
7214 break;
7215 case tcc_expression:
7216 switch (TREE_CODE (expr))
7218 case TRUTH_ANDIF_EXPR:
7219 case TRUTH_ORIF_EXPR:
7220 case TRUTH_AND_EXPR:
7221 case TRUTH_OR_EXPR:
7222 case TRUTH_XOR_EXPR:
7223 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
7224 lhs_addr, lhs_var);
7225 case TRUTH_NOT_EXPR:
7226 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
7227 lhs_addr, lhs_var);
7228 break;
7229 case COMPOUND_EXPR:
7230 /* Break out any preevaluations from cp_build_modify_expr. */
7231 for (; TREE_CODE (expr) == COMPOUND_EXPR;
7232 expr = TREE_OPERAND (expr, 1))
7233 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
7234 *expr_p = expr;
7235 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
7236 default:
7237 break;
7239 break;
7240 default:
7241 break;
7244 if (saw_lhs == 0)
7246 enum gimplify_status gs;
7247 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
7248 if (gs != GS_ALL_DONE)
7249 saw_lhs = -1;
7252 return saw_lhs;
7255 /* Gimplify an OMP_ATOMIC statement. */
7257 static enum gimplify_status
7258 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
7260 tree addr = TREE_OPERAND (*expr_p, 0);
7261 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
7262 ? NULL : TREE_OPERAND (*expr_p, 1);
7263 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7264 tree tmp_load;
7265 gimple loadstmt, storestmt;
7267 tmp_load = create_tmp_reg (type, NULL);
7268 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
7269 return GS_ERROR;
7271 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
7272 != GS_ALL_DONE)
7273 return GS_ERROR;
7275 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
7276 gimplify_seq_add_stmt (pre_p, loadstmt);
7277 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
7278 != GS_ALL_DONE)
7279 return GS_ERROR;
7281 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
7282 rhs = tmp_load;
7283 storestmt = gimple_build_omp_atomic_store (rhs);
7284 gimplify_seq_add_stmt (pre_p, storestmt);
7285 if (OMP_ATOMIC_SEQ_CST (*expr_p))
7287 gimple_omp_atomic_set_seq_cst (loadstmt);
7288 gimple_omp_atomic_set_seq_cst (storestmt);
7290 switch (TREE_CODE (*expr_p))
7292 case OMP_ATOMIC_READ:
7293 case OMP_ATOMIC_CAPTURE_OLD:
7294 *expr_p = tmp_load;
7295 gimple_omp_atomic_set_need_value (loadstmt);
7296 break;
7297 case OMP_ATOMIC_CAPTURE_NEW:
7298 *expr_p = rhs;
7299 gimple_omp_atomic_set_need_value (storestmt);
7300 break;
7301 default:
7302 *expr_p = NULL;
7303 break;
7306 return GS_ALL_DONE;
7309 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
7310 body, and adding some EH bits. */
7312 static enum gimplify_status
7313 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7315 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7316 gimple g;
7317 gimple_seq body = NULL;
7318 int subcode = 0;
7320 /* Wrap the transaction body in a BIND_EXPR so we have a context
7321 where to put decls for OpenMP. */
7322 if (TREE_CODE (tbody) != BIND_EXPR)
7324 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7325 TREE_SIDE_EFFECTS (bind) = 1;
7326 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7327 TRANSACTION_EXPR_BODY (expr) = bind;
7330 push_gimplify_context ();
7331 temp = voidify_wrapper_expr (*expr_p, NULL);
7333 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7334 pop_gimplify_context (g);
7336 g = gimple_build_transaction (body, NULL);
7337 if (TRANSACTION_EXPR_OUTER (expr))
7338 subcode = GTMA_IS_OUTER;
7339 else if (TRANSACTION_EXPR_RELAXED (expr))
7340 subcode = GTMA_IS_RELAXED;
7341 gimple_transaction_set_subcode (g, subcode);
7343 gimplify_seq_add_stmt (pre_p, g);
7345 if (temp)
7347 *expr_p = temp;
7348 return GS_OK;
7351 *expr_p = NULL_TREE;
7352 return GS_ALL_DONE;
7355 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
7356 expression produces a value to be used as an operand inside a GIMPLE
7357 statement, the value will be stored back in *EXPR_P. This value will
7358 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7359 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7360 emitted in PRE_P and POST_P.
7362 Additionally, this process may overwrite parts of the input
7363 expression during gimplification. Ideally, it should be
7364 possible to do non-destructive gimplification.
7366 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7367 the expression needs to evaluate to a value to be used as
7368 an operand in a GIMPLE statement, this value will be stored in
7369 *EXPR_P on exit. This happens when the caller specifies one
7370 of fb_lvalue or fb_rvalue fallback flags.
7372 PRE_P will contain the sequence of GIMPLE statements corresponding
7373 to the evaluation of EXPR and all the side-effects that must
7374 be executed before the main expression. On exit, the last
7375 statement of PRE_P is the core statement being gimplified. For
7376 instance, when gimplifying 'if (++a)' the last statement in
7377 PRE_P will be 'if (t.1)' where t.1 is the result of
7378 pre-incrementing 'a'.
7380 POST_P will contain the sequence of GIMPLE statements corresponding
7381 to the evaluation of all the side-effects that must be executed
7382 after the main expression. If this is NULL, the post
7383 side-effects are stored at the end of PRE_P.
7385 The reason why the output is split in two is to handle post
7386 side-effects explicitly. In some cases, an expression may have
7387 inner and outer post side-effects which need to be emitted in
7388 an order different from the one given by the recursive
7389 traversal. For instance, for the expression (*p--)++ the post
7390 side-effects of '--' must actually occur *after* the post
7391 side-effects of '++'. However, gimplification will first visit
7392 the inner expression, so if a separate POST sequence was not
7393 used, the resulting sequence would be:
7395 1 t.1 = *p
7396 2 p = p - 1
7397 3 t.2 = t.1 + 1
7398 4 *p = t.2
7400 However, the post-decrement operation in line #2 must not be
7401 evaluated until after the store to *p at line #4, so the
7402 correct sequence should be:
7404 1 t.1 = *p
7405 2 t.2 = t.1 + 1
7406 3 *p = t.2
7407 4 p = p - 1
7409 So, by specifying a separate post queue, it is possible
7410 to emit the post side-effects in the correct order.
7411 If POST_P is NULL, an internal queue will be used. Before
7412 returning to the caller, the sequence POST_P is appended to
7413 the main output sequence PRE_P.
7415 GIMPLE_TEST_F points to a function that takes a tree T and
7416 returns nonzero if T is in the GIMPLE form requested by the
7417 caller. The GIMPLE predicates are in gimple.c.
7419 FALLBACK tells the function what sort of a temporary we want if
7420 gimplification cannot produce an expression that complies with
7421 GIMPLE_TEST_F.
7423 fb_none means that no temporary should be generated
7424 fb_rvalue means that an rvalue is OK to generate
7425 fb_lvalue means that an lvalue is OK to generate
7426 fb_either means that either is OK, but an lvalue is preferable.
7427 fb_mayfail means that gimplification may fail (in which case
7428 GS_ERROR will be returned)
7430 The return value is either GS_ERROR or GS_ALL_DONE, since this
7431 function iterates until EXPR is completely gimplified or an error
7432 occurs. */
7434 enum gimplify_status
7435 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7436 bool (*gimple_test_f) (tree), fallback_t fallback)
7438 tree tmp;
7439 gimple_seq internal_pre = NULL;
7440 gimple_seq internal_post = NULL;
7441 tree save_expr;
7442 bool is_statement;
7443 location_t saved_location;
7444 enum gimplify_status ret;
7445 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7447 save_expr = *expr_p;
7448 if (save_expr == NULL_TREE)
7449 return GS_ALL_DONE;
7451 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7452 is_statement = gimple_test_f == is_gimple_stmt;
7453 if (is_statement)
7454 gcc_assert (pre_p);
7456 /* Consistency checks. */
7457 if (gimple_test_f == is_gimple_reg)
7458 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7459 else if (gimple_test_f == is_gimple_val
7460 || gimple_test_f == is_gimple_call_addr
7461 || gimple_test_f == is_gimple_condexpr
7462 || gimple_test_f == is_gimple_mem_rhs
7463 || gimple_test_f == is_gimple_mem_rhs_or_call
7464 || gimple_test_f == is_gimple_reg_rhs
7465 || gimple_test_f == is_gimple_reg_rhs_or_call
7466 || gimple_test_f == is_gimple_asm_val
7467 || gimple_test_f == is_gimple_mem_ref_addr)
7468 gcc_assert (fallback & fb_rvalue);
7469 else if (gimple_test_f == is_gimple_min_lval
7470 || gimple_test_f == is_gimple_lvalue)
7471 gcc_assert (fallback & fb_lvalue);
7472 else if (gimple_test_f == is_gimple_addressable)
7473 gcc_assert (fallback & fb_either);
7474 else if (gimple_test_f == is_gimple_stmt)
7475 gcc_assert (fallback == fb_none);
7476 else
7478 /* We should have recognized the GIMPLE_TEST_F predicate to
7479 know what kind of fallback to use in case a temporary is
7480 needed to hold the value or address of *EXPR_P. */
7481 gcc_unreachable ();
7484 /* We used to check the predicate here and return immediately if it
7485 succeeds. This is wrong; the design is for gimplification to be
7486 idempotent, and for the predicates to only test for valid forms, not
7487 whether they are fully simplified. */
7488 if (pre_p == NULL)
7489 pre_p = &internal_pre;
7491 if (post_p == NULL)
7492 post_p = &internal_post;
7494 /* Remember the last statements added to PRE_P and POST_P. Every
7495 new statement added by the gimplification helpers needs to be
7496 annotated with location information. To centralize the
7497 responsibility, we remember the last statement that had been
7498 added to both queues before gimplifying *EXPR_P. If
7499 gimplification produces new statements in PRE_P and POST_P, those
7500 statements will be annotated with the same location information
7501 as *EXPR_P. */
7502 pre_last_gsi = gsi_last (*pre_p);
7503 post_last_gsi = gsi_last (*post_p);
7505 saved_location = input_location;
7506 if (save_expr != error_mark_node
7507 && EXPR_HAS_LOCATION (*expr_p))
7508 input_location = EXPR_LOCATION (*expr_p);
7510 /* Loop over the specific gimplifiers until the toplevel node
7511 remains the same. */
7514 /* Strip away as many useless type conversions as possible
7515 at the toplevel. */
7516 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7518 /* Remember the expr. */
7519 save_expr = *expr_p;
7521 /* Die, die, die, my darling. */
7522 if (save_expr == error_mark_node
7523 || (TREE_TYPE (save_expr)
7524 && TREE_TYPE (save_expr) == error_mark_node))
7526 ret = GS_ERROR;
7527 break;
7530 /* Do any language-specific gimplification. */
7531 ret = ((enum gimplify_status)
7532 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7533 if (ret == GS_OK)
7535 if (*expr_p == NULL_TREE)
7536 break;
7537 if (*expr_p != save_expr)
7538 continue;
7540 else if (ret != GS_UNHANDLED)
7541 break;
7543 /* Make sure that all the cases set 'ret' appropriately. */
7544 ret = GS_UNHANDLED;
7545 switch (TREE_CODE (*expr_p))
7547 /* First deal with the special cases. */
7549 case POSTINCREMENT_EXPR:
7550 case POSTDECREMENT_EXPR:
7551 case PREINCREMENT_EXPR:
7552 case PREDECREMENT_EXPR:
7553 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7554 fallback != fb_none,
7555 TREE_TYPE (*expr_p));
7556 break;
7558 case VIEW_CONVERT_EXPR:
7559 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
7560 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
7562 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7563 post_p, is_gimple_val, fb_rvalue);
7564 recalculate_side_effects (*expr_p);
7565 break;
7567 /* Fallthru. */
7569 case ARRAY_REF:
7570 case ARRAY_RANGE_REF:
7571 case REALPART_EXPR:
7572 case IMAGPART_EXPR:
7573 case COMPONENT_REF:
7574 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7575 fallback ? fallback : fb_rvalue);
7576 break;
7578 case COND_EXPR:
7579 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7581 /* C99 code may assign to an array in a structure value of a
7582 conditional expression, and this has undefined behavior
7583 only on execution, so create a temporary if an lvalue is
7584 required. */
7585 if (fallback == fb_lvalue)
7587 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7588 mark_addressable (*expr_p);
7589 ret = GS_OK;
7591 break;
7593 case CALL_EXPR:
7594 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7596 /* C99 code may assign to an array in a structure returned
7597 from a function, and this has undefined behavior only on
7598 execution, so create a temporary if an lvalue is
7599 required. */
7600 if (fallback == fb_lvalue)
7602 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7603 mark_addressable (*expr_p);
7604 ret = GS_OK;
7606 break;
7608 case TREE_LIST:
7609 gcc_unreachable ();
7611 case COMPOUND_EXPR:
7612 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7613 break;
7615 case COMPOUND_LITERAL_EXPR:
7616 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7617 gimple_test_f, fallback);
7618 break;
7620 case MODIFY_EXPR:
7621 case INIT_EXPR:
7622 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7623 fallback != fb_none);
7624 break;
7626 case TRUTH_ANDIF_EXPR:
7627 case TRUTH_ORIF_EXPR:
7629 /* Preserve the original type of the expression and the
7630 source location of the outer expression. */
7631 tree org_type = TREE_TYPE (*expr_p);
7632 *expr_p = gimple_boolify (*expr_p);
7633 *expr_p = build3_loc (input_location, COND_EXPR,
7634 org_type, *expr_p,
7635 fold_convert_loc
7636 (input_location,
7637 org_type, boolean_true_node),
7638 fold_convert_loc
7639 (input_location,
7640 org_type, boolean_false_node));
7641 ret = GS_OK;
7642 break;
7645 case TRUTH_NOT_EXPR:
7647 tree type = TREE_TYPE (*expr_p);
7648 /* The parsers are careful to generate TRUTH_NOT_EXPR
7649 only with operands that are always zero or one.
7650 We do not fold here but handle the only interesting case
7651 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7652 *expr_p = gimple_boolify (*expr_p);
7653 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7654 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7655 TREE_TYPE (*expr_p),
7656 TREE_OPERAND (*expr_p, 0));
7657 else
7658 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7659 TREE_TYPE (*expr_p),
7660 TREE_OPERAND (*expr_p, 0),
7661 build_int_cst (TREE_TYPE (*expr_p), 1));
7662 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7663 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7664 ret = GS_OK;
7665 break;
7668 case ADDR_EXPR:
7669 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7670 break;
7672 case ANNOTATE_EXPR:
7674 tree cond = TREE_OPERAND (*expr_p, 0);
7675 tree id = TREE_OPERAND (*expr_p, 1);
7676 tree type = TREE_TYPE (cond);
7677 if (!INTEGRAL_TYPE_P (type))
7679 *expr_p = cond;
7680 ret = GS_OK;
7681 break;
7683 tree tmp = create_tmp_var (type, NULL);
7684 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
7685 gimple call = gimple_build_call_internal (IFN_ANNOTATE, 2,
7686 cond, id);
7687 gimple_call_set_lhs (call, tmp);
7688 gimplify_seq_add_stmt (pre_p, call);
7689 *expr_p = tmp;
7690 ret = GS_ALL_DONE;
7691 break;
7694 case VA_ARG_EXPR:
7695 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7696 break;
7698 CASE_CONVERT:
7699 if (IS_EMPTY_STMT (*expr_p))
7701 ret = GS_ALL_DONE;
7702 break;
7705 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7706 || fallback == fb_none)
7708 /* Just strip a conversion to void (or in void context) and
7709 try again. */
7710 *expr_p = TREE_OPERAND (*expr_p, 0);
7711 ret = GS_OK;
7712 break;
7715 ret = gimplify_conversion (expr_p);
7716 if (ret == GS_ERROR)
7717 break;
7718 if (*expr_p != save_expr)
7719 break;
7720 /* FALLTHRU */
7722 case FIX_TRUNC_EXPR:
7723 /* unary_expr: ... | '(' cast ')' val | ... */
7724 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7725 is_gimple_val, fb_rvalue);
7726 recalculate_side_effects (*expr_p);
7727 break;
7729 case INDIRECT_REF:
7731 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7732 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7733 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7735 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7736 if (*expr_p != save_expr)
7738 ret = GS_OK;
7739 break;
7742 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7743 is_gimple_reg, fb_rvalue);
7744 if (ret == GS_ERROR)
7745 break;
7747 recalculate_side_effects (*expr_p);
7748 *expr_p = fold_build2_loc (input_location, MEM_REF,
7749 TREE_TYPE (*expr_p),
7750 TREE_OPERAND (*expr_p, 0),
7751 build_int_cst (saved_ptr_type, 0));
7752 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7753 TREE_THIS_NOTRAP (*expr_p) = notrap;
7754 ret = GS_OK;
7755 break;
7758 /* We arrive here through the various re-gimplifcation paths. */
7759 case MEM_REF:
7760 /* First try re-folding the whole thing. */
7761 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7762 TREE_OPERAND (*expr_p, 0),
7763 TREE_OPERAND (*expr_p, 1));
7764 if (tmp)
7766 *expr_p = tmp;
7767 recalculate_side_effects (*expr_p);
7768 ret = GS_OK;
7769 break;
7771 /* Avoid re-gimplifying the address operand if it is already
7772 in suitable form. Re-gimplifying would mark the address
7773 operand addressable. Always gimplify when not in SSA form
7774 as we still may have to gimplify decls with value-exprs. */
7775 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7776 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7778 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7779 is_gimple_mem_ref_addr, fb_rvalue);
7780 if (ret == GS_ERROR)
7781 break;
7783 recalculate_side_effects (*expr_p);
7784 ret = GS_ALL_DONE;
7785 break;
7787 /* Constants need not be gimplified. */
7788 case INTEGER_CST:
7789 case REAL_CST:
7790 case FIXED_CST:
7791 case STRING_CST:
7792 case COMPLEX_CST:
7793 case VECTOR_CST:
7794 /* Drop the overflow flag on constants, we do not want
7795 that in the GIMPLE IL. */
7796 if (TREE_OVERFLOW_P (*expr_p))
7797 *expr_p = drop_tree_overflow (*expr_p);
7798 ret = GS_ALL_DONE;
7799 break;
7801 case CONST_DECL:
7802 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7803 CONST_DECL node. Otherwise the decl is replaceable by its
7804 value. */
7805 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7806 if (fallback & fb_lvalue)
7807 ret = GS_ALL_DONE;
7808 else
7810 *expr_p = DECL_INITIAL (*expr_p);
7811 ret = GS_OK;
7813 break;
7815 case DECL_EXPR:
7816 ret = gimplify_decl_expr (expr_p, pre_p);
7817 break;
7819 case BIND_EXPR:
7820 ret = gimplify_bind_expr (expr_p, pre_p);
7821 break;
7823 case LOOP_EXPR:
7824 ret = gimplify_loop_expr (expr_p, pre_p);
7825 break;
7827 case SWITCH_EXPR:
7828 ret = gimplify_switch_expr (expr_p, pre_p);
7829 break;
7831 case EXIT_EXPR:
7832 ret = gimplify_exit_expr (expr_p);
7833 break;
7835 case GOTO_EXPR:
7836 /* If the target is not LABEL, then it is a computed jump
7837 and the target needs to be gimplified. */
7838 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7840 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7841 NULL, is_gimple_val, fb_rvalue);
7842 if (ret == GS_ERROR)
7843 break;
7845 gimplify_seq_add_stmt (pre_p,
7846 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7847 ret = GS_ALL_DONE;
7848 break;
7850 case PREDICT_EXPR:
7851 gimplify_seq_add_stmt (pre_p,
7852 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7853 PREDICT_EXPR_OUTCOME (*expr_p)));
7854 ret = GS_ALL_DONE;
7855 break;
7857 case LABEL_EXPR:
7858 ret = GS_ALL_DONE;
7859 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7860 == current_function_decl);
7861 gimplify_seq_add_stmt (pre_p,
7862 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7863 break;
7865 case CASE_LABEL_EXPR:
7866 ret = gimplify_case_label_expr (expr_p, pre_p);
7867 break;
7869 case RETURN_EXPR:
7870 ret = gimplify_return_expr (*expr_p, pre_p);
7871 break;
7873 case CONSTRUCTOR:
7874 /* Don't reduce this in place; let gimplify_init_constructor work its
7875 magic. Buf if we're just elaborating this for side effects, just
7876 gimplify any element that has side-effects. */
7877 if (fallback == fb_none)
7879 unsigned HOST_WIDE_INT ix;
7880 tree val;
7881 tree temp = NULL_TREE;
7882 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7883 if (TREE_SIDE_EFFECTS (val))
7884 append_to_statement_list (val, &temp);
7886 *expr_p = temp;
7887 ret = temp ? GS_OK : GS_ALL_DONE;
7889 /* C99 code may assign to an array in a constructed
7890 structure or union, and this has undefined behavior only
7891 on execution, so create a temporary if an lvalue is
7892 required. */
7893 else if (fallback == fb_lvalue)
7895 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7896 mark_addressable (*expr_p);
7897 ret = GS_OK;
7899 else
7900 ret = GS_ALL_DONE;
7901 break;
7903 /* The following are special cases that are not handled by the
7904 original GIMPLE grammar. */
7906 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7907 eliminated. */
7908 case SAVE_EXPR:
7909 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7910 break;
7912 case BIT_FIELD_REF:
7913 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7914 post_p, is_gimple_lvalue, fb_either);
7915 recalculate_side_effects (*expr_p);
7916 break;
7918 case TARGET_MEM_REF:
7920 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7922 if (TMR_BASE (*expr_p))
7923 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7924 post_p, is_gimple_mem_ref_addr, fb_either);
7925 if (TMR_INDEX (*expr_p))
7926 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7927 post_p, is_gimple_val, fb_rvalue);
7928 if (TMR_INDEX2 (*expr_p))
7929 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7930 post_p, is_gimple_val, fb_rvalue);
7931 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7932 ret = MIN (r0, r1);
7934 break;
7936 case NON_LVALUE_EXPR:
7937 /* This should have been stripped above. */
7938 gcc_unreachable ();
7940 case ASM_EXPR:
7941 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7942 break;
7944 case TRY_FINALLY_EXPR:
7945 case TRY_CATCH_EXPR:
7947 gimple_seq eval, cleanup;
7948 gimple try_;
7950 /* Calls to destructors are generated automatically in FINALLY/CATCH
7951 block. They should have location as UNKNOWN_LOCATION. However,
7952 gimplify_call_expr will reset these call stmts to input_location
7953 if it finds stmt's location is unknown. To prevent resetting for
7954 destructors, we set the input_location to unknown.
7955 Note that this only affects the destructor calls in FINALLY/CATCH
7956 block, and will automatically reset to its original value by the
7957 end of gimplify_expr. */
7958 input_location = UNKNOWN_LOCATION;
7959 eval = cleanup = NULL;
7960 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7961 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7962 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7963 if (gimple_seq_empty_p (cleanup))
7965 gimple_seq_add_seq (pre_p, eval);
7966 ret = GS_ALL_DONE;
7967 break;
7969 try_ = gimple_build_try (eval, cleanup,
7970 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7971 ? GIMPLE_TRY_FINALLY
7972 : GIMPLE_TRY_CATCH);
7973 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
7974 gimple_set_location (try_, saved_location);
7975 else
7976 gimple_set_location (try_, EXPR_LOCATION (save_expr));
7977 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7978 gimple_try_set_catch_is_cleanup (try_,
7979 TRY_CATCH_IS_CLEANUP (*expr_p));
7980 gimplify_seq_add_stmt (pre_p, try_);
7981 ret = GS_ALL_DONE;
7982 break;
7985 case CLEANUP_POINT_EXPR:
7986 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7987 break;
7989 case TARGET_EXPR:
7990 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7991 break;
7993 case CATCH_EXPR:
7995 gimple c;
7996 gimple_seq handler = NULL;
7997 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7998 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7999 gimplify_seq_add_stmt (pre_p, c);
8000 ret = GS_ALL_DONE;
8001 break;
8004 case EH_FILTER_EXPR:
8006 gimple ehf;
8007 gimple_seq failure = NULL;
8009 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
8010 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
8011 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
8012 gimplify_seq_add_stmt (pre_p, ehf);
8013 ret = GS_ALL_DONE;
8014 break;
8017 case OBJ_TYPE_REF:
8019 enum gimplify_status r0, r1;
8020 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
8021 post_p, is_gimple_val, fb_rvalue);
8022 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
8023 post_p, is_gimple_val, fb_rvalue);
8024 TREE_SIDE_EFFECTS (*expr_p) = 0;
8025 ret = MIN (r0, r1);
8027 break;
8029 case LABEL_DECL:
8030 /* We get here when taking the address of a label. We mark
8031 the label as "forced"; meaning it can never be removed and
8032 it is a potential target for any computed goto. */
8033 FORCED_LABEL (*expr_p) = 1;
8034 ret = GS_ALL_DONE;
8035 break;
8037 case STATEMENT_LIST:
8038 ret = gimplify_statement_list (expr_p, pre_p);
8039 break;
8041 case WITH_SIZE_EXPR:
8043 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8044 post_p == &internal_post ? NULL : post_p,
8045 gimple_test_f, fallback);
8046 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8047 is_gimple_val, fb_rvalue);
8048 ret = GS_ALL_DONE;
8050 break;
8052 case VAR_DECL:
8053 case PARM_DECL:
8054 ret = gimplify_var_or_parm_decl (expr_p);
8055 break;
8057 case RESULT_DECL:
8058 /* When within an OpenMP context, notice uses of variables. */
8059 if (gimplify_omp_ctxp)
8060 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
8061 ret = GS_ALL_DONE;
8062 break;
8064 case SSA_NAME:
8065 /* Allow callbacks into the gimplifier during optimization. */
8066 ret = GS_ALL_DONE;
8067 break;
8069 case OACC_HOST_DATA:
8070 case OACC_DECLARE:
8071 case OACC_UPDATE:
8072 case OACC_ENTER_DATA:
8073 case OACC_EXIT_DATA:
8074 case OACC_WAIT:
8075 case OACC_CACHE:
8076 sorry ("directive not yet implemented");
8077 ret = GS_ALL_DONE;
8078 break;
8080 case OMP_PARALLEL:
8081 gimplify_omp_parallel (expr_p, pre_p);
8082 ret = GS_ALL_DONE;
8083 break;
8085 case OMP_TASK:
8086 gimplify_omp_task (expr_p, pre_p);
8087 ret = GS_ALL_DONE;
8088 break;
8090 case OMP_FOR:
8091 case OMP_SIMD:
8092 case CILK_SIMD:
8093 case OMP_DISTRIBUTE:
8094 case OACC_LOOP:
8095 ret = gimplify_omp_for (expr_p, pre_p);
8096 break;
8098 case OACC_KERNELS:
8099 if (OACC_KERNELS_COMBINED (*expr_p))
8100 sorry ("directive not yet implemented");
8101 else
8102 gimplify_omp_workshare (expr_p, pre_p);
8103 ret = GS_ALL_DONE;
8104 break;
8106 case OACC_PARALLEL:
8107 if (OACC_PARALLEL_COMBINED (*expr_p))
8108 sorry ("directive not yet implemented");
8109 else
8110 gimplify_omp_workshare (expr_p, pre_p);
8111 ret = GS_ALL_DONE;
8112 break;
8114 case OACC_DATA:
8115 case OMP_SECTIONS:
8116 case OMP_SINGLE:
8117 case OMP_TARGET:
8118 case OMP_TARGET_DATA:
8119 case OMP_TEAMS:
8120 gimplify_omp_workshare (expr_p, pre_p);
8121 ret = GS_ALL_DONE;
8122 break;
8124 case OMP_TARGET_UPDATE:
8125 gimplify_omp_target_update (expr_p, pre_p);
8126 ret = GS_ALL_DONE;
8127 break;
8129 case OMP_SECTION:
8130 case OMP_MASTER:
8131 case OMP_TASKGROUP:
8132 case OMP_ORDERED:
8133 case OMP_CRITICAL:
8135 gimple_seq body = NULL;
8136 gimple g;
8138 gimplify_and_add (OMP_BODY (*expr_p), &body);
8139 switch (TREE_CODE (*expr_p))
8141 case OMP_SECTION:
8142 g = gimple_build_omp_section (body);
8143 break;
8144 case OMP_MASTER:
8145 g = gimple_build_omp_master (body);
8146 break;
8147 case OMP_TASKGROUP:
8149 gimple_seq cleanup = NULL;
8150 tree fn
8151 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
8152 g = gimple_build_call (fn, 0);
8153 gimple_seq_add_stmt (&cleanup, g);
8154 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
8155 body = NULL;
8156 gimple_seq_add_stmt (&body, g);
8157 g = gimple_build_omp_taskgroup (body);
8159 break;
8160 case OMP_ORDERED:
8161 g = gimple_build_omp_ordered (body);
8162 break;
8163 case OMP_CRITICAL:
8164 g = gimple_build_omp_critical (body,
8165 OMP_CRITICAL_NAME (*expr_p));
8166 break;
8167 default:
8168 gcc_unreachable ();
8170 gimplify_seq_add_stmt (pre_p, g);
8171 ret = GS_ALL_DONE;
8172 break;
8175 case OMP_ATOMIC:
8176 case OMP_ATOMIC_READ:
8177 case OMP_ATOMIC_CAPTURE_OLD:
8178 case OMP_ATOMIC_CAPTURE_NEW:
8179 ret = gimplify_omp_atomic (expr_p, pre_p);
8180 break;
8182 case TRANSACTION_EXPR:
8183 ret = gimplify_transaction (expr_p, pre_p);
8184 break;
8186 case TRUTH_AND_EXPR:
8187 case TRUTH_OR_EXPR:
8188 case TRUTH_XOR_EXPR:
8190 tree orig_type = TREE_TYPE (*expr_p);
8191 tree new_type, xop0, xop1;
8192 *expr_p = gimple_boolify (*expr_p);
8193 new_type = TREE_TYPE (*expr_p);
8194 if (!useless_type_conversion_p (orig_type, new_type))
8196 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
8197 ret = GS_OK;
8198 break;
8201 /* Boolified binary truth expressions are semantically equivalent
8202 to bitwise binary expressions. Canonicalize them to the
8203 bitwise variant. */
8204 switch (TREE_CODE (*expr_p))
8206 case TRUTH_AND_EXPR:
8207 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
8208 break;
8209 case TRUTH_OR_EXPR:
8210 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
8211 break;
8212 case TRUTH_XOR_EXPR:
8213 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
8214 break;
8215 default:
8216 break;
8218 /* Now make sure that operands have compatible type to
8219 expression's new_type. */
8220 xop0 = TREE_OPERAND (*expr_p, 0);
8221 xop1 = TREE_OPERAND (*expr_p, 1);
8222 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8223 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8224 new_type,
8225 xop0);
8226 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8227 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8228 new_type,
8229 xop1);
8230 /* Continue classified as tcc_binary. */
8231 goto expr_2;
8234 case FMA_EXPR:
8235 case VEC_COND_EXPR:
8236 case VEC_PERM_EXPR:
8237 /* Classified as tcc_expression. */
8238 goto expr_3;
8240 case POINTER_PLUS_EXPR:
8242 enum gimplify_status r0, r1;
8243 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8244 post_p, is_gimple_val, fb_rvalue);
8245 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8246 post_p, is_gimple_val, fb_rvalue);
8247 recalculate_side_effects (*expr_p);
8248 ret = MIN (r0, r1);
8249 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
8250 after gimplifying operands - this is similar to how
8251 it would be folding all gimplified stmts on creation
8252 to have them canonicalized, which is what we eventually
8253 should do anyway. */
8254 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
8255 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
8257 *expr_p = build_fold_addr_expr_with_type_loc
8258 (input_location,
8259 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
8260 TREE_OPERAND (*expr_p, 0),
8261 fold_convert (ptr_type_node,
8262 TREE_OPERAND (*expr_p, 1))),
8263 TREE_TYPE (*expr_p));
8264 ret = MIN (ret, GS_OK);
8266 break;
8269 case CILK_SYNC_STMT:
8271 if (!fn_contains_cilk_spawn_p (cfun))
8273 error_at (EXPR_LOCATION (*expr_p),
8274 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8275 ret = GS_ERROR;
8277 else
8279 gimplify_cilk_sync (expr_p, pre_p);
8280 ret = GS_ALL_DONE;
8282 break;
8285 default:
8286 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
8288 case tcc_comparison:
8289 /* Handle comparison of objects of non scalar mode aggregates
8290 with a call to memcmp. It would be nice to only have to do
8291 this for variable-sized objects, but then we'd have to allow
8292 the same nest of reference nodes we allow for MODIFY_EXPR and
8293 that's too complex.
8295 Compare scalar mode aggregates as scalar mode values. Using
8296 memcmp for them would be very inefficient at best, and is
8297 plain wrong if bitfields are involved. */
8299 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
8301 /* Vector comparisons need no boolification. */
8302 if (TREE_CODE (type) == VECTOR_TYPE)
8303 goto expr_2;
8304 else if (!AGGREGATE_TYPE_P (type))
8306 tree org_type = TREE_TYPE (*expr_p);
8307 *expr_p = gimple_boolify (*expr_p);
8308 if (!useless_type_conversion_p (org_type,
8309 TREE_TYPE (*expr_p)))
8311 *expr_p = fold_convert_loc (input_location,
8312 org_type, *expr_p);
8313 ret = GS_OK;
8315 else
8316 goto expr_2;
8318 else if (TYPE_MODE (type) != BLKmode)
8319 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8320 else
8321 ret = gimplify_variable_sized_compare (expr_p);
8323 break;
8326 /* If *EXPR_P does not need to be special-cased, handle it
8327 according to its class. */
8328 case tcc_unary:
8329 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8330 post_p, is_gimple_val, fb_rvalue);
8331 break;
8333 case tcc_binary:
8334 expr_2:
8336 enum gimplify_status r0, r1;
8338 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8339 post_p, is_gimple_val, fb_rvalue);
8340 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8341 post_p, is_gimple_val, fb_rvalue);
8343 ret = MIN (r0, r1);
8344 break;
8347 expr_3:
8349 enum gimplify_status r0, r1, r2;
8351 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8352 post_p, is_gimple_val, fb_rvalue);
8353 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8354 post_p, is_gimple_val, fb_rvalue);
8355 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8356 post_p, is_gimple_val, fb_rvalue);
8358 ret = MIN (MIN (r0, r1), r2);
8359 break;
8362 case tcc_declaration:
8363 case tcc_constant:
8364 ret = GS_ALL_DONE;
8365 goto dont_recalculate;
8367 default:
8368 gcc_unreachable ();
8371 recalculate_side_effects (*expr_p);
8373 dont_recalculate:
8374 break;
8377 gcc_assert (*expr_p || ret != GS_OK);
8379 while (ret == GS_OK);
8381 /* If we encountered an error_mark somewhere nested inside, either
8382 stub out the statement or propagate the error back out. */
8383 if (ret == GS_ERROR)
8385 if (is_statement)
8386 *expr_p = NULL;
8387 goto out;
8390 /* This was only valid as a return value from the langhook, which
8391 we handled. Make sure it doesn't escape from any other context. */
8392 gcc_assert (ret != GS_UNHANDLED);
8394 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8396 /* We aren't looking for a value, and we don't have a valid
8397 statement. If it doesn't have side-effects, throw it away. */
8398 if (!TREE_SIDE_EFFECTS (*expr_p))
8399 *expr_p = NULL;
8400 else if (!TREE_THIS_VOLATILE (*expr_p))
8402 /* This is probably a _REF that contains something nested that
8403 has side effects. Recurse through the operands to find it. */
8404 enum tree_code code = TREE_CODE (*expr_p);
8406 switch (code)
8408 case COMPONENT_REF:
8409 case REALPART_EXPR:
8410 case IMAGPART_EXPR:
8411 case VIEW_CONVERT_EXPR:
8412 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8413 gimple_test_f, fallback);
8414 break;
8416 case ARRAY_REF:
8417 case ARRAY_RANGE_REF:
8418 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8419 gimple_test_f, fallback);
8420 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8421 gimple_test_f, fallback);
8422 break;
8424 default:
8425 /* Anything else with side-effects must be converted to
8426 a valid statement before we get here. */
8427 gcc_unreachable ();
8430 *expr_p = NULL;
8432 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8433 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8435 /* Historically, the compiler has treated a bare reference
8436 to a non-BLKmode volatile lvalue as forcing a load. */
8437 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8439 /* Normally, we do not want to create a temporary for a
8440 TREE_ADDRESSABLE type because such a type should not be
8441 copied by bitwise-assignment. However, we make an
8442 exception here, as all we are doing here is ensuring that
8443 we read the bytes that make up the type. We use
8444 create_tmp_var_raw because create_tmp_var will abort when
8445 given a TREE_ADDRESSABLE type. */
8446 tree tmp = create_tmp_var_raw (type, "vol");
8447 gimple_add_tmp_var (tmp);
8448 gimplify_assign (tmp, *expr_p, pre_p);
8449 *expr_p = NULL;
8451 else
8452 /* We can't do anything useful with a volatile reference to
8453 an incomplete type, so just throw it away. Likewise for
8454 a BLKmode type, since any implicit inner load should
8455 already have been turned into an explicit one by the
8456 gimplification process. */
8457 *expr_p = NULL;
8460 /* If we are gimplifying at the statement level, we're done. Tack
8461 everything together and return. */
8462 if (fallback == fb_none || is_statement)
8464 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8465 it out for GC to reclaim it. */
8466 *expr_p = NULL_TREE;
8468 if (!gimple_seq_empty_p (internal_pre)
8469 || !gimple_seq_empty_p (internal_post))
8471 gimplify_seq_add_seq (&internal_pre, internal_post);
8472 gimplify_seq_add_seq (pre_p, internal_pre);
8475 /* The result of gimplifying *EXPR_P is going to be the last few
8476 statements in *PRE_P and *POST_P. Add location information
8477 to all the statements that were added by the gimplification
8478 helpers. */
8479 if (!gimple_seq_empty_p (*pre_p))
8480 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8482 if (!gimple_seq_empty_p (*post_p))
8483 annotate_all_with_location_after (*post_p, post_last_gsi,
8484 input_location);
8486 goto out;
8489 #ifdef ENABLE_GIMPLE_CHECKING
8490 if (*expr_p)
8492 enum tree_code code = TREE_CODE (*expr_p);
8493 /* These expressions should already be in gimple IR form. */
8494 gcc_assert (code != MODIFY_EXPR
8495 && code != ASM_EXPR
8496 && code != BIND_EXPR
8497 && code != CATCH_EXPR
8498 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8499 && code != EH_FILTER_EXPR
8500 && code != GOTO_EXPR
8501 && code != LABEL_EXPR
8502 && code != LOOP_EXPR
8503 && code != SWITCH_EXPR
8504 && code != TRY_FINALLY_EXPR
8505 && code != OACC_PARALLEL
8506 && code != OACC_KERNELS
8507 && code != OACC_DATA
8508 && code != OACC_HOST_DATA
8509 && code != OACC_DECLARE
8510 && code != OACC_UPDATE
8511 && code != OACC_ENTER_DATA
8512 && code != OACC_EXIT_DATA
8513 && code != OACC_WAIT
8514 && code != OACC_CACHE
8515 && code != OMP_CRITICAL
8516 && code != OMP_FOR
8517 && code != OACC_LOOP
8518 && code != OMP_MASTER
8519 && code != OMP_TASKGROUP
8520 && code != OMP_ORDERED
8521 && code != OMP_PARALLEL
8522 && code != OMP_SECTIONS
8523 && code != OMP_SECTION
8524 && code != OMP_SINGLE);
8526 #endif
8528 /* Otherwise we're gimplifying a subexpression, so the resulting
8529 value is interesting. If it's a valid operand that matches
8530 GIMPLE_TEST_F, we're done. Unless we are handling some
8531 post-effects internally; if that's the case, we need to copy into
8532 a temporary before adding the post-effects to POST_P. */
8533 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
8534 goto out;
8536 /* Otherwise, we need to create a new temporary for the gimplified
8537 expression. */
8539 /* We can't return an lvalue if we have an internal postqueue. The
8540 object the lvalue refers to would (probably) be modified by the
8541 postqueue; we need to copy the value out first, which means an
8542 rvalue. */
8543 if ((fallback & fb_lvalue)
8544 && gimple_seq_empty_p (internal_post)
8545 && is_gimple_addressable (*expr_p))
8547 /* An lvalue will do. Take the address of the expression, store it
8548 in a temporary, and replace the expression with an INDIRECT_REF of
8549 that temporary. */
8550 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8551 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8552 *expr_p = build_simple_mem_ref (tmp);
8554 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8556 /* An rvalue will do. Assign the gimplified expression into a
8557 new temporary TMP and replace the original expression with
8558 TMP. First, make sure that the expression has a type so that
8559 it can be assigned into a temporary. */
8560 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
8561 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
8563 else
8565 #ifdef ENABLE_GIMPLE_CHECKING
8566 if (!(fallback & fb_mayfail))
8568 fprintf (stderr, "gimplification failed:\n");
8569 print_generic_expr (stderr, *expr_p, 0);
8570 debug_tree (*expr_p);
8571 internal_error ("gimplification failed");
8573 #endif
8574 gcc_assert (fallback & fb_mayfail);
8576 /* If this is an asm statement, and the user asked for the
8577 impossible, don't die. Fail and let gimplify_asm_expr
8578 issue an error. */
8579 ret = GS_ERROR;
8580 goto out;
8583 /* Make sure the temporary matches our predicate. */
8584 gcc_assert ((*gimple_test_f) (*expr_p));
8586 if (!gimple_seq_empty_p (internal_post))
8588 annotate_all_with_location (internal_post, input_location);
8589 gimplify_seq_add_seq (pre_p, internal_post);
8592 out:
8593 input_location = saved_location;
8594 return ret;
8597 /* Look through TYPE for variable-sized objects and gimplify each such
8598 size that we find. Add to LIST_P any statements generated. */
8600 void
8601 gimplify_type_sizes (tree type, gimple_seq *list_p)
8603 tree field, t;
8605 if (type == NULL || type == error_mark_node)
8606 return;
8608 /* We first do the main variant, then copy into any other variants. */
8609 type = TYPE_MAIN_VARIANT (type);
8611 /* Avoid infinite recursion. */
8612 if (TYPE_SIZES_GIMPLIFIED (type))
8613 return;
8615 TYPE_SIZES_GIMPLIFIED (type) = 1;
8617 switch (TREE_CODE (type))
8619 case INTEGER_TYPE:
8620 case ENUMERAL_TYPE:
8621 case BOOLEAN_TYPE:
8622 case REAL_TYPE:
8623 case FIXED_POINT_TYPE:
8624 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8625 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
8627 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8629 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8630 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
8632 break;
8634 case ARRAY_TYPE:
8635 /* These types may not have declarations, so handle them here. */
8636 gimplify_type_sizes (TREE_TYPE (type), list_p);
8637 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
8638 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
8639 with assigned stack slots, for -O1+ -g they should be tracked
8640 by VTA. */
8641 if (!(TYPE_NAME (type)
8642 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8643 && DECL_IGNORED_P (TYPE_NAME (type)))
8644 && TYPE_DOMAIN (type)
8645 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8647 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8648 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8649 DECL_IGNORED_P (t) = 0;
8650 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8651 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8652 DECL_IGNORED_P (t) = 0;
8654 break;
8656 case RECORD_TYPE:
8657 case UNION_TYPE:
8658 case QUAL_UNION_TYPE:
8659 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8660 if (TREE_CODE (field) == FIELD_DECL)
8662 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
8663 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8664 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8665 gimplify_type_sizes (TREE_TYPE (field), list_p);
8667 break;
8669 case POINTER_TYPE:
8670 case REFERENCE_TYPE:
8671 /* We used to recurse on the pointed-to type here, which turned out to
8672 be incorrect because its definition might refer to variables not
8673 yet initialized at this point if a forward declaration is involved.
8675 It was actually useful for anonymous pointed-to types to ensure
8676 that the sizes evaluation dominates every possible later use of the
8677 values. Restricting to such types here would be safe since there
8678 is no possible forward declaration around, but would introduce an
8679 undesirable middle-end semantic to anonymity. We then defer to
8680 front-ends the responsibility of ensuring that the sizes are
8681 evaluated both early and late enough, e.g. by attaching artificial
8682 type declarations to the tree. */
8683 break;
8685 default:
8686 break;
8689 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8690 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
8692 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8694 TYPE_SIZE (t) = TYPE_SIZE (type);
8695 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8696 TYPE_SIZES_GIMPLIFIED (t) = 1;
8700 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8701 a size or position, has had all of its SAVE_EXPRs evaluated.
8702 We add any required statements to *STMT_P. */
8704 void
8705 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
8707 tree expr = *expr_p;
8709 /* We don't do anything if the value isn't there, is constant, or contains
8710 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
8711 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
8712 will want to replace it with a new variable, but that will cause problems
8713 if this type is from outside the function. It's OK to have that here. */
8714 if (is_gimple_sizepos (expr))
8715 return;
8717 *expr_p = unshare_expr (expr);
8719 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
8722 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8723 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8724 is true, also gimplify the parameters. */
8726 gimple
8727 gimplify_body (tree fndecl, bool do_parms)
8729 location_t saved_location = input_location;
8730 gimple_seq parm_stmts, seq;
8731 gimple outer_bind;
8732 struct cgraph_node *cgn;
8734 timevar_push (TV_TREE_GIMPLIFY);
8736 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8737 gimplification. */
8738 default_rtl_profile ();
8740 gcc_assert (gimplify_ctxp == NULL);
8741 push_gimplify_context ();
8743 if (flag_openacc || flag_openmp)
8745 gcc_assert (gimplify_omp_ctxp == NULL);
8746 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
8747 gimplify_omp_ctxp
8748 = new_omp_context ((enum omp_region_type) (ORT_TARGET
8749 | ORT_TARGET_OFFLOAD));
8752 /* Unshare most shared trees in the body and in that of any nested functions.
8753 It would seem we don't have to do this for nested functions because
8754 they are supposed to be output and then the outer function gimplified
8755 first, but the g++ front end doesn't always do it that way. */
8756 unshare_body (fndecl);
8757 unvisit_body (fndecl);
8759 cgn = cgraph_get_node (fndecl);
8760 if (cgn && cgn->origin)
8761 nonlocal_vlas = pointer_set_create ();
8763 /* Make sure input_location isn't set to something weird. */
8764 input_location = DECL_SOURCE_LOCATION (fndecl);
8766 /* Resolve callee-copies. This has to be done before processing
8767 the body so that DECL_VALUE_EXPR gets processed correctly. */
8768 parm_stmts = do_parms ? gimplify_parameters () : NULL;
8770 /* Gimplify the function's body. */
8771 seq = NULL;
8772 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
8773 outer_bind = gimple_seq_first_stmt (seq);
8774 if (!outer_bind)
8776 outer_bind = gimple_build_nop ();
8777 gimplify_seq_add_stmt (&seq, outer_bind);
8780 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8781 not the case, wrap everything in a GIMPLE_BIND to make it so. */
8782 if (gimple_code (outer_bind) == GIMPLE_BIND
8783 && gimple_seq_first (seq) == gimple_seq_last (seq))
8785 else
8786 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8788 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8790 /* If we had callee-copies statements, insert them at the beginning
8791 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
8792 if (!gimple_seq_empty_p (parm_stmts))
8794 tree parm;
8796 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8797 gimple_bind_set_body (outer_bind, parm_stmts);
8799 for (parm = DECL_ARGUMENTS (current_function_decl);
8800 parm; parm = DECL_CHAIN (parm))
8801 if (DECL_HAS_VALUE_EXPR_P (parm))
8803 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8804 DECL_IGNORED_P (parm) = 0;
8808 if (nonlocal_vlas)
8810 if (nonlocal_vla_vars)
8812 /* tree-nested.c may later on call declare_vars (..., true);
8813 which relies on BLOCK_VARS chain to be the tail of the
8814 gimple_bind_vars chain. Ensure we don't violate that
8815 assumption. */
8816 if (gimple_bind_block (outer_bind)
8817 == DECL_INITIAL (current_function_decl))
8818 declare_vars (nonlocal_vla_vars, outer_bind, true);
8819 else
8820 BLOCK_VARS (DECL_INITIAL (current_function_decl))
8821 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
8822 nonlocal_vla_vars);
8823 nonlocal_vla_vars = NULL_TREE;
8825 pointer_set_destroy (nonlocal_vlas);
8826 nonlocal_vlas = NULL;
8829 if ((flag_openacc || flag_openmp || flag_openmp_simd)
8830 && gimplify_omp_ctxp)
8832 delete_omp_context (gimplify_omp_ctxp);
8833 gimplify_omp_ctxp = NULL;
8836 pop_gimplify_context (outer_bind);
8837 gcc_assert (gimplify_ctxp == NULL);
8839 #ifdef ENABLE_CHECKING
8840 if (!seen_error ())
8841 verify_gimple_in_seq (gimple_bind_body (outer_bind));
8842 #endif
8844 timevar_pop (TV_TREE_GIMPLIFY);
8845 input_location = saved_location;
8847 return outer_bind;
8850 typedef char *char_p; /* For DEF_VEC_P. */
8852 /* Return whether we should exclude FNDECL from instrumentation. */
8854 static bool
8855 flag_instrument_functions_exclude_p (tree fndecl)
8857 vec<char_p> *v;
8859 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8860 if (v && v->length () > 0)
8862 const char *name;
8863 int i;
8864 char *s;
8866 name = lang_hooks.decl_printable_name (fndecl, 0);
8867 FOR_EACH_VEC_ELT (*v, i, s)
8868 if (strstr (name, s) != NULL)
8869 return true;
8872 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8873 if (v && v->length () > 0)
8875 const char *name;
8876 int i;
8877 char *s;
8879 name = DECL_SOURCE_FILE (fndecl);
8880 FOR_EACH_VEC_ELT (*v, i, s)
8881 if (strstr (name, s) != NULL)
8882 return true;
8885 return false;
8888 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
8889 node for the function we want to gimplify.
8891 Return the sequence of GIMPLE statements corresponding to the body
8892 of FNDECL. */
8894 void
8895 gimplify_function_tree (tree fndecl)
8897 tree parm, ret;
8898 gimple_seq seq;
8899 gimple bind;
8901 gcc_assert (!gimple_body (fndecl));
8903 if (DECL_STRUCT_FUNCTION (fndecl))
8904 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8905 else
8906 push_struct_function (fndecl);
8908 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
8910 /* Preliminarily mark non-addressed complex variables as eligible
8911 for promotion to gimple registers. We'll transform their uses
8912 as we find them. */
8913 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8914 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
8915 && !TREE_THIS_VOLATILE (parm)
8916 && !needs_to_live_in_memory (parm))
8917 DECL_GIMPLE_REG_P (parm) = 1;
8920 ret = DECL_RESULT (fndecl);
8921 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
8922 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
8923 && !needs_to_live_in_memory (ret))
8924 DECL_GIMPLE_REG_P (ret) = 1;
8926 bind = gimplify_body (fndecl, true);
8928 /* The tree body of the function is no longer needed, replace it
8929 with the new GIMPLE body. */
8930 seq = NULL;
8931 gimple_seq_add_stmt (&seq, bind);
8932 gimple_set_body (fndecl, seq);
8934 /* If we're instrumenting function entry/exit, then prepend the call to
8935 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8936 catch the exit hook. */
8937 /* ??? Add some way to ignore exceptions for this TFE. */
8938 if (flag_instrument_function_entry_exit
8939 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8940 && !flag_instrument_functions_exclude_p (fndecl))
8942 tree x;
8943 gimple new_bind;
8944 gimple tf;
8945 gimple_seq cleanup = NULL, body = NULL;
8946 tree tmp_var;
8947 gimple call;
8949 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8950 call = gimple_build_call (x, 1, integer_zero_node);
8951 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8952 gimple_call_set_lhs (call, tmp_var);
8953 gimplify_seq_add_stmt (&cleanup, call);
8954 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
8955 call = gimple_build_call (x, 2,
8956 build_fold_addr_expr (current_function_decl),
8957 tmp_var);
8958 gimplify_seq_add_stmt (&cleanup, call);
8959 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
8961 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8962 call = gimple_build_call (x, 1, integer_zero_node);
8963 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8964 gimple_call_set_lhs (call, tmp_var);
8965 gimplify_seq_add_stmt (&body, call);
8966 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
8967 call = gimple_build_call (x, 2,
8968 build_fold_addr_expr (current_function_decl),
8969 tmp_var);
8970 gimplify_seq_add_stmt (&body, call);
8971 gimplify_seq_add_stmt (&body, tf);
8972 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
8973 /* Clear the block for BIND, since it is no longer directly inside
8974 the function, but within a try block. */
8975 gimple_bind_set_block (bind, NULL);
8977 /* Replace the current function body with the body
8978 wrapped in the try/finally TF. */
8979 seq = NULL;
8980 gimple_seq_add_stmt (&seq, new_bind);
8981 gimple_set_body (fndecl, seq);
8984 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8985 cfun->curr_properties = PROP_gimple_any;
8987 pop_cfun ();
8990 /* Return a dummy expression of type TYPE in order to keep going after an
8991 error. */
8993 static tree
8994 dummy_object (tree type)
8996 tree t = build_int_cst (build_pointer_type (type), 0);
8997 return build2 (MEM_REF, type, t, t);
9000 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
9001 builtin function, but a very special sort of operator. */
9003 enum gimplify_status
9004 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
9006 tree promoted_type, have_va_type;
9007 tree valist = TREE_OPERAND (*expr_p, 0);
9008 tree type = TREE_TYPE (*expr_p);
9009 tree t;
9010 location_t loc = EXPR_LOCATION (*expr_p);
9012 /* Verify that valist is of the proper type. */
9013 have_va_type = TREE_TYPE (valist);
9014 if (have_va_type == error_mark_node)
9015 return GS_ERROR;
9016 have_va_type = targetm.canonical_va_list_type (have_va_type);
9018 if (have_va_type == NULL_TREE)
9020 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
9021 return GS_ERROR;
9024 /* Generate a diagnostic for requesting data of a type that cannot
9025 be passed through `...' due to type promotion at the call site. */
9026 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
9027 != type)
9029 static bool gave_help;
9030 bool warned;
9032 /* Unfortunately, this is merely undefined, rather than a constraint
9033 violation, so we cannot make this an error. If this call is never
9034 executed, the program is still strictly conforming. */
9035 warned = warning_at (loc, 0,
9036 "%qT is promoted to %qT when passed through %<...%>",
9037 type, promoted_type);
9038 if (!gave_help && warned)
9040 gave_help = true;
9041 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
9042 promoted_type, type);
9045 /* We can, however, treat "undefined" any way we please.
9046 Call abort to encourage the user to fix the program. */
9047 if (warned)
9048 inform (loc, "if this code is reached, the program will abort");
9049 /* Before the abort, allow the evaluation of the va_list
9050 expression to exit or longjmp. */
9051 gimplify_and_add (valist, pre_p);
9052 t = build_call_expr_loc (loc,
9053 builtin_decl_implicit (BUILT_IN_TRAP), 0);
9054 gimplify_and_add (t, pre_p);
9056 /* This is dead code, but go ahead and finish so that the
9057 mode of the result comes out right. */
9058 *expr_p = dummy_object (type);
9059 return GS_ALL_DONE;
9061 else
9063 /* Make it easier for the backends by protecting the valist argument
9064 from multiple evaluations. */
9065 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
9067 /* For this case, the backends will be expecting a pointer to
9068 TREE_TYPE (abi), but it's possible we've
9069 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
9070 So fix it. */
9071 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
9073 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
9074 valist = fold_convert_loc (loc, p1,
9075 build_fold_addr_expr_loc (loc, valist));
9078 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
9080 else
9081 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
9083 if (!targetm.gimplify_va_arg_expr)
9084 /* FIXME: Once most targets are converted we should merely
9085 assert this is non-null. */
9086 return GS_ALL_DONE;
9088 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
9089 return GS_OK;
9093 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
9095 DST/SRC are the destination and source respectively. You can pass
9096 ungimplified trees in DST or SRC, in which case they will be
9097 converted to a gimple operand if necessary.
9099 This function returns the newly created GIMPLE_ASSIGN tuple. */
9101 gimple
9102 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
9104 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9105 gimplify_and_add (t, seq_p);
9106 ggc_free (t);
9107 return gimple_seq_last_stmt (*seq_p);
9110 inline hashval_t
9111 gimplify_hasher::hash (const value_type *p)
9113 tree t = p->val;
9114 return iterative_hash_expr (t, 0);
9117 inline bool
9118 gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
9120 tree t1 = p1->val;
9121 tree t2 = p2->val;
9122 enum tree_code code = TREE_CODE (t1);
9124 if (TREE_CODE (t2) != code
9125 || TREE_TYPE (t1) != TREE_TYPE (t2))
9126 return false;
9128 if (!operand_equal_p (t1, t2, 0))
9129 return false;
9131 #ifdef ENABLE_CHECKING
9132 /* Only allow them to compare equal if they also hash equal; otherwise
9133 results are nondeterminate, and we fail bootstrap comparison. */
9134 gcc_assert (hash (p1) == hash (p2));
9135 #endif
9137 return true;