/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-pretty-print.h"
#include "langhooks.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-ssanames.h"
#include "tree-ssa.h"
#include "timevar.h"
#include "hashtab.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic-core.h"
#include "target.h"
#include "pointer-set.h"
#include "splay-tree.h"
#include "vec.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"

#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "expr.h"
#include "tm_p.h"

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,
  GOVD_MAP_TO_ONLY = 8192,
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_TARGET_DATA = 16,
  ORT_TARGET = 32
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  struct pointer_set_t *privatized_types;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);

/* Shorter alias name for the above function for use in gimplify.c
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (struct gimplify_ctx *c)
{
  memset (c, '\0', sizeof (*c));
  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  if (c->temp_htab.is_created ())
    c->temp_htab.dispose ();
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gimple gimple_bind)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gimple
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gimple>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = pointer_set_create ();
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  pointer_set_destroy (c->privatized_types);
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t) || is_gimple_lvalue (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val, bool is_formal)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (is_formal
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val, is_formal);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab.is_created ())
	gimplify_ctxp->temp_htab.create (1000);
      slot = gimplify_ctxp->temp_htab.find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val, is_formal);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
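
/* Illustrative example (not part of the pass; the temporary name is
   invented): internal_get_tmp_var factors VAL out into the pre-queue.
   Roughly, gimplifying the index of

       a[i + 1] = b;

   through get_initialized_tmp_var yields

       D.1234 = i + 1;
       a[D.1234] = b;

   so the caller is left with a GIMPLE value (D.1234) in place of the
   compound expression.  */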

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}

/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
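
/* Illustrative sketch of the mark/copy/unmark scheme above (the tree shape
   is invented): if a front end reuses the same EXPR node twice, as in

       tmp = EXPR;
       use (tmp, EXPR);    <-- second reference to the very same node

   then the first walk marks EXPR with TREE_VISITED, the second reference
   finds the mark and is replaced by a copy of EXPR via mostly_copy_tree_r,
   and a final pass clears the TREE_VISITED bits so later walks start from
   a clean slate.  */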

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  struct pointer_set_t *visited
    = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  if (visited)
    pointer_set_destroy (visited);

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gimple *save, gimple *restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body, cleanup;
  gimple stack_save;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gimple stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  TREE_THIS_VOLATILE (clobber) = 1;
	  gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
	}
    }

  if (cleanup)
    {
      gimple gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
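
/* Illustrative shape of the output for a BIND_EXPR whose body set
   save_stack (e.g. it declared a VLA); the identifiers are invented:

       saved_stack.1 = __builtin_stack_save ();
       try
	 {
	   ... gimplified body ...
	 }
       finally
	 {
	   __builtin_stack_restore (saved_stack.1);
	   var = {CLOBBER};	<-- one per out-of-scope memory variable
	 }

   i.e. the stack-save call goes first in the new body, and the cleanup
   sequence (stack restore plus clobbers) becomes the FINALLY part.  */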

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  gimple ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      gimple ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl), NULL);

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
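
/* Illustrative result (temporary name invented): for a value returned in
   registers, a GENERIC "return a + b;" becomes roughly

       retval.1 = a + b;
       return retval.1;

   with the temporary cached in gimplify_ctxp->return_temp so that every
   return statement in the function reuses the same temporary.  */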

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    FORCED_LABEL (*tp) = 1;

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
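
/* Illustrative lowering (label names invented): a LOOP_EXPR whose body
   contains an EXIT_EXPR becomes roughly

       start:
	 ... gimplified body, where the EXIT_EXPR has become
	     "if (cond) goto exit; else (void) 0;" ...
	 goto start;
       exit:

   The exit label is only emitted if gimplify_exit_expr actually asked
   for one via gimplify_ctxp->exit_label.  */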

/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}

/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      if (!default_case)
	{
	  gimple new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
					   default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}

/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  gimple gimple_label;

  /* Invalid OpenMP programs can play Duff's Device type games with
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, gimple_label);

  return GS_ALL_DONE;
}

/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}

/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}

/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}

/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}

/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  return GS_OK;
}

/* Nonlocal VLAs seen in the current function.  */
static struct pointer_set_t *nonlocal_vlas;

/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
	    {
	      tree copy = copy_node (decl), block;

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      block = DECL_INITIAL (current_function_decl);
	      DECL_CHAIN (copy) = BLOCK_VARS (block);
	      BLOCK_VARS (block) = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1673 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1674 node *EXPR_P.
1676 compound_lval
1677 : min_lval '[' val ']'
1678 | min_lval '.' ID
1679 | compound_lval '[' val ']'
1680 | compound_lval '.' ID
1682 This is not part of the original SIMPLE definition, which separates
1683 array and member references, but it seems reasonable to handle them
1684 together. Also, this way we don't run into problems with union
1685 aliasing; gcc requires that for accesses through a union to alias, the
1686 union reference must be explicit, which was not always the case when we
1687 were splitting up array and member refs.
1689 PRE_P points to the sequence where side effects that must happen before
1690 *EXPR_P should be stored.
1692 POST_P points to the sequence where side effects that must happen after
1693 *EXPR_P should be stored. */
1695 static enum gimplify_status
1696 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1697 fallback_t fallback)
1699 tree *p;
1700 enum gimplify_status ret = GS_ALL_DONE, tret;
1701 int i;
1702 location_t loc = EXPR_LOCATION (*expr_p);
1703 tree expr = *expr_p;
1705 /* Create a stack of the subexpressions so later we can walk them in
1706 order from inner to outer. */
1707 stack_vec<tree, 10> expr_stack;
1709 /* We can handle anything that get_inner_reference can deal with. */
1710 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1712 restart:
1713 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1714 if (TREE_CODE (*p) == INDIRECT_REF)
1715 *p = fold_indirect_ref_loc (loc, *p);
1717 if (handled_component_p (*p))
1719 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1720 additional COMPONENT_REFs. */
1721 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1722 && gimplify_var_or_parm_decl (p) == GS_OK)
1723 goto restart;
1724 else
1725 break;
1727 expr_stack.safe_push (*p);
1730 gcc_assert (expr_stack.length ());
1732 /* Now EXPR_STACK is a stack of pointers to all the refs we've
1733 walked through and P points to the innermost expression.
1735 Java requires that we elaborated nodes in source order. That
1736 means we must gimplify the inner expression followed by each of
1737 the indices, in order. But we can't gimplify the inner
1738 expression until we deal with any variable bounds, sizes, or
1739 positions in order to deal with PLACEHOLDER_EXPRs.
1741 So we do this in three steps. First we deal with the annotations
1742 for any variables in the components, then we gimplify the base,
1743 then we gimplify any indices, from left to right. */
1744 for (i = expr_stack.length () - 1; i >= 0; i--)
1746 tree t = expr_stack[i];
1748 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1750 /* Gimplify the low bound and element type size and put them into
1751 the ARRAY_REF. If these values are set, they have already been
1752 gimplified. */
1753 if (TREE_OPERAND (t, 2) == NULL_TREE)
1755 tree low = unshare_expr (array_ref_low_bound (t));
1756 if (!is_gimple_min_invariant (low))
1758 TREE_OPERAND (t, 2) = low;
1759 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1760 post_p, is_gimple_reg,
1761 fb_rvalue);
1762 ret = MIN (ret, tret);
1765 else
1767 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1768 is_gimple_reg, fb_rvalue);
1769 ret = MIN (ret, tret);
1772 if (TREE_OPERAND (t, 3) == NULL_TREE)
1774 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1775 tree elmt_size = unshare_expr (array_ref_element_size (t));
1776 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1778 /* Divide the element size by the alignment of the element
1779 type (above). */
1780 elmt_size
1781 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
1783 if (!is_gimple_min_invariant (elmt_size))
1785 TREE_OPERAND (t, 3) = elmt_size;
1786 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
1787 post_p, is_gimple_reg,
1788 fb_rvalue);
1789 ret = MIN (ret, tret);
1792 else
1794 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1795 is_gimple_reg, fb_rvalue);
1796 ret = MIN (ret, tret);
1799 else if (TREE_CODE (t) == COMPONENT_REF)
1801 /* Set the field offset into T and gimplify it. */
1802 if (TREE_OPERAND (t, 2) == NULL_TREE)
1804 tree offset = unshare_expr (component_ref_field_offset (t));
1805 tree field = TREE_OPERAND (t, 1);
1806 tree factor
1807 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1809 /* Divide the offset by its alignment. */
1810 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
1812 if (!is_gimple_min_invariant (offset))
1814 TREE_OPERAND (t, 2) = offset;
1815 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1816 post_p, is_gimple_reg,
1817 fb_rvalue);
1818 ret = MIN (ret, tret);
1821 else
1823 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1824 is_gimple_reg, fb_rvalue);
1825 ret = MIN (ret, tret);
1830 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1831 so as to match the min_lval predicate. Failure to do so may result
1832 in the creation of large aggregate temporaries. */
1833 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1834 fallback | fb_lvalue);
1835 ret = MIN (ret, tret);
1837 /* And finally, the indices and operands of ARRAY_REF. During this
1838 loop we also remove any useless conversions. */
1839 for (; expr_stack.length () > 0; )
1841 tree t = expr_stack.pop ();
1843 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1845 /* Gimplify the dimension. */
1846 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1848 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1849 is_gimple_val, fb_rvalue);
1850 ret = MIN (ret, tret);
1854 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1856 /* The innermost expression P may have originally had
1857 TREE_SIDE_EFFECTS set which would have caused all the outer
1858 expressions in *EXPR_P leading to P to also have had
1859 TREE_SIDE_EFFECTS set. */
1860 recalculate_side_effects (t);
1863 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1864 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1866 canonicalize_component_ref (expr_p);
1869 expr_stack.release ();
1871 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
1873 return ret;
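/* Illustrative sketch (hypothetical example, not from the original sources;
   names and sizes are made up, assuming a 4-byte int): for a VLA access
   "a[i][j]" with "int a[n][m]", the ARRAY_REF "a[i]" has element type
   "int[m]", whose size "4 * m" is not invariant.  The code above divides
   that size by the element alignment and gimplifies the result into a
   temporary recorded in operand 3, emitting roughly
       D.1 = (sizetype) m;
   into PRE_P before the reference itself is gimplified.  */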
1876 /* Gimplify the self modifying expression pointed to by EXPR_P
1877 (++, --, +=, -=).
1879 PRE_P points to the list where side effects that must happen before
1880 *EXPR_P should be stored.
1882 POST_P points to the list where side effects that must happen after
1883 *EXPR_P should be stored.
1885 WANT_VALUE is nonzero iff we want to use the value of this expression
1886 in another expression.
1888 ARITH_TYPE is the type the computation should be performed in. */
1890 enum gimplify_status
1891 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1892 bool want_value, tree arith_type)
1894 enum tree_code code;
1895 tree lhs, lvalue, rhs, t1;
1896 gimple_seq post = NULL, *orig_post_p = post_p;
1897 bool postfix;
1898 enum tree_code arith_code;
1899 enum gimplify_status ret;
1900 location_t loc = EXPR_LOCATION (*expr_p);
1902 code = TREE_CODE (*expr_p);
1904 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1905 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1907 /* Prefix or postfix? */
1908 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1909 /* Faster to treat as prefix if result is not used. */
1910 postfix = want_value;
1911 else
1912 postfix = false;
1914 /* For postfix, make sure the inner expression's post side effects
1915 are executed after side effects from this expression. */
1916 if (postfix)
1917 post_p = &post;
1919 /* Add or subtract? */
1920 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1921 arith_code = PLUS_EXPR;
1922 else
1923 arith_code = MINUS_EXPR;
1925 /* Gimplify the LHS into a GIMPLE lvalue. */
1926 lvalue = TREE_OPERAND (*expr_p, 0);
1927 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1928 if (ret == GS_ERROR)
1929 return ret;
1931 /* Extract the operands to the arithmetic operation. */
1932 lhs = lvalue;
1933 rhs = TREE_OPERAND (*expr_p, 1);
1935 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
1936 that as the result value and in the postqueue operation. */
1937 if (postfix)
1939 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1940 if (ret == GS_ERROR)
1941 return ret;
1943 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
1946 /* For pointer increments and decrements, use POINTER_PLUS_EXPR. */
1947 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
1949 rhs = convert_to_ptrofftype_loc (loc, rhs);
1950 if (arith_code == MINUS_EXPR)
1951 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
1952 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
1954 else
1955 t1 = fold_convert (TREE_TYPE (*expr_p),
1956 fold_build2 (arith_code, arith_type,
1957 fold_convert (arith_type, lhs),
1958 fold_convert (arith_type, rhs)));
1960 if (postfix)
1962 gimplify_assign (lvalue, t1, pre_p);
1963 gimplify_seq_add_seq (orig_post_p, post);
1964 *expr_p = lhs;
1965 return GS_ALL_DONE;
1967 else
1969 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
1970 return GS_OK;
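/* Illustrative sketch (hypothetical example, not from the original sources;
   names are made up): for a postfix use whose value is needed, e.g.
   "y = x++;", the code above emits roughly
       x.0 = x;
       x = x.0 + 1;
       y = x.0;
   while the prefix form "y = ++x;" (or an unused "x++;") needs no temporary
   and becomes
       x = x + 1;
       y = x;  */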
1974 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
1976 static void
1977 maybe_with_size_expr (tree *expr_p)
1979 tree expr = *expr_p;
1980 tree type = TREE_TYPE (expr);
1981 tree size;
1983 /* If we've already wrapped this or the type is error_mark_node, we can't do
1984 anything. */
1985 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1986 || type == error_mark_node)
1987 return;
1989 /* If the size isn't known or is a constant, we have nothing to do. */
1990 size = TYPE_SIZE_UNIT (type);
1991 if (!size || TREE_CODE (size) == INTEGER_CST)
1992 return;
1994 /* Otherwise, make a WITH_SIZE_EXPR. */
1995 size = unshare_expr (size);
1996 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
1997 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2000 /* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
2001 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2002 the CALL_EXPR. */
2004 static enum gimplify_status
2005 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2007 bool (*test) (tree);
2008 fallback_t fb;
2010 /* In general, we allow lvalues for function arguments to avoid
2011 extra overhead of copying large aggregates out of even larger
2012 aggregates into temporaries only to copy the temporaries to
2013 the argument list. Make optimizers happy by pulling out to
2014 temporaries those types that fit in registers. */
2015 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2016 test = is_gimple_val, fb = fb_rvalue;
2017 else
2019 test = is_gimple_lvalue, fb = fb_either;
2020 /* Also strip a TARGET_EXPR that would force an extra copy. */
2021 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2023 tree init = TARGET_EXPR_INITIAL (*arg_p);
2024 if (init
2025 && !VOID_TYPE_P (TREE_TYPE (init)))
2026 *arg_p = init;
2030 /* If this is a variable sized type, we must remember the size. */
2031 maybe_with_size_expr (arg_p);
2033 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2034 /* Make sure arguments have the same location as the function call
2035 itself. */
2036 protected_set_expr_location (*arg_p, call_location);
2038 /* There is a sequence point before a function call. Side effects in
2039 the argument list must occur before the actual call. So, when
2040 gimplifying arguments, force gimplify_expr to use an internal
2041 post queue which is then appended to the end of PRE_P. */
2042 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
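/* Illustrative example (hypothetical, not from the original sources; names
   are made up): for "f (s)" where "s" is a large struct, the argument is
   accepted as an lvalue and passed directly, whereas for "f (i + 1)" with an
   int the value fits in a register and is pulled into a temporary first:
       D.1 = i + 1;
       f (D.1);  */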
2045 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2046 WANT_VALUE is true if the result of the call is desired. */
2048 static enum gimplify_status
2049 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2051 tree fndecl, parms, p, fnptrtype;
2052 enum gimplify_status ret;
2053 int i, nargs;
2054 gimple call;
2055 bool builtin_va_start_p = FALSE;
2056 location_t loc = EXPR_LOCATION (*expr_p);
2058 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2060 /* For reliable diagnostics during inlining, it is necessary that
2061 every call_expr be annotated with file and line. */
2062 if (! EXPR_HAS_LOCATION (*expr_p))
2063 SET_EXPR_LOCATION (*expr_p, input_location);
2065 if (fn_contains_cilk_spawn_p (cfun)
2066 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p)
2067 && !seen_error ())
2068 return (enum gimplify_status)
2069 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p, NULL);
2071 /* This may be a call to a builtin function.
2073 Builtin function calls may be transformed into different
2074 (and more efficient) builtin function calls under certain
2075 circumstances. Unfortunately, gimplification can muck things
2076 up enough that the builtin expanders are not aware that certain
2077 transformations are still valid.
2079 So we attempt transformation/gimplification of the call before
2080 we gimplify the CALL_EXPR. At this time we do not manage to
2081 transform all calls in the same manner as the expanders do, but
2082 we do transform most of them. */
2083 fndecl = get_callee_fndecl (*expr_p);
2084 if (fndecl
2085 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2086 switch (DECL_FUNCTION_CODE (fndecl))
2088 case BUILT_IN_VA_START:
2090 builtin_va_start_p = TRUE;
2091 if (call_expr_nargs (*expr_p) < 2)
2093 error ("too few arguments to function %<va_start%>");
2094 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2095 return GS_OK;
2098 if (fold_builtin_next_arg (*expr_p, true))
2100 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2101 return GS_OK;
2103 break;
2105 case BUILT_IN_LINE:
2107 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2108 *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2109 return GS_OK;
2111 case BUILT_IN_FILE:
2113 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2114 *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2115 return GS_OK;
2117 case BUILT_IN_FUNCTION:
2119 const char *function;
2120 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2121 *expr_p = build_string_literal (strlen (function) + 1, function);
2122 return GS_OK;
2124 default:
2127 if (fndecl && DECL_BUILT_IN (fndecl))
2129 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2130 if (new_tree && new_tree != *expr_p)
2132 /* There was a transformation of this call which computes the
2133 same value, but in a more efficient way. Return and try
2134 again. */
2135 *expr_p = new_tree;
2136 return GS_OK;
2140 /* Remember the original function pointer type. */
2141 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2143 /* There is a sequence point before the call, so any side effects in
2144 the calling expression must occur before the actual call. Force
2145 gimplify_expr to use an internal post queue. */
2146 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2147 is_gimple_call_addr, fb_rvalue);
2149 nargs = call_expr_nargs (*expr_p);
2151 /* Get argument types for verification. */
2152 fndecl = get_callee_fndecl (*expr_p);
2153 parms = NULL_TREE;
2154 if (fndecl)
2155 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2156 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2157 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2159 if (fndecl && DECL_ARGUMENTS (fndecl))
2160 p = DECL_ARGUMENTS (fndecl);
2161 else if (parms)
2162 p = parms;
2163 else
2164 p = NULL_TREE;
2165 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2168 /* If the last argument is __builtin_va_arg_pack () and it is not
2169 passed as a named argument, decrease the number of CALL_EXPR
2170 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2171 if (!p
2172 && i < nargs
2173 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2175 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2176 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2178 if (last_arg_fndecl
2179 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2180 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2181 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2183 tree call = *expr_p;
2185 --nargs;
2186 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2187 CALL_EXPR_FN (call),
2188 nargs, CALL_EXPR_ARGP (call));
2190 /* Copy all CALL_EXPR flags, location and block, except
2191 CALL_EXPR_VA_ARG_PACK flag. */
2192 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2193 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2194 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2195 = CALL_EXPR_RETURN_SLOT_OPT (call);
2196 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2197 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2199 /* Set CALL_EXPR_VA_ARG_PACK. */
2200 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2204 /* Finally, gimplify the function arguments. */
2205 if (nargs > 0)
2207 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2208 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2209 PUSH_ARGS_REVERSED ? i-- : i++)
2211 enum gimplify_status t;
2213 /* Avoid gimplifying the second argument to va_start, which needs to
2214 be the plain PARM_DECL. */
2215 if ((i != 1) || !builtin_va_start_p)
2217 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2218 EXPR_LOCATION (*expr_p));
2220 if (t == GS_ERROR)
2221 ret = GS_ERROR;
2226 /* Verify the function result. */
2227 if (want_value && fndecl
2228 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2230 error_at (loc, "using result of function returning %<void%>");
2231 ret = GS_ERROR;
2234 /* Try this again in case gimplification exposed something. */
2235 if (ret != GS_ERROR)
2237 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2239 if (new_tree && new_tree != *expr_p)
2241 /* There was a transformation of this call which computes the
2242 same value, but in a more efficient way. Return and try
2243 again. */
2244 *expr_p = new_tree;
2245 return GS_OK;
2248 else
2250 *expr_p = error_mark_node;
2251 return GS_ERROR;
2254 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2255 CALL_EXPR itself.  This allows us to eliminate redundant or useless
2256 calls to "const" functions. */
2257 if (TREE_CODE (*expr_p) == CALL_EXPR)
2259 int flags = call_expr_flags (*expr_p);
2260 if (flags & (ECF_CONST | ECF_PURE)
2261 /* An infinite loop is considered a side effect. */
2262 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2263 TREE_SIDE_EFFECTS (*expr_p) = 0;
2266 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2267 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2268 form and delegate the creation of a GIMPLE_CALL to
2269 gimplify_modify_expr. This is always possible because when
2270 WANT_VALUE is true, the caller wants the result of this call into
2271 a temporary, which means that we will emit an INIT_EXPR in
2272 internal_get_tmp_var which will then be handled by
2273 gimplify_modify_expr. */
2274 if (!want_value)
2276 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2277 have to do is replicate it as a GIMPLE_CALL tuple. */
2278 gimple_stmt_iterator gsi;
2279 call = gimple_build_call_from_tree (*expr_p);
2280 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2281 notice_special_calls (call);
2282 gimplify_seq_add_stmt (pre_p, call);
2283 gsi = gsi_last (*pre_p);
2284 /* Don't fold stmts inside of a target construct.  We'll do it
2285 during the omplower pass instead. */
2286 struct gimplify_omp_ctx *ctx;
2287 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2288 if (ctx->region_type == ORT_TARGET)
2289 break;
2290 if (ctx == NULL)
2291 fold_stmt (&gsi);
2292 *expr_p = NULL_TREE;
2294 else
2295 /* Remember the original function type. */
2296 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2297 CALL_EXPR_FN (*expr_p));
2299 return ret;
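/* Illustrative sketch (hypothetical example, not from the original sources;
   names are made up): a call whose value is unused, e.g. "foo (bar ());",
   clears *EXPR_P and emits the GIMPLE_CALLs directly:
       D.1 = bar ();
       foo (D.1);
   With WANT_VALUE true, e.g. "x = foo (y);", the CALL_EXPR is instead left in
   place (its callee wrapped in a NOP_EXPR recording the original function
   pointer type) for gimplify_modify_expr to turn into a GIMPLE_CALL with an
   LHS.  */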
2302 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2303 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2305 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2306 condition is true or false, respectively. If null, we should generate
2307 our own to skip over the evaluation of this specific expression.
2309 LOCUS is the source location of the COND_EXPR.
2311 This function is the tree equivalent of do_jump.
2313 shortcut_cond_r should only be called by shortcut_cond_expr. */
2315 static tree
2316 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2317 location_t locus)
2319 tree local_label = NULL_TREE;
2320 tree t, expr = NULL;
2322 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2323 retain the shortcut semantics. Just insert the gotos here;
2324 shortcut_cond_expr will append the real blocks later. */
2325 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2327 location_t new_locus;
2329 /* Turn if (a && b) into
2331 if (a); else goto no;
2332 if (b) goto yes; else goto no;
2333 (no:) */
2335 if (false_label_p == NULL)
2336 false_label_p = &local_label;
2338 /* Keep the original source location on the first 'if'. */
2339 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2340 append_to_statement_list (t, &expr);
2342 /* Set the source location of the && on the second 'if'. */
2343 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2344 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2345 new_locus);
2346 append_to_statement_list (t, &expr);
2348 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2350 location_t new_locus;
2352 /* Turn if (a || b) into
2354 if (a) goto yes;
2355 if (b) goto yes; else goto no;
2356 (yes:) */
2358 if (true_label_p == NULL)
2359 true_label_p = &local_label;
2361 /* Keep the original source location on the first 'if'. */
2362 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2363 append_to_statement_list (t, &expr);
2365 /* Set the source location of the || on the second 'if'. */
2366 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2367 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2368 new_locus);
2369 append_to_statement_list (t, &expr);
2371 else if (TREE_CODE (pred) == COND_EXPR
2372 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2373 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2375 location_t new_locus;
2377 /* As long as we're messing with gotos, turn if (a ? b : c) into
2378 if (a)
2379 if (b) goto yes; else goto no;
2380 else
2381 if (c) goto yes; else goto no;
2383 Don't do this if one of the arms has void type, which can happen
2384 in C++ when the arm is throw. */
2386 /* Keep the original source location on the first 'if'. Set the source
2387 location of the ? on the second 'if'. */
2388 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2389 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2390 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2391 false_label_p, locus),
2392 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2393 false_label_p, new_locus));
2395 else
2397 expr = build3 (COND_EXPR, void_type_node, pred,
2398 build_and_jump (true_label_p),
2399 build_and_jump (false_label_p));
2400 SET_EXPR_LOCATION (expr, locus);
2403 if (local_label)
2405 t = build1 (LABEL_EXPR, void_type_node, local_label);
2406 append_to_statement_list (t, &expr);
2409 return expr;
2412 /* Given a conditional expression EXPR with short-circuit boolean
2413 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2414 predicate apart into the equivalent sequence of conditionals. */
2416 static tree
2417 shortcut_cond_expr (tree expr)
2419 tree pred = TREE_OPERAND (expr, 0);
2420 tree then_ = TREE_OPERAND (expr, 1);
2421 tree else_ = TREE_OPERAND (expr, 2);
2422 tree true_label, false_label, end_label, t;
2423 tree *true_label_p;
2424 tree *false_label_p;
2425 bool emit_end, emit_false, jump_over_else;
2426 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2427 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2429 /* First do simple transformations. */
2430 if (!else_se)
2432 /* If there is no 'else', turn
2433 if (a && b) then c
2434 into
2435 if (a) if (b) then c. */
2436 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2438 /* Keep the original source location on the first 'if'. */
2439 location_t locus = EXPR_LOC_OR_HERE (expr);
2440 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2441 /* Set the source location of the && on the second 'if'. */
2442 if (EXPR_HAS_LOCATION (pred))
2443 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2444 then_ = shortcut_cond_expr (expr);
2445 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2446 pred = TREE_OPERAND (pred, 0);
2447 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2448 SET_EXPR_LOCATION (expr, locus);
2452 if (!then_se)
2454 /* If there is no 'then', turn
2455 if (a || b); else d
2456 into
2457 if (a); else if (b); else d. */
2458 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2460 /* Keep the original source location on the first 'if'. */
2461 location_t locus = EXPR_LOC_OR_HERE (expr);
2462 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2463 /* Set the source location of the || on the second 'if'. */
2464 if (EXPR_HAS_LOCATION (pred))
2465 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2466 else_ = shortcut_cond_expr (expr);
2467 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2468 pred = TREE_OPERAND (pred, 0);
2469 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2470 SET_EXPR_LOCATION (expr, locus);
2474 /* If we're done, great. */
2475 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2476 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2477 return expr;
2479 /* Otherwise we need to mess with gotos. Change
2480 if (a) c; else d;
2482 if (a); else goto no;
2483 c; goto end;
2484 no: d; end:
2485 and recursively gimplify the condition. */
2487 true_label = false_label = end_label = NULL_TREE;
2489 /* If our arms just jump somewhere, hijack those labels so we don't
2490 generate jumps to jumps. */
2492 if (then_
2493 && TREE_CODE (then_) == GOTO_EXPR
2494 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2496 true_label = GOTO_DESTINATION (then_);
2497 then_ = NULL;
2498 then_se = false;
2501 if (else_
2502 && TREE_CODE (else_) == GOTO_EXPR
2503 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2505 false_label = GOTO_DESTINATION (else_);
2506 else_ = NULL;
2507 else_se = false;
2510 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2511 if (true_label)
2512 true_label_p = &true_label;
2513 else
2514 true_label_p = NULL;
2516 /* The 'else' branch also needs a label if it contains interesting code. */
2517 if (false_label || else_se)
2518 false_label_p = &false_label;
2519 else
2520 false_label_p = NULL;
2522 /* If there was nothing else in our arms, just forward the label(s). */
2523 if (!then_se && !else_se)
2524 return shortcut_cond_r (pred, true_label_p, false_label_p,
2525 EXPR_LOC_OR_HERE (expr));
2527 /* If our last subexpression already has a terminal label, reuse it. */
2528 if (else_se)
2529 t = expr_last (else_);
2530 else if (then_se)
2531 t = expr_last (then_);
2532 else
2533 t = NULL;
2534 if (t && TREE_CODE (t) == LABEL_EXPR)
2535 end_label = LABEL_EXPR_LABEL (t);
2537 /* If we don't care about jumping to the 'else' branch, jump to the end
2538 if the condition is false. */
2539 if (!false_label_p)
2540 false_label_p = &end_label;
2542 /* We only want to emit these labels if we aren't hijacking them. */
2543 emit_end = (end_label == NULL_TREE);
2544 emit_false = (false_label == NULL_TREE);
2546 /* We only emit the jump over the else clause if we have to--if the
2547 then clause may fall through. Otherwise we can wind up with a
2548 useless jump and a useless label at the end of gimplified code,
2549 which will cause us to think that this conditional as a whole
2550 falls through even if it doesn't. If we then inline a function
2551 which ends with such a condition, that can cause us to issue an
2552 inappropriate warning about control reaching the end of a
2553 non-void function. */
2554 jump_over_else = block_may_fallthru (then_);
2556 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2557 EXPR_LOC_OR_HERE (expr));
2559 expr = NULL;
2560 append_to_statement_list (pred, &expr);
2562 append_to_statement_list (then_, &expr);
2563 if (else_se)
2565 if (jump_over_else)
2567 tree last = expr_last (expr);
2568 t = build_and_jump (&end_label);
2569 if (EXPR_HAS_LOCATION (last))
2570 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2571 append_to_statement_list (t, &expr);
2573 if (emit_false)
2575 t = build1 (LABEL_EXPR, void_type_node, false_label);
2576 append_to_statement_list (t, &expr);
2578 append_to_statement_list (else_, &expr);
2580 if (emit_end && end_label)
2582 t = build1 (LABEL_EXPR, void_type_node, end_label);
2583 append_to_statement_list (t, &expr);
2586 return expr;
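/* Illustrative sketch (hypothetical example, not from the original sources):
   "if (a && b) c; else d;" is rewritten by the code above into roughly
       if (a) ; else goto no;
       if (b) ; else goto no;
       c;
       goto end;
     no:
       d;
     end:
   which is then gimplified piecewise by gimplify_cond_expr.  */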
2589 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2591 tree
2592 gimple_boolify (tree expr)
2594 tree type = TREE_TYPE (expr);
2595 location_t loc = EXPR_LOCATION (expr);
2597 if (TREE_CODE (expr) == NE_EXPR
2598 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2599 && integer_zerop (TREE_OPERAND (expr, 1)))
2601 tree call = TREE_OPERAND (expr, 0);
2602 tree fn = get_callee_fndecl (call);
2604 /* For __builtin_expect ((long) (x), y) recurse into x as well
2605 if x is truth_value_p. */
2606 if (fn
2607 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2608 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2609 && call_expr_nargs (call) == 2)
2611 tree arg = CALL_EXPR_ARG (call, 0);
2612 if (arg)
2614 if (TREE_CODE (arg) == NOP_EXPR
2615 && TREE_TYPE (arg) == TREE_TYPE (call))
2616 arg = TREE_OPERAND (arg, 0);
2617 if (truth_value_p (TREE_CODE (arg)))
2619 arg = gimple_boolify (arg);
2620 CALL_EXPR_ARG (call, 0)
2621 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2627 switch (TREE_CODE (expr))
2629 case TRUTH_AND_EXPR:
2630 case TRUTH_OR_EXPR:
2631 case TRUTH_XOR_EXPR:
2632 case TRUTH_ANDIF_EXPR:
2633 case TRUTH_ORIF_EXPR:
2634 /* Also boolify the arguments of truth exprs. */
2635 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2636 /* FALLTHRU */
2638 case TRUTH_NOT_EXPR:
2639 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2641 /* These expressions always produce boolean results. */
2642 if (TREE_CODE (type) != BOOLEAN_TYPE)
2643 TREE_TYPE (expr) = boolean_type_node;
2644 return expr;
2646 case ANNOTATE_EXPR:
2647 if ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1))
2648 == annot_expr_ivdep_kind)
2650 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2651 if (TREE_CODE (type) != BOOLEAN_TYPE)
2652 TREE_TYPE (expr) = boolean_type_node;
2653 return expr;
2655 /* FALLTHRU */
2657 default:
2658 if (COMPARISON_CLASS_P (expr))
2660 /* These expressions always produce boolean results. */
2661 if (TREE_CODE (type) != BOOLEAN_TYPE)
2662 TREE_TYPE (expr) = boolean_type_node;
2663 return expr;
2665 /* Other expressions that get here must have boolean values, but
2666 might need to be converted to the appropriate mode. */
2667 if (TREE_CODE (type) == BOOLEAN_TYPE)
2668 return expr;
2669 return fold_convert_loc (loc, boolean_type_node, expr);
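/* Illustrative example (hypothetical, not from the original sources): in C a
   comparison such as "a < b" has type int; when used as a condition,
   gimple_boolify simply retypes it to boolean_type_node, whereas a plain
   scalar condition such as a pointer "p" is wrapped in a conversion to
   boolean_type_node instead.  */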
2673 /* Given a conditional expression *EXPR_P without side effects, gimplify
2674 its operands. New statements are inserted to PRE_P. */
2676 static enum gimplify_status
2677 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2679 tree expr = *expr_p, cond;
2680 enum gimplify_status ret, tret;
2681 enum tree_code code;
2683 cond = gimple_boolify (COND_EXPR_COND (expr));
2685 /* We need to handle && and || specially, as gimplifying them creates
2686 another pure COND_EXPR, which would otherwise lead to an infinite cycle. */
2687 code = TREE_CODE (cond);
2688 if (code == TRUTH_ANDIF_EXPR)
2689 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2690 else if (code == TRUTH_ORIF_EXPR)
2691 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2692 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2693 COND_EXPR_COND (*expr_p) = cond;
2695 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2696 is_gimple_val, fb_rvalue);
2697 ret = MIN (ret, tret);
2698 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2699 is_gimple_val, fb_rvalue);
2701 return MIN (ret, tret);
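/* Illustrative sketch (hypothetical example, not from the original sources):
   with allow_rhs_cond_expr set and side-effect-free, non-trapping arms, an
   expression like "x = p ? a : b;" keeps the COND_EXPR on the right-hand
   side; only the condition and the two arms are reduced to gimple values, so
   no control flow is generated for it.  */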
2704 /* Return true if evaluating EXPR could trap.
2705 EXPR is GENERIC, while tree_could_trap_p can be called
2706 only on GIMPLE. */
2708 static bool
2709 generic_expr_could_trap_p (tree expr)
2711 unsigned i, n;
2713 if (!expr || is_gimple_val (expr))
2714 return false;
2716 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2717 return true;
2719 n = TREE_OPERAND_LENGTH (expr);
2720 for (i = 0; i < n; i++)
2721 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2722 return true;
2724 return false;
2727 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2728 into
2730 if (p) if (p)
2731 t1 = a; a;
2732 else or else
2733 t1 = b; b;
2736 The second form is used when *EXPR_P is of type void.
2738 PRE_P points to the list where side effects that must happen before
2739 *EXPR_P should be stored. */
2741 static enum gimplify_status
2742 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2744 tree expr = *expr_p;
2745 tree type = TREE_TYPE (expr);
2746 location_t loc = EXPR_LOCATION (expr);
2747 tree tmp, arm1, arm2;
2748 enum gimplify_status ret;
2749 tree label_true, label_false, label_cont;
2750 bool have_then_clause_p, have_else_clause_p;
2751 gimple gimple_cond;
2752 enum tree_code pred_code;
2753 gimple_seq seq = NULL;
2755 /* If this COND_EXPR has a value, copy the values into a temporary within
2756 the arms. */
2757 if (!VOID_TYPE_P (type))
2759 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
2760 tree result;
2762 /* If either an rvalue is ok or we do not require an lvalue, create the
2763 temporary. But we cannot do that if the type is addressable. */
2764 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
2765 && !TREE_ADDRESSABLE (type))
2767 if (gimplify_ctxp->allow_rhs_cond_expr
2768 /* If either branch has side effects or could trap, it can't be
2769 evaluated unconditionally. */
2770 && !TREE_SIDE_EFFECTS (then_)
2771 && !generic_expr_could_trap_p (then_)
2772 && !TREE_SIDE_EFFECTS (else_)
2773 && !generic_expr_could_trap_p (else_))
2774 return gimplify_pure_cond_expr (expr_p, pre_p);
2776 tmp = create_tmp_var (type, "iftmp");
2777 result = tmp;
2780 /* Otherwise, only create and copy references to the values. */
2781 else
2783 type = build_pointer_type (type);
2785 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2786 then_ = build_fold_addr_expr_loc (loc, then_);
2788 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2789 else_ = build_fold_addr_expr_loc (loc, else_);
2791 expr
2792 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
2794 tmp = create_tmp_var (type, "iftmp");
2795 result = build_simple_mem_ref_loc (loc, tmp);
2798 /* Build the new then clause, `tmp = then_;'. But don't build the
2799 assignment if the value is void; in C++ it can be if it's a throw. */
2800 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2801 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
2803 /* Similarly, build the new else clause, `tmp = else_;'. */
2804 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2805 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
2807 TREE_TYPE (expr) = void_type_node;
2808 recalculate_side_effects (expr);
2810 /* Move the COND_EXPR to the prequeue. */
2811 gimplify_stmt (&expr, pre_p);
2813 *expr_p = result;
2814 return GS_ALL_DONE;
2817 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
2818 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
2819 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
2820 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
2822 /* Make sure the condition has BOOLEAN_TYPE. */
2823 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2825 /* Break apart && and || conditions. */
2826 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2827 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2829 expr = shortcut_cond_expr (expr);
2831 if (expr != *expr_p)
2833 *expr_p = expr;
2835 /* We can't rely on gimplify_expr to re-gimplify the expanded
2836 form properly, as cleanups might cause the target labels to be
2837 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2838 set up a conditional context. */
2839 gimple_push_condition ();
2840 gimplify_stmt (expr_p, &seq);
2841 gimple_pop_condition (pre_p);
2842 gimple_seq_add_seq (pre_p, seq);
2844 return GS_ALL_DONE;
2848 /* Now do the normal gimplification. */
2850 /* Gimplify condition. */
2851 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2852 fb_rvalue);
2853 if (ret == GS_ERROR)
2854 return GS_ERROR;
2855 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2857 gimple_push_condition ();
2859 have_then_clause_p = have_else_clause_p = false;
2860 if (TREE_OPERAND (expr, 1) != NULL
2861 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2862 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2863 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2864 == current_function_decl)
2865 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2866 have different locations, otherwise we end up with incorrect
2867 location information on the branches. */
2868 && (optimize
2869 || !EXPR_HAS_LOCATION (expr)
2870 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2871 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2873 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2874 have_then_clause_p = true;
2876 else
2877 label_true = create_artificial_label (UNKNOWN_LOCATION);
2878 if (TREE_OPERAND (expr, 2) != NULL
2879 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2880 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
2881 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
2882 == current_function_decl)
2883 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2884 have different locations, otherwise we end up with incorrect
2885 location information on the branches. */
2886 && (optimize
2887 || !EXPR_HAS_LOCATION (expr)
2888 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
2889 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
2891 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
2892 have_else_clause_p = true;
2894 else
2895 label_false = create_artificial_label (UNKNOWN_LOCATION);
2897 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
2898 &arm2);
2900 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
2901 label_false);
2903 gimplify_seq_add_stmt (&seq, gimple_cond);
2904 label_cont = NULL_TREE;
2905 if (!have_then_clause_p)
2907 /* For if (...) {} else { code; } put label_true after
2908 the else block. */
2909 if (TREE_OPERAND (expr, 1) == NULL_TREE
2910 && !have_else_clause_p
2911 && TREE_OPERAND (expr, 2) != NULL_TREE)
2912 label_cont = label_true;
2913 else
2915 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
2916 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
2917 /* For if (...) { code; } else {} or
2918 if (...) { code; } else goto label; or
2919 if (...) { code; return; } else { ... }
2920 label_cont isn't needed. */
2921 if (!have_else_clause_p
2922 && TREE_OPERAND (expr, 2) != NULL_TREE
2923 && gimple_seq_may_fallthru (seq))
2925 gimple g;
2926 label_cont = create_artificial_label (UNKNOWN_LOCATION);
2928 g = gimple_build_goto (label_cont);
2930 /* GIMPLE_COND's are very low level; they have embedded
2931 gotos. This particular embedded goto should not be marked
2932 with the location of the original COND_EXPR, as it would
2933 correspond to the COND_EXPR's condition, not the ELSE or the
2934 THEN arms. To avoid marking it with the wrong location, flag
2935 it as "no location". */
2936 gimple_set_do_not_emit_location (g);
2938 gimplify_seq_add_stmt (&seq, g);
2942 if (!have_else_clause_p)
2944 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
2945 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
2947 if (label_cont)
2948 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
2950 gimple_pop_condition (pre_p);
2951 gimple_seq_add_seq (pre_p, seq);
2953 if (ret == GS_ERROR)
2954 ; /* Do nothing. */
2955 else if (have_then_clause_p || have_else_clause_p)
2956 ret = GS_ALL_DONE;
2957 else
2959 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2960 expr = TREE_OPERAND (expr, 0);
2961 gimplify_stmt (&expr, pre_p);
2964 *expr_p = NULL;
2965 return ret;
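/* Illustrative sketch (hypothetical example, not from the original sources;
   label names are made up): "if (x) foo (); else bar ();" gimplifies to
   roughly
       if (x != 0) goto <D.1>; else goto <D.2>;
     <D.1>:
       foo ();
       goto <D.3>;
     <D.2>:
       bar ();
     <D.3>:
   where the three labels correspond to label_true, label_false and
   label_cont above.  */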
2968 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
2969 to be marked addressable.
2971 We cannot rely on such an expression being directly markable if a temporary
2972 has been created by the gimplification. In this case, we create another
2973 temporary and initialize it with a copy, which will become a store after we
2974 mark it addressable. This can happen if the front-end passed us something
2975 that it could not mark addressable yet, like a Fortran pass-by-reference
2976 parameter (int) floatvar. */
2978 static void
2979 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
2981 while (handled_component_p (*expr_p))
2982 expr_p = &TREE_OPERAND (*expr_p, 0);
2983 if (is_gimple_reg (*expr_p))
2984 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
2987 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2988 a call to __builtin_memcpy. */
2990 static enum gimplify_status
2991 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
2992 gimple_seq *seq_p)
2994 tree t, to, to_ptr, from, from_ptr;
2995 gimple gs;
2996 location_t loc = EXPR_LOCATION (*expr_p);
2998 to = TREE_OPERAND (*expr_p, 0);
2999 from = TREE_OPERAND (*expr_p, 1);
3001 /* Mark the RHS addressable. Beware that it may not be possible to do so
3002 directly if a temporary has been created by the gimplification. */
3003 prepare_gimple_addressable (&from, seq_p);
3005 mark_addressable (from);
3006 from_ptr = build_fold_addr_expr_loc (loc, from);
3007 gimplify_arg (&from_ptr, seq_p, loc);
3009 mark_addressable (to);
3010 to_ptr = build_fold_addr_expr_loc (loc, to);
3011 gimplify_arg (&to_ptr, seq_p, loc);
3013 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3015 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3017 if (want_value)
3019 /* tmp = memcpy() */
3020 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3021 gimple_call_set_lhs (gs, t);
3022 gimplify_seq_add_stmt (seq_p, gs);
3024 *expr_p = build_simple_mem_ref (t);
3025 return GS_ALL_DONE;
3028 gimplify_seq_add_stmt (seq_p, gs);
3029 *expr_p = NULL;
3030 return GS_ALL_DONE;
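/* Illustrative sketch (hypothetical example, not from the original sources;
   names are made up): an assignment between two objects whose size is only
   known at run time (the RHS carries a WITH_SIZE_EXPR), say "x = y" with
   size D.5, ends up as
       __builtin_memcpy (&x, &y, D.5);
   with the result re-read through the returned pointer when WANT_VALUE is
   true.  */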
3033 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3034 a call to __builtin_memset. In this case we know that the RHS is
3035 a CONSTRUCTOR with an empty element list. */
3037 static enum gimplify_status
3038 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3039 gimple_seq *seq_p)
3041 tree t, from, to, to_ptr;
3042 gimple gs;
3043 location_t loc = EXPR_LOCATION (*expr_p);
3045 /* Assert our assumptions, to abort instead of producing wrong code
3046 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3047 not be immediately exposed. */
3048 from = TREE_OPERAND (*expr_p, 1);
3049 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3050 from = TREE_OPERAND (from, 0);
3052 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3053 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3055 /* Now proceed. */
3056 to = TREE_OPERAND (*expr_p, 0);
3058 to_ptr = build_fold_addr_expr_loc (loc, to);
3059 gimplify_arg (&to_ptr, seq_p, loc);
3060 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3062 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3064 if (want_value)
3066 /* tmp = memset() */
3067 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3068 gimple_call_set_lhs (gs, t);
3069 gimplify_seq_add_stmt (seq_p, gs);
3071 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3072 return GS_ALL_DONE;
3075 gimplify_seq_add_stmt (seq_p, gs);
3076 *expr_p = NULL;
3077 return GS_ALL_DONE;
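/* Illustrative sketch (hypothetical example, not from the original sources;
   names are made up): clearing such an object with an empty CONSTRUCTOR,
   e.g. "x = (TYPE) {}" where TYPE has run-time size D.5, becomes
       __builtin_memset (&x, 0, D.5);  */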
3080 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3081 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3082 assignment. Return non-null if we detect a potential overlap. */
3084 struct gimplify_init_ctor_preeval_data
3086 /* The base decl of the lhs object. May be NULL, in which case we
3087 have to assume the lhs is indirect. */
3088 tree lhs_base_decl;
3090 /* The alias set of the lhs object. */
3091 alias_set_type lhs_alias_set;
3094 static tree
3095 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3097 struct gimplify_init_ctor_preeval_data *data
3098 = (struct gimplify_init_ctor_preeval_data *) xdata;
3099 tree t = *tp;
3101 /* If we find the base object, obviously we have overlap. */
3102 if (data->lhs_base_decl == t)
3103 return t;
3105 /* If the constructor component is indirect, determine if we have a
3106 potential overlap with the lhs. The only bits of information we
3107 have to go on at this point are addressability and alias sets. */
3108 if ((INDIRECT_REF_P (t)
3109 || TREE_CODE (t) == MEM_REF)
3110 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3111 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3112 return t;
3114 /* If the constructor component is a call, determine if it can hide a
3115 potential overlap with the lhs through an INDIRECT_REF like above.
3116 ??? Ugh - this is completely broken. In fact this whole analysis
3117 doesn't look conservative. */
3118 if (TREE_CODE (t) == CALL_EXPR)
3120 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3122 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3123 if (POINTER_TYPE_P (TREE_VALUE (type))
3124 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3125 && alias_sets_conflict_p (data->lhs_alias_set,
3126 get_alias_set
3127 (TREE_TYPE (TREE_VALUE (type)))))
3128 return t;
3131 if (IS_TYPE_OR_DECL_P (t))
3132 *walk_subtrees = 0;
3133 return NULL;
3136 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3137 force values that overlap with the lhs (as described by *DATA)
3138 into temporaries. */
3140 static void
3141 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3142 struct gimplify_init_ctor_preeval_data *data)
3144 enum gimplify_status one;
3146 /* If the value is constant, then there's nothing to pre-evaluate. */
3147 if (TREE_CONSTANT (*expr_p))
3149 /* Ensure it does not have side effects, it might contain a reference to
3150 the object we're initializing. */
3151 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3152 return;
3155 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3156 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3157 return;
3159 /* Recurse for nested constructors. */
3160 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3162 unsigned HOST_WIDE_INT ix;
3163 constructor_elt *ce;
3164 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3166 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3167 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3169 return;
3172 /* If this is a variable sized type, we must remember the size. */
3173 maybe_with_size_expr (expr_p);
3175 /* Gimplify the constructor element to something appropriate for the rhs
3176 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3177 the gimplifier will consider this a store to memory. Doing this
3178 gimplification now means that we won't have to deal with complicated
3179 language-specific trees, nor trees like SAVE_EXPR that can induce
3180 exponential search behavior. */
3181 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3182 if (one == GS_ERROR)
3184 *expr_p = NULL;
3185 return;
3188 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3189 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3190 always be true for all scalars, since is_gimple_mem_rhs insists on a
3191 temporary variable for them. */
3192 if (DECL_P (*expr_p))
3193 return;
3195 /* If this is of variable size, we have no choice but to assume it doesn't
3196 overlap since we can't make a temporary for it. */
3197 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3198 return;
3200 /* Otherwise, we must search for overlap ... */
3201 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3202 return;
3204 /* ... and if found, force the value into a temporary. */
3205 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
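/* Illustrative example (hypothetical, not from the original sources; names
   are made up): for "x = (struct S) { .a = *p, .b = 1 };" where *p may alias
   x, the overlapping element is forced into a temporary before the
   element-wise stores, giving roughly
       D.1 = *p;
       x.a = D.1;
       x.b = 1;  */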
3208 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3209 a RANGE_EXPR in a CONSTRUCTOR for an array.
3211 var = lower;
3212 loop_entry:
3213 object[var] = value;
3214 if (var == upper)
3215 goto loop_exit;
3216 var = var + 1;
3217 goto loop_entry;
3218 loop_exit:
3220 We increment var _after_ the loop exit check because we might otherwise
3221 fail if upper == TYPE_MAX_VALUE (type for upper).
3223 Note that we never have to deal with SAVE_EXPRs here, because this has
3224 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3226 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3227 gimple_seq *, bool);
3229 static void
3230 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3231 tree value, tree array_elt_type,
3232 gimple_seq *pre_p, bool cleared)
3234 tree loop_entry_label, loop_exit_label, fall_thru_label;
3235 tree var, var_type, cref, tmp;
3237 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3238 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3239 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3241 /* Create and initialize the index variable. */
3242 var_type = TREE_TYPE (upper);
3243 var = create_tmp_var (var_type, NULL);
3244 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3246 /* Add the loop entry label. */
3247 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3249 /* Build the reference. */
3250 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3251 var, NULL_TREE, NULL_TREE);
3253 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3254 the store. Otherwise just assign value to the reference. */
3256 if (TREE_CODE (value) == CONSTRUCTOR)
3257 /* NB we might have to call ourselves recursively through
3258 gimplify_init_ctor_eval if the value is a constructor. */
3259 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3260 pre_p, cleared);
3261 else
3262 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3264 /* We exit the loop when the index var is equal to the upper bound. */
3265 gimplify_seq_add_stmt (pre_p,
3266 gimple_build_cond (EQ_EXPR, var, upper,
3267 loop_exit_label, fall_thru_label));
3269 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3271 /* Otherwise, increment the index var... */
3272 tmp = build2 (PLUS_EXPR, var_type, var,
3273 fold_convert (var_type, integer_one_node));
3274 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3276 /* ...and jump back to the loop entry. */
3277 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3279 /* Add the loop exit label. */
3280 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3283 /* Return true if FDECL is accessing a field that is zero sized. */
3285 static bool
3286 zero_sized_field_decl (const_tree fdecl)
3288 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3289 && integer_zerop (DECL_SIZE (fdecl)))
3290 return true;
3291 return false;
3294 /* Return true if TYPE is zero sized. */
3296 static bool
3297 zero_sized_type (const_tree type)
3299 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3300 && integer_zerop (TYPE_SIZE (type)))
3301 return true;
3302 return false;
3305 /* A subroutine of gimplify_init_constructor. Generate individual
3306 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3307 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3308 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3309 zeroed first. */
3311 static void
3312 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3313 gimple_seq *pre_p, bool cleared)
3315 tree array_elt_type = NULL;
3316 unsigned HOST_WIDE_INT ix;
3317 tree purpose, value;
3319 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3320 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3322 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3324 tree cref;
3326 /* NULL values are created above for gimplification errors. */
3327 if (value == NULL)
3328 continue;
3330 if (cleared && initializer_zerop (value))
3331 continue;
3333 /* ??? Here's to hoping the front end fills in all of the indices,
3334 so we don't have to figure out what's missing ourselves. */
3335 gcc_assert (purpose);
3337 /* Skip zero-sized fields, unless value has side-effects. This can
3338 happen with calls to functions returning a zero-sized type, which
3339 we shouldn't discard. As a number of downstream passes don't
3340 expect sets of zero-sized fields, we rely on the gimplification of
3341 the MODIFY_EXPR we make below to drop the assignment statement. */
3342 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3343 continue;
3345 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3346 whole range. */
3347 if (TREE_CODE (purpose) == RANGE_EXPR)
3349 tree lower = TREE_OPERAND (purpose, 0);
3350 tree upper = TREE_OPERAND (purpose, 1);
3352 /* If the lower bound is equal to upper, just treat it as if
3353 upper was the index. */
3354 if (simple_cst_equal (lower, upper))
3355 purpose = upper;
3356 else
3358 gimplify_init_ctor_eval_range (object, lower, upper, value,
3359 array_elt_type, pre_p, cleared);
3360 continue;
3364 if (array_elt_type)
3366 /* Do not use bitsizetype for ARRAY_REF indices. */
3367 if (TYPE_DOMAIN (TREE_TYPE (object)))
3368 purpose
3369 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3370 purpose);
3371 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3372 purpose, NULL_TREE, NULL_TREE);
3374 else
3376 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3377 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3378 unshare_expr (object), purpose, NULL_TREE);
3381 if (TREE_CODE (value) == CONSTRUCTOR
3382 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3383 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3384 pre_p, cleared);
3385 else
3387 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3388 gimplify_and_add (init, pre_p);
3389 ggc_free (init);
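/* Illustrative example (hypothetical, not from the original sources; names
   are made up): with CLEARED false, "struct S s = { .a = x, .b = 2 };" is
   broken into the element-wise stores
       s.a = x;
       s.b = 2;
   while with CLEARED true the zero-valued elements are simply skipped, since
   the whole object has already been zeroed.  */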
3394 /* Return the appropriate RHS predicate for this LHS. */
3396 static gimple_predicate
3397 rhs_predicate_for (tree lhs)
3399 if (is_gimple_reg (lhs))
3400 return is_gimple_reg_rhs_or_call;
3401 else
3402 return is_gimple_mem_rhs_or_call;
3405 /* Gimplify a C99 compound literal expression. This just means adding
3406 the DECL_EXPR before the current statement and using its anonymous
3407 decl instead. */
3409 static enum gimplify_status
3410 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3411 bool (*gimple_test_f) (tree),
3412 fallback_t fallback)
3414 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3415 tree decl = DECL_EXPR_DECL (decl_s);
3416 tree init = DECL_INITIAL (decl);
3417 /* Mark the decl as addressable if the compound literal
3418 expression is addressable now, otherwise it is marked too late
3419 after we gimplify the initialization expression. */
3420 if (TREE_ADDRESSABLE (*expr_p))
3421 TREE_ADDRESSABLE (decl) = 1;
3422 /* Otherwise, if we don't need an lvalue and have a literal, directly
3423 substitute it.  Check if it matches the gimple predicate, as
3424 otherwise we'd generate a new temporary, and we can as well just
3425 use the decl we already have. */
3426 else if (!TREE_ADDRESSABLE (decl)
3427 && init
3428 && (fallback & fb_lvalue) == 0
3429 && gimple_test_f (init))
3431 *expr_p = init;
3432 return GS_OK;
3435 /* Preliminarily mark non-addressed complex variables as eligible
3436 for promotion to gimple registers. We'll transform their uses
3437 as we find them. */
3438 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3439 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3440 && !TREE_THIS_VOLATILE (decl)
3441 && !needs_to_live_in_memory (decl))
3442 DECL_GIMPLE_REG_P (decl) = 1;
3444 /* If the decl is not addressable, then it is being used in some
3445 expression or on the right hand side of a statement, and it can
3446 be put into a readonly data section. */
3447 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3448 TREE_READONLY (decl) = 1;
3450 /* This decl isn't mentioned in the enclosing block, so add it to the
3451 list of temps. FIXME it seems a bit of a kludge to say that
3452 anonymous artificial vars aren't pushed, but everything else is. */
3453 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3454 gimple_add_tmp_var (decl);
3456 gimplify_and_add (decl_s, pre_p);
3457 *expr_p = decl;
3458 return GS_OK;
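/* Illustrative example (hypothetical, not from the original sources; names
   are made up): for "int *p = (int []) { 1, 2, 3 };" the anonymous decl
   behind the compound literal is emitted through its DECL_EXPR and the
   literal is replaced by that decl, so the statement becomes roughly
   "p = &D.1;".  A literal used only as an rvalue whose initializer already
   satisfies the predicate, e.g. "(int) { 42 }", can simply be replaced by its
   initializer 42.  */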
3461 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3462 return a new CONSTRUCTOR if something changed. */
3464 static tree
3465 optimize_compound_literals_in_ctor (tree orig_ctor)
3467 tree ctor = orig_ctor;
3468 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3469 unsigned int idx, num = vec_safe_length (elts);
3471 for (idx = 0; idx < num; idx++)
3473 tree value = (*elts)[idx].value;
3474 tree newval = value;
3475 if (TREE_CODE (value) == CONSTRUCTOR)
3476 newval = optimize_compound_literals_in_ctor (value);
3477 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3479 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3480 tree decl = DECL_EXPR_DECL (decl_s);
3481 tree init = DECL_INITIAL (decl);
3483 if (!TREE_ADDRESSABLE (value)
3484 && !TREE_ADDRESSABLE (decl)
3485 && init
3486 && TREE_CODE (init) == CONSTRUCTOR)
3487 newval = optimize_compound_literals_in_ctor (init);
3489 if (newval == value)
3490 continue;
3492 if (ctor == orig_ctor)
3494 ctor = copy_node (orig_ctor);
3495 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3496 elts = CONSTRUCTOR_ELTS (ctor);
3498 (*elts)[idx].value = newval;
3500 return ctor;
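/* Illustrative example (hypothetical, not from the original sources): an
   initializer such as
       struct P q = { .inner = (struct I) { 1, 2 } };
   is rewritten so the literal's CONSTRUCTOR is embedded directly, as if
       struct P q = { .inner = { 1, 2 } };
   had been written, avoiding a separate object for the compound literal.  */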
3503 /* A subroutine of gimplify_modify_expr. Break out elements of a
3504 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3506 Note that we still need to clear any elements that don't have explicit
3507 initializers, so if not all elements are initialized we keep the
3508 original MODIFY_EXPR and just remove all of the constructor elements.
3510 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3511 GS_ERROR if we would have to create a temporary when gimplifying
3512 this constructor. Otherwise, return GS_OK.
3514 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3516 static enum gimplify_status
3517 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3518 bool want_value, bool notify_temp_creation)
3520 tree object, ctor, type;
3521 enum gimplify_status ret;
3522 vec<constructor_elt, va_gc> *elts;
3524 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3526 if (!notify_temp_creation)
3528 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3529 is_gimple_lvalue, fb_lvalue);
3530 if (ret == GS_ERROR)
3531 return ret;
3534 object = TREE_OPERAND (*expr_p, 0);
3535 ctor = TREE_OPERAND (*expr_p, 1) =
3536 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3537 type = TREE_TYPE (ctor);
3538 elts = CONSTRUCTOR_ELTS (ctor);
3539 ret = GS_ALL_DONE;
3541 switch (TREE_CODE (type))
3543 case RECORD_TYPE:
3544 case UNION_TYPE:
3545 case QUAL_UNION_TYPE:
3546 case ARRAY_TYPE:
3548 struct gimplify_init_ctor_preeval_data preeval_data;
3549 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3550 bool cleared, complete_p, valid_const_initializer;
3552 /* Aggregate types must lower constructors to initialization of
3553 individual elements. The exception is that a CONSTRUCTOR node
3554 with no elements indicates zero-initialization of the whole. */
3555 if (vec_safe_is_empty (elts))
3557 if (notify_temp_creation)
3558 return GS_OK;
3559 break;
3562 /* Fetch information about the constructor to direct later processing.
3563 We might want to make static versions of it in various cases, and
3564 can only do so if it is known to be a valid constant initializer. */
3565 valid_const_initializer
3566 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3567 &num_ctor_elements, &complete_p);
3569 /* If a const aggregate variable is being initialized, then it
3570 should never be a loss to promote the variable to be static. */
3571 if (valid_const_initializer
3572 && num_nonzero_elements > 1
3573 && TREE_READONLY (object)
3574 && TREE_CODE (object) == VAR_DECL
3575 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3577 if (notify_temp_creation)
3578 return GS_ERROR;
3579 DECL_INITIAL (object) = ctor;
3580 TREE_STATIC (object) = 1;
3581 if (!DECL_NAME (object))
3582 DECL_NAME (object) = create_tmp_var_name ("C");
3583 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3585 /* ??? C++ doesn't automatically append a .<number> to the
3586 assembler name, and even when it does, it looks at FE private
3587 data structures to figure out what that number should be,
3588 which are not set for this variable. I suppose this is
3589 important for local statics for inline functions, which aren't
3590 "local" in the object file sense. So in order to get a unique
3591 TU-local symbol, we must invoke the lhd version now. */
3592 lhd_set_decl_assembler_name (object);
3594 *expr_p = NULL_TREE;
3595 break;
3598 /* If there are "lots" of initialized elements, even discounting
3599 those that are not address constants (and thus *must* be
3600 computed at runtime), then partition the constructor into
3601 constant and non-constant parts. Block copy the constant
3602 parts in, then generate code for the non-constant parts. */
3603 /* TODO. There's code in cp/typeck.c to do this. */
3605 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3606 /* store_constructor will ignore the clearing of variable-sized
3607 objects. Initializers for such objects must explicitly set
3608 every field that needs to be set. */
3609 cleared = false;
3610 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3611 /* If the constructor isn't complete, clear the whole object
3612 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3614 ??? This ought not to be needed. For any element not present
3615 in the initializer, we should simply set it to zero. Except
3616 we'd need to *find* the elements that are not present, and that
3617 requires trickery to avoid quadratic compile-time behavior in
3618 large cases or excessive memory use in small cases. */
3619 cleared = true;
3620 else if (num_ctor_elements - num_nonzero_elements
3621 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3622 && num_nonzero_elements < num_ctor_elements / 4)
3623 /* If there are "lots" of zeros, it's more efficient to clear
3624 the memory and then set the nonzero elements. */
3625 cleared = true;
3626 else
3627 cleared = false;
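 /* A hedged illustration of the choices made above (hypothetical user
    code, arbitrary names):

      int big[256] = { [7] = 1 };

    Here zeros vastly outnumber the single nonzero element, so the
    object is block-cleared first and only big[7] is stored afterwards
    (via the !complete_p branch or the CLEAR_RATIO heuristic, depending
    on how the front end recorded the initializer).  By contrast, for

      int four[4] = { 1, 2, 3, 4 };

    nothing is zero, CLEARED stays false, and the elements are stored
    individually unless the constant-pool block copy below applies. */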
3629 /* If there are "lots" of initialized elements, and all of them
3630 are valid address constants, then the entire initializer can
3631 be dropped to memory, and then memcpy'd out. Don't do this
3632 for sparse arrays, though, as it's more efficient to follow
3633 the standard CONSTRUCTOR behavior of memset followed by
3634 individual element initialization. Also don't do this for small
3635 all-zero initializers (which aren't big enough to merit
3636 clearing), and don't try to make bitwise copies of
3637 TREE_ADDRESSABLE types.
3639 We cannot apply this transformation when compiling a chkp static
3640 initializer, because creating the initializer image in memory
3641 would require static initialization of bounds for it. That would
3642 trigger another gimplification of a similar initializer, and we
3643 could fall into an infinite loop. */
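 /* For illustration (hypothetical user code, arbitrary names):

      void g (void)
      {
        int digits[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
        ...
      }

    Every element is a constant, so the whole initializer can be emitted
    once as a read-only constant-pool object (tree_output_constant_def
    below) and block-copied into DIGITS, subject to the size and
    alignment checks that follow. */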
3644 if (valid_const_initializer
3645 && !(cleared || num_nonzero_elements == 0)
3646 && !TREE_ADDRESSABLE (type)
3647 && (!current_function_decl
3648 || !lookup_attribute ("chkp ctor",
3649 DECL_ATTRIBUTES (current_function_decl))))
3651 HOST_WIDE_INT size = int_size_in_bytes (type);
3652 unsigned int align;
3654 /* ??? We can still get unbounded array types, at least
3655 from the C++ front end. This seems wrong, but attempt
3656 to work around it for now. */
3657 if (size < 0)
3659 size = int_size_in_bytes (TREE_TYPE (object));
3660 if (size >= 0)
3661 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3664 /* Find the maximum alignment we can assume for the object. */
3665 /* ??? Make use of DECL_OFFSET_ALIGN. */
3666 if (DECL_P (object))
3667 align = DECL_ALIGN (object);
3668 else
3669 align = TYPE_ALIGN (type);
3671 /* Do a block move either if the size is so small as to make
3672 each individual move a sub-unit move on average, or if it
3673 is so large as to make individual moves inefficient. */
3674 if (size > 0
3675 && num_nonzero_elements > 1
3676 && (size < num_nonzero_elements
3677 || !can_move_by_pieces (size, align)))
3679 if (notify_temp_creation)
3680 return GS_ERROR;
3682 walk_tree (&ctor, force_labels_r, NULL, NULL);
3683 ctor = tree_output_constant_def (ctor);
3684 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3685 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3686 TREE_OPERAND (*expr_p, 1) = ctor;
3688 /* This is no longer an assignment of a CONSTRUCTOR, but
3689 we still may have processing to do on the LHS. So
3690 pretend we didn't do anything here to let that happen. */
3691 return GS_UNHANDLED;
3695 /* If the target is volatile, we have nonzero elements, and there is
3696 more than one field to assign, initialize the target from a temporary. */
3697 if (TREE_THIS_VOLATILE (object)
3698 && !TREE_ADDRESSABLE (type)
3699 && num_nonzero_elements > 0
3700 && vec_safe_length (elts) > 1)
3702 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3703 TREE_OPERAND (*expr_p, 0) = temp;
3704 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3705 *expr_p,
3706 build2 (MODIFY_EXPR, void_type_node,
3707 object, temp));
3708 return GS_OK;
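 /* A hedged sketch of the volatile case handled above (hypothetical
    user code, arbitrary names):

      volatile struct pair { int x, y; } p = { 1, 2 };

    Initializing P field by field would perform two separate volatile
    stores; building the value in a non-volatile temporary and assigning
    the temporary to P afterwards keeps the access to the volatile
    object a single whole-object assignment, which is what the
    COMPOUND_EXPR built above arranges. */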
3711 if (notify_temp_creation)
3712 return GS_OK;
3714 /* If there are nonzero elements and if needed, pre-evaluate to capture
3715 elements overlapping with the lhs into temporaries. We must do this
3716 before clearing to fetch the values before they are zeroed-out. */
3717 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3719 preeval_data.lhs_base_decl = get_base_address (object);
3720 if (!DECL_P (preeval_data.lhs_base_decl))
3721 preeval_data.lhs_base_decl = NULL;
3722 preeval_data.lhs_alias_set = get_alias_set (object);
3724 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3725 pre_p, post_p, &preeval_data);
3728 if (cleared)
3730 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3731 Note that we still have to gimplify, in order to handle the
3732 case of variable sized types. Avoid shared tree structures. */
3733 CONSTRUCTOR_ELTS (ctor) = NULL;
3734 TREE_SIDE_EFFECTS (ctor) = 0;
3735 object = unshare_expr (object);
3736 gimplify_stmt (expr_p, pre_p);
3739 /* If we have not block cleared the object, or if there are nonzero
3740 elements in the constructor, add assignments to the individual
3741 scalar fields of the object. */
3742 if (!cleared || num_nonzero_elements > 0)
3743 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3745 *expr_p = NULL_TREE;
3747 break;
3749 case COMPLEX_TYPE:
3751 tree r, i;
3753 if (notify_temp_creation)
3754 return GS_OK;
3756 /* Extract the real and imaginary parts out of the ctor. */
3757 gcc_assert (elts->length () == 2);
3758 r = (*elts)[0].value;
3759 i = (*elts)[1].value;
3760 if (r == NULL || i == NULL)
3762 tree zero = build_zero_cst (TREE_TYPE (type));
3763 if (r == NULL)
3764 r = zero;
3765 if (i == NULL)
3766 i = zero;
3769 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3770 represent creation of a complex value. */
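 /* For illustration (hypothetical code, arbitrary names): a complex
    CONSTRUCTOR whose real and imaginary parts are both constant,
    conceptually the value of

      _Complex double z = 3.0 + 4.0i;

    is folded to a COMPLEX_CST below, while one with a non-constant
    part, conceptually

      _Complex double w = x + 1.0i;

    is rebuilt as COMPLEX_EXPR <x, 1.0> and then gimplified. */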
3771 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3773 ctor = build_complex (type, r, i);
3774 TREE_OPERAND (*expr_p, 1) = ctor;
3776 else
3778 ctor = build2 (COMPLEX_EXPR, type, r, i);
3779 TREE_OPERAND (*expr_p, 1) = ctor;
3780 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3781 pre_p,
3782 post_p,
3783 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3784 fb_rvalue);
3787 break;
3789 case VECTOR_TYPE:
3791 unsigned HOST_WIDE_INT ix;
3792 constructor_elt *ce;
3794 if (notify_temp_creation)
3795 return GS_OK;
3797 /* Go ahead and simplify constant constructors to VECTOR_CST. */
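 /* For illustration (hypothetical user code using the GNU vector
    extension, arbitrary names):

      typedef int v4si __attribute__ ((vector_size (16)));

      v4si a = { 1, 2, 3, 4 };    every element is a constant, so the
                                  CONSTRUCTOR is simplified to a
                                  VECTOR_CST just below.
      v4si b = { i, 2, 3, 4 };    one element is a variable, so the
                                  CONSTRUCTOR is kept and each element
                                  is gimplified to a gimple value by
                                  the loop further down. */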
3798 if (TREE_CONSTANT (ctor))
3800 bool constant_p = true;
3801 tree value;
3803 /* Even when ctor is constant, it might contain non-*_CST
3804 elements, such as addresses or trapping values like
3805 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3806 in VECTOR_CST nodes. */
3807 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3808 if (!CONSTANT_CLASS_P (value))
3810 constant_p = false;
3811 break;
3814 if (constant_p)
3816 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3817 break;
3820 /* Don't reduce an initializer constant even if we can't
3821 make a VECTOR_CST. It won't do anything for us, and it'll
3822 prevent us from representing it as a single constant. */
3823 if (initializer_constant_valid_p (ctor, type))
3824 break;
3826 TREE_CONSTANT (ctor) = 0;
3829 /* Vector types use CONSTRUCTOR all the way through gimple
3830 compilation as a general initializer. */
3831 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
3833 enum gimplify_status tret;
3834 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3835 fb_rvalue);
3836 if (tret == GS_ERROR)
3837 ret = GS_ERROR;
3839 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3840 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3842 break;
3844 default:
3845 /* So how did we get a CONSTRUCTOR for a scalar type? */
3846 gcc_unreachable ();
3849 if (ret == GS_ERROR)
3850 return GS_ERROR;
3851 else if (want_value)
3853 *expr_p = object;
3854 return GS_OK;
3856 else
3858 /* If we have gimplified both sides of the initializer but have
3859 not emitted an assignment, do so now. */
3860 if (*expr_p)
3862 tree lhs = TREE_OPERAND (*expr_p, 0);
3863 tree rhs = TREE_OPERAND (*expr_p, 1);
3864 gimple init = gimple_build_assign (lhs, rhs);
3865 gimplify_seq_add_stmt (pre_p, init);
3866 *expr_p = NULL;
3869 return GS_ALL_DONE;
3873 /* Given a pointer value OP0, return a simplified version of an
3874 indirection through OP0, or NULL_TREE if no simplification is
3875 possible. This may only be applied to the rhs of an expression.
3876 Note that the resulting type may differ from the pointed-to type,
3877 but only in ways that keep it compatible from the langhooks
3878 point of view. */
3880 static tree
3881 gimple_fold_indirect_ref_rhs (tree t)
3883 return gimple_fold_indirect_ref (t);
3886 /* Subroutine of gimplify_modify_expr to do simplifications of
3887 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
3888 something changes. */
3890 static enum gimplify_status
3891 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
3892 gimple_seq *pre_p, gimple_seq *post_p,
3893 bool want_value)
3895 enum gimplify_status ret = GS_UNHANDLED;
3896 bool changed;
3900 changed = false;
3901 switch (TREE_CODE (*from_p))
3903 case VAR_DECL:
3904 /* If we're assigning from a read-only variable initialized with
3905 a constructor, do the direct assignment from the constructor,
3906 but only if neither source nor target are volatile since this
3907 latter assignment might end up being done on a per-field basis. */
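 /* A hedged sketch of this case (hypothetical user code, arbitrary
    names):

      static const struct pt { int x, y; } one_two = { 1, 2 };
      struct pt p;
      ...
      p = one_two;

    ONE_TWO is read-only and its DECL_INITIAL is a CONSTRUCTOR, so the
    assignment can be rewritten as if it were p = { 1, 2 } and handed to
    gimplify_init_constructor, unless doing so would force the
    constructor into memory anyway (the GS_ERROR check below). */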
3908 if (DECL_INITIAL (*from_p)
3909 && TREE_READONLY (*from_p)
3910 && !TREE_THIS_VOLATILE (*from_p)
3911 && !TREE_THIS_VOLATILE (*to_p)
3912 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
3914 tree old_from = *from_p;
3915 enum gimplify_status subret;
3917 /* Move the constructor into the RHS. */
3918 *from_p = unshare_expr (DECL_INITIAL (*from_p));
3920 /* Let's see if gimplify_init_constructor will need to put
3921 it in memory. */
3922 subret = gimplify_init_constructor (expr_p, NULL, NULL,
3923 false, true);
3924 if (subret == GS_ERROR)
3926 /* If so, revert the change. */
3927 *from_p = old_from;
3929 else
3931 ret = GS_OK;
3932 changed = true;
3935 break;
3936 case INDIRECT_REF:
3938 /* If we have code like
3940 *(const A*)(A*)&x
3942 where the type of "x" is a (possibly cv-qualified variant
3943 of "A"), treat the entire expression as identical to "x".
3944 This kind of code arises in C++ when an object is bound
3945 to a const reference, and if "x" is a TARGET_EXPR we want
3946 to take advantage of the optimization below. */
3947 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
3948 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3949 if (t)
3951 if (TREE_THIS_VOLATILE (t) != volatile_p)
3953 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3954 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
3955 build_fold_addr_expr (t));
3956 if (REFERENCE_CLASS_P (t))
3957 TREE_THIS_VOLATILE (t) = volatile_p;
3959 *from_p = t;
3960 ret = GS_OK;
3961 changed = true;
3963 break;
3966 case TARGET_EXPR:
3968 /* If we are initializing something from a TARGET_EXPR, strip the
3969 TARGET_EXPR and initialize it directly, if possible. This can't
3970 be done if the initializer is void, since that implies that the
3971 temporary is set in some non-trivial way.
3973 ??? What about code that pulls out the temp and uses it
3974 elsewhere? I think that such code never uses the TARGET_EXPR as
3975 an initializer. If I'm wrong, we'll die because the temp won't
3976 have any RTL. In that case, I guess we'll need to replace
3977 references somehow. */
3978 tree init = TARGET_EXPR_INITIAL (*from_p);
3980 if (init
3981 && !VOID_TYPE_P (TREE_TYPE (init)))
3983 *from_p = init;
3984 ret = GS_OK;
3985 changed = true;
3988 break;
3990 case COMPOUND_EXPR:
3991 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3992 caught. */
3993 gimplify_compound_expr (from_p, pre_p, true);
3994 ret = GS_OK;
3995 changed = true;
3996 break;
3998 case CONSTRUCTOR:
3999 /* If we already made some changes, let the front end have a
4000 crack at this before we break it down. */
4001 if (ret != GS_UNHANDLED)
4002 break;
4003 /* If we're initializing from a CONSTRUCTOR, break this into
4004 individual MODIFY_EXPRs. */
4005 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4006 false);
4008 case COND_EXPR:
4009 /* If we're assigning to a non-register type, push the assignment
4010 down into the branches. This is mandatory for ADDRESSABLE types,
4011 since we cannot generate temporaries for such, but it saves a
4012 copy in other cases as well. */
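 /* A hedged sketch of the transformation performed here (hypothetical
    user code, arbitrary names):

      struct big r, a, b;
      int c;
      ...
      r = c ? a : b;

    Since the type of R is not a register type, the assignment is pushed
    into the branches, roughly

      if (c) r = a; else r = b;

    which avoids materializing a temporary for the selected value. */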
4013 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4015 /* This code should mirror the code in gimplify_cond_expr. */
4016 enum tree_code code = TREE_CODE (*expr_p);
4017 tree cond = *from_p;
4018 tree result = *to_p;
4020 ret = gimplify_expr (&result, pre_p, post_p,
4021 is_gimple_lvalue, fb_lvalue);
4022 if (ret != GS_ERROR)
4023 ret = GS_OK;
4025 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4026 TREE_OPERAND (cond, 1)
4027 = build2 (code, void_type_node, result,
4028 TREE_OPERAND (cond, 1));
4029 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4030 TREE_OPERAND (cond, 2)
4031 = build2 (code, void_type_node, unshare_expr (result),
4032 TREE_OPERAND (cond, 2));
4034 TREE_TYPE (cond) = void_type_node;
4035 recalculate_side_effects (cond);
4037 if (want_value)
4039 gimplify_and_add (cond, pre_p);
4040 *expr_p = unshare_expr (result);
4042 else
4043 *expr_p = cond;
4044 return ret;
4046 break;
4048 case CALL_EXPR:
4049 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4050 return slot so that we don't generate a temporary. */
4051 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4052 && aggregate_value_p (*from_p, *from_p))
4054 bool use_target;
4056 if (!(rhs_predicate_for (*to_p))(*from_p))
4057 /* If we need a temporary, *to_p isn't accurate. */
4058 use_target = false;
4059 /* It's OK to use the return slot directly unless it's an NRV. */
4060 else if (TREE_CODE (*to_p) == RESULT_DECL
4061 && DECL_NAME (*to_p) == NULL_TREE
4062 && needs_to_live_in_memory (*to_p))
4063 use_target = true;
4064 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4065 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4066 /* Don't force regs into memory. */
4067 use_target = false;
4068 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4069 /* It's OK to use the target directly if it's being
4070 initialized. */
4071 use_target = true;
4072 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4073 /* Always use the target and thus RSO for variable-sized types.
4074 GIMPLE cannot deal with a variable-sized assignment
4075 embedded in a call statement. */
4076 use_target = true;
4077 else if (TREE_CODE (*to_p) != SSA_NAME
4078 && (!is_gimple_variable (*to_p)
4079 || needs_to_live_in_memory (*to_p)))
4080 /* Don't use the original target if it's already addressable;
4081 if its address escapes, and the called function uses the
4082 NRV optimization, a conforming program could see *to_p
4083 change before the called function returns; see c++/19317.
4084 When optimizing, the return_slot pass marks more functions
4085 as safe after we have escape info. */
4086 use_target = false;
4087 else
4088 use_target = true;
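 /* For illustration (hypothetical user code, arbitrary names):

      struct big { int a[64]; };
      struct big make_big (void);
      ...
      struct big b = make_big ();

    When MAKE_BIG returns in memory and the conditions above hold,
    CALL_EXPR_RETURN_SLOT_OPT is set so the call constructs its result
    directly in B instead of returning into a temporary that is then
    copied into B. */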
4090 if (use_target)
4092 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4093 mark_addressable (*to_p);
4096 break;
4098 case WITH_SIZE_EXPR:
4099 /* Likewise for calls that return an aggregate of non-constant size,
4100 since we would not be able to generate a temporary at all. */
4101 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4103 *from_p = TREE_OPERAND (*from_p, 0);
4104 /* We don't change ret in this case because the
4105 WITH_SIZE_EXPR might have been added in
4106 gimplify_modify_expr, so returning GS_OK would lead to an
4107 infinite loop. */
4108 changed = true;
4110 break;
4112 /* If we're initializing from a container, push the initialization
4113 inside it. */
4114 case CLEANUP_POINT_EXPR:
4115 case BIND_EXPR:
4116 case STATEMENT_LIST:
4118 tree wrap = *from_p;
4119 tree t;
4121 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4122 fb_lvalue);
4123 if (ret != GS_ERROR)
4124 ret = GS_OK;
4126 t = voidify_wrapper_expr (wrap, *expr_p);
4127 gcc_assert (t == *expr_p);
4129 if (want_value)
4131 gimplify_and_add (wrap, pre_p);
4132 *expr_p = unshare_expr (*to_p);
4134 else
4135 *expr_p = wrap;
4136 return GS_OK;
4139 case COMPOUND_LITERAL_EXPR:
4141 tree complit = TREE_OPERAND (*expr_p, 1);
4142 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4143 tree decl = DECL_EXPR_DECL (decl_s);
4144 tree init = DECL_INITIAL (decl);
4146 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4147 into struct T x = { 0, 1, 2 } if the address of the
4148 compound literal has never been taken. */
4149 if (!TREE_ADDRESSABLE (complit)
4150 && !TREE_ADDRESSABLE (decl)
4151 && init)
4153 *expr_p = copy_node (*expr_p);
4154 TREE_OPERAND (*expr_p, 1) = init;
4155 return GS_OK;
4159 default:
4160 break;
4163 while (changed);
4165 return ret;
4169 /* Return true if T looks like a valid GIMPLE statement. */
4171 static bool
4172 is_gimple_stmt (tree t)
4174 const enum tree_code code = TREE_CODE (t);
4176 switch (code)
4178 case NOP_EXPR:
4179 /* The only valid NOP_EXPR is the empty statement. */
4180 return IS_EMPTY_STMT (t);
4182 case BIND_EXPR:
4183 case COND_EXPR:
4184 /* These are only valid if they're void. */
4185 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4187 case SWITCH_EXPR:
4188 case GOTO_EXPR:
4189 case RETURN_EXPR:
4190 case LABEL_EXPR:
4191 case CASE_LABEL_EXPR:
4192 case TRY_CATCH_EXPR:
4193 case TRY_FINALLY_EXPR:
4194 case EH_FILTER_EXPR:
4195 case CATCH_EXPR:
4196 case ASM_EXPR:
4197 case STATEMENT_LIST:
4198 case OMP_PARALLEL:
4199 case OMP_FOR:
4200 case OMP_SIMD:
4201 case OMP_DISTRIBUTE:
4202 case OMP_SECTIONS:
4203 case OMP_SECTION:
4204 case OMP_SINGLE:
4205 case OMP_MASTER:
4206 case OMP_TASKGROUP:
4207 case OMP_ORDERED:
4208 case OMP_CRITICAL:
4209 case OMP_TASK:
4210 /* These are always void. */
4211 return true;
4213 case CALL_EXPR:
4214 case MODIFY_EXPR:
4215 case PREDICT_EXPR:
4216 /* These are valid regardless of their type. */
4217 return true;
4219 default:
4220 return false;
4225 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4226 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4227 DECL_GIMPLE_REG_P set.
4229 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4230 other, unmodified part of the complex object just before the total store.
4231 As a consequence, if the object is still uninitialized, an undefined value
4232 will be loaded into a register, which may result in a spurious exception
4233 if the register is floating-point and the value happens to be a signaling
4234 NaN for example. Then the fully-fledged complex operations lowering pass
4235 followed by a DCE pass are necessary in order to fix things up. */
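 /* A hedged sketch of the promotion (hypothetical user code; the exact
    temporaries differ):

      _Complex float c;
      ...
      __real__ c = x;

    becomes approximately

      tmp = __imag__ c;
      c = COMPLEX_EXPR <x, tmp>;

    i.e. the untouched imaginary part is loaded first and the whole
    complex value is stored back, as described above. */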
4237 static enum gimplify_status
4238 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4239 bool want_value)
4241 enum tree_code code, ocode;
4242 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4244 lhs = TREE_OPERAND (*expr_p, 0);
4245 rhs = TREE_OPERAND (*expr_p, 1);
4246 code = TREE_CODE (lhs);
4247 lhs = TREE_OPERAND (lhs, 0);
4249 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4250 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4251 TREE_NO_WARNING (other) = 1;
4252 other = get_formal_tmp_var (other, pre_p);
4254 realpart = code == REALPART_EXPR ? rhs : other;
4255 imagpart = code == REALPART_EXPR ? other : rhs;
4257 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4258 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4259 else
4260 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4262 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4263 *expr_p = (want_value) ? rhs : NULL_TREE;
4265 return GS_ALL_DONE;
4268 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4270 modify_expr
4271 : varname '=' rhs
4272 | '*' ID '=' rhs
4274 PRE_P points to the list where side effects that must happen before
4275 *EXPR_P should be stored.
4277 POST_P points to the list where side effects that must happen after
4278 *EXPR_P should be stored.
4280 WANT_VALUE is nonzero iff we want to use the value of this expression
4281 in another expression. */
4283 static enum gimplify_status
4284 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4285 bool want_value)
4287 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4288 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4289 enum gimplify_status ret = GS_UNHANDLED;
4290 gimple assign;
4291 location_t loc = EXPR_LOCATION (*expr_p);
4292 gimple_stmt_iterator gsi;
4294 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4295 || TREE_CODE (*expr_p) == INIT_EXPR);
4297 if (fn_contains_cilk_spawn_p (cfun)
4298 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p)
4299 && !seen_error ())
4300 return (enum gimplify_status)
4301 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p, post_p);
4303 /* Trying to simplify a clobber using normal logic doesn't work,
4304 so handle it here. */
4305 if (TREE_CLOBBER_P (*from_p))
4307 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4308 if (ret == GS_ERROR)
4309 return ret;
4310 gcc_assert (!want_value
4311 && (TREE_CODE (*to_p) == VAR_DECL
4312 || TREE_CODE (*to_p) == MEM_REF));
4313 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4314 *expr_p = NULL;
4315 return GS_ALL_DONE;
4318 /* Insert pointer conversions required by the middle-end that are not
4320 required by the frontend. This fixes middle-end type checking for
4321 cases like gcc.dg/redecl-6.c. */
4321 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4323 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4324 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4325 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4328 /* See if any simplifications can be done based on what the RHS is. */
4329 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4330 want_value);
4331 if (ret != GS_UNHANDLED)
4332 return ret;
4334 /* For zero sized types only gimplify the left hand side and right hand
4335 side as statements and throw away the assignment. Do this after
4336 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4337 types properly. */
4338 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4340 gimplify_stmt (from_p, pre_p);
4341 gimplify_stmt (to_p, pre_p);
4342 *expr_p = NULL_TREE;
4343 return GS_ALL_DONE;
4346 /* If the value being copied is of variable width, compute the length
4347 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4348 before gimplifying any of the operands so that we can resolve any
4349 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4350 the size of the expression to be copied, not of the destination, so
4351 that is what we must do here. */
4352 maybe_with_size_expr (from_p);
4354 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4355 if (ret == GS_ERROR)
4356 return ret;
4358 /* As a special case, we have to temporarily allow for assignments
4359 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4360 a toplevel statement, when gimplifying the GENERIC expression
4361 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4362 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4364 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4365 prevent gimplify_expr from trying to create a new temporary for
4366 foo's LHS, we tell it that it should only gimplify until it
4367 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4368 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4369 and all we need to do here is set 'a' to be its LHS. */
4370 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4371 fb_rvalue);
4372 if (ret == GS_ERROR)
4373 return ret;
4375 /* Now see if the above changed *from_p to something we handle specially. */
4376 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4377 want_value);
4378 if (ret != GS_UNHANDLED)
4379 return ret;
4381 /* If we've got a variable-sized assignment between two lvalues (i.e. one
4382 that does not involve a call), then we can make things a bit more
4383 straightforward by converting the assignment to memcpy or memset. */
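 /* Roughly (a sketch; the actual calls are built by
    gimplify_modify_expr_to_memcpy/memset below):

      a = b;      with a WITH_SIZE_EXPR of size SZ on the rhs becomes
                  __builtin_memcpy (&a, &b, SZ);
      a = { };    a CONSTRUCTOR rhs, i.e. zero-initialization, becomes
                  __builtin_memset (&a, 0, SZ);  */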
4384 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4386 tree from = TREE_OPERAND (*from_p, 0);
4387 tree size = TREE_OPERAND (*from_p, 1);
4389 if (TREE_CODE (from) == CONSTRUCTOR)
4390 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4392 if (is_gimple_addressable (from))
4394 *from_p = from;
4395 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4396 pre_p);
4400 /* Transform partial stores to non-addressable complex variables into
4401 total stores. This allows us to use real instead of virtual operands
4402 for these variables, which improves optimization. */
4403 if ((TREE_CODE (*to_p) == REALPART_EXPR
4404 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4405 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4406 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4408 /* Try to alleviate the effects of the gimplification creating artificial
4409 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4410 if (!gimplify_ctxp->into_ssa
4411 && TREE_CODE (*from_p) == VAR_DECL
4412 && DECL_IGNORED_P (*from_p)
4413 && DECL_P (*to_p)
4414 && !DECL_IGNORED_P (*to_p))
4416 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4417 DECL_NAME (*from_p)
4418 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4419 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4420 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4423 if (want_value && TREE_THIS_VOLATILE (*to_p))
4424 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4426 if (TREE_CODE (*from_p) == CALL_EXPR)
4428 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4429 instead of a GIMPLE_ASSIGN. */
4430 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4431 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4432 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4433 assign = gimple_build_call_from_tree (*from_p);
4434 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4435 notice_special_calls (assign);
4436 if (!gimple_call_noreturn_p (assign))
4437 gimple_call_set_lhs (assign, *to_p);
4439 else
4441 assign = gimple_build_assign (*to_p, *from_p);
4442 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4445 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4447 /* We should have got an SSA name from the start. */
4448 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4451 gimplify_seq_add_stmt (pre_p, assign);
4452 gsi = gsi_last (*pre_p);
4453 /* Don't fold stmts inside of a target construct; we'll do it
4454 during the omplower pass instead. */
4455 struct gimplify_omp_ctx *ctx;
4456 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
4457 if (ctx->region_type == ORT_TARGET)
4458 break;
4459 if (ctx == NULL)
4460 fold_stmt (&gsi);
4462 if (want_value)
4464 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4465 return GS_OK;
4467 else
4468 *expr_p = NULL;
4470 return GS_ALL_DONE;
4473 /* Gimplify a comparison between two variable-sized objects. Do this
4474 with a call to BUILT_IN_MEMCMP. */
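 /* Roughly (a sketch of the rewrite done below, arbitrary names):

      a == b      with variable-sized operands becomes
                  __builtin_memcmp (&a, &b, <size of a>) == 0

    and a != comparison likewise becomes a memcmp compared != 0; any
    PLACEHOLDER_EXPRs in the size are substituted from the first
    operand. */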
4476 static enum gimplify_status
4477 gimplify_variable_sized_compare (tree *expr_p)
4479 location_t loc = EXPR_LOCATION (*expr_p);
4480 tree op0 = TREE_OPERAND (*expr_p, 0);
4481 tree op1 = TREE_OPERAND (*expr_p, 1);
4482 tree t, arg, dest, src, expr;
4484 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4485 arg = unshare_expr (arg);
4486 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4487 src = build_fold_addr_expr_loc (loc, op1);
4488 dest = build_fold_addr_expr_loc (loc, op0);
4489 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4490 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4492 expr
4493 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4494 SET_EXPR_LOCATION (expr, loc);
4495 *expr_p = expr;
4497 return GS_OK;
4500 /* Gimplify a comparison between two aggregate objects of integral scalar
4501 mode as a comparison between the bitwise equivalent scalar values. */
4503 static enum gimplify_status
4504 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4506 location_t loc = EXPR_LOCATION (*expr_p);
4507 tree op0 = TREE_OPERAND (*expr_p, 0);
4508 tree op1 = TREE_OPERAND (*expr_p, 1);
4510 tree type = TREE_TYPE (op0);
4511 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4513 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4514 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4516 *expr_p
4517 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4519 return GS_OK;
4522 /* Gimplify an expression sequence. This function gimplifies each
4523 expression and rewrites the original expression with the last
4524 expression of the sequence in GIMPLE form.
4526 PRE_P points to the list where the side effects for all the
4527 expressions in the sequence will be emitted.
4529 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4531 static enum gimplify_status
4532 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4534 tree t = *expr_p;
4538 tree *sub_p = &TREE_OPERAND (t, 0);
4540 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4541 gimplify_compound_expr (sub_p, pre_p, false);
4542 else
4543 gimplify_stmt (sub_p, pre_p);
4545 t = TREE_OPERAND (t, 1);
4547 while (TREE_CODE (t) == COMPOUND_EXPR);
4549 *expr_p = t;
4550 if (want_value)
4551 return GS_OK;
4552 else
4554 gimplify_stmt (expr_p, pre_p);
4555 return GS_ALL_DONE;
4559 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4560 gimplify. After gimplification, EXPR_P will point to a new temporary
4561 that holds the original value of the SAVE_EXPR node.
4563 PRE_P points to the list where side effects that must happen before
4564 *EXPR_P should be stored. */
4566 static enum gimplify_status
4567 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4569 enum gimplify_status ret = GS_ALL_DONE;
4570 tree val;
4572 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4573 val = TREE_OPERAND (*expr_p, 0);
4575 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4576 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4578 /* The operand may be a void-valued expression, such as the SAVE_EXPRs
4579 generated by the Java frontend for class initialization. It is
4580 being executed only for its side effects. */
4581 if (TREE_TYPE (val) == void_type_node)
4583 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4584 is_gimple_stmt, fb_none);
4585 val = NULL;
4587 else
4588 val = get_initialized_tmp_var (val, pre_p, post_p);
4590 TREE_OPERAND (*expr_p, 0) = val;
4591 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4594 *expr_p = val;
4596 return ret;
4599 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4601 unary_expr
4602 : ...
4603 | '&' varname
4606 PRE_P points to the list where side effects that must happen before
4607 *EXPR_P should be stored.
4609 POST_P points to the list where side effects that must happen after
4610 *EXPR_P should be stored. */
4612 static enum gimplify_status
4613 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4615 tree expr = *expr_p;
4616 tree op0 = TREE_OPERAND (expr, 0);
4617 enum gimplify_status ret;
4618 location_t loc = EXPR_LOCATION (*expr_p);
4620 switch (TREE_CODE (op0))
4622 case INDIRECT_REF:
4623 do_indirect_ref:
4624 /* Check if we are dealing with an expression of the form '&*ptr'.
4625 While the front end folds away '&*ptr' into 'ptr', these
4626 expressions may be generated internally by the compiler (e.g.,
4627 builtins like __builtin_va_end). */
4628 /* Caution: the silent array decomposition semantics we allow for
4629 ADDR_EXPR mean we can't always discard the pair. */
4630 /* Gimplification of the ADDR_EXPR operand may drop
4631 cv-qualification conversions, so make sure we add them if
4632 needed. */
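 /* For illustration (hypothetical code, arbitrary names):

      int *p, *q;
      ...
      q = &*p;

    is simplified to q = p, with a conversion added below if the pointer
    types are not uselessly convertible. */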
4634 tree op00 = TREE_OPERAND (op0, 0);
4635 tree t_expr = TREE_TYPE (expr);
4636 tree t_op00 = TREE_TYPE (op00);
4638 if (!useless_type_conversion_p (t_expr, t_op00))
4639 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4640 *expr_p = op00;
4641 ret = GS_OK;
4643 break;
4645 case VIEW_CONVERT_EXPR:
4646 /* Take the address of our operand and then convert it to the type of
4647 this ADDR_EXPR.
4649 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
4650 all clear. The impact of this transformation is even less clear. */
4652 /* If the operand is a useless conversion, look through it. Doing so
4653 guarantees that the ADDR_EXPR and its operand will remain of the
4654 same type. */
4655 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4656 op0 = TREE_OPERAND (op0, 0);
4658 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4659 build_fold_addr_expr_loc (loc,
4660 TREE_OPERAND (op0, 0)));
4661 ret = GS_OK;
4662 break;
4664 default:
4665 /* We use fb_either here because the C frontend sometimes takes
4666 the address of a call that returns a struct; see
4667 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4668 the implied temporary explicit. */
4670 /* Make the operand addressable. */
4671 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4672 is_gimple_addressable, fb_either);
4673 if (ret == GS_ERROR)
4674 break;
4676 /* Then mark it. Beware that it may not be possible to do so directly
4677 if a temporary has been created by the gimplification. */
4678 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4680 op0 = TREE_OPERAND (expr, 0);
4682 /* For various reasons, the gimplification of the expression
4683 may have made a new INDIRECT_REF. */
4684 if (TREE_CODE (op0) == INDIRECT_REF)
4685 goto do_indirect_ref;
4687 mark_addressable (TREE_OPERAND (expr, 0));
4689 /* The FEs may end up building ADDR_EXPRs early on a decl with
4690 an incomplete type. Re-build ADDR_EXPRs in canonical form
4691 here. */
4692 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4693 *expr_p = build_fold_addr_expr (op0);
4695 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4696 recompute_tree_invariant_for_addr_expr (*expr_p);
4698 /* If we re-built the ADDR_EXPR add a conversion to the original type
4699 if required. */
4700 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4701 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4703 break;
4706 return ret;
4709 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4710 value; output operands should be a gimple lvalue. */
4712 static enum gimplify_status
4713 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4715 tree expr;
4716 int noutputs;
4717 const char **oconstraints;
4718 int i;
4719 tree link;
4720 const char *constraint;
4721 bool allows_mem, allows_reg, is_inout;
4722 enum gimplify_status ret, tret;
4723 gimple stmt;
4724 vec<tree, va_gc> *inputs;
4725 vec<tree, va_gc> *outputs;
4726 vec<tree, va_gc> *clobbers;
4727 vec<tree, va_gc> *labels;
4728 tree link_next;
4730 expr = *expr_p;
4731 noutputs = list_length (ASM_OUTPUTS (expr));
4732 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4734 inputs = NULL;
4735 outputs = NULL;
4736 clobbers = NULL;
4737 labels = NULL;
4739 ret = GS_ALL_DONE;
4740 link_next = NULL_TREE;
4741 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4743 bool ok;
4744 size_t constraint_len;
4746 link_next = TREE_CHAIN (link);
4748 oconstraints[i]
4749 = constraint
4750 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4751 constraint_len = strlen (constraint);
4752 if (constraint_len == 0)
4753 continue;
4755 ok = parse_output_constraint (&constraint, i, 0, 0,
4756 &allows_mem, &allows_reg, &is_inout);
4757 if (!ok)
4759 ret = GS_ERROR;
4760 is_inout = false;
4763 if (!allows_reg && allows_mem)
4764 mark_addressable (TREE_VALUE (link));
4766 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4767 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4768 fb_lvalue | fb_mayfail);
4769 if (tret == GS_ERROR)
4771 error ("invalid lvalue in asm output %d", i);
4772 ret = tret;
4775 vec_safe_push (outputs, link);
4776 TREE_CHAIN (link) = NULL_TREE;
4778 if (is_inout)
4780 /* An input/output operand. To give the optimizers more
4781 flexibility, split it into separate input and output
4782 operands. */
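 /* For example (hypothetical user code, arbitrary names):

      asm ("incl %0" : "+r" (x));

    is handled as if it had been written

      asm ("incl %0" : "=r" (x) : "0" (x));

    i.e. the "+r" output is rewritten to "=r" and a matching numbered
    input is appended, which is what the code below constructs. */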
4783 tree input;
4784 char buf[10];
4786 /* Turn the in/out constraint into an output constraint. */
4787 char *p = xstrdup (constraint);
4788 p[0] = '=';
4789 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4791 /* And add a matching input constraint. */
4792 if (allows_reg)
4794 sprintf (buf, "%d", i);
4796 /* If there are multiple alternatives in the constraint,
4797 handle each of them individually. Those that allow a register
4798 will be replaced with the operand number; the others will stay
4799 unchanged. */
4800 if (strchr (p, ',') != NULL)
4802 size_t len = 0, buflen = strlen (buf);
4803 char *beg, *end, *str, *dst;
4805 for (beg = p + 1;;)
4807 end = strchr (beg, ',');
4808 if (end == NULL)
4809 end = strchr (beg, '\0');
4810 if ((size_t) (end - beg) < buflen)
4811 len += buflen + 1;
4812 else
4813 len += end - beg + 1;
4814 if (*end)
4815 beg = end + 1;
4816 else
4817 break;
4820 str = (char *) alloca (len);
4821 for (beg = p + 1, dst = str;;)
4823 const char *tem;
4824 bool mem_p, reg_p, inout_p;
4826 end = strchr (beg, ',');
4827 if (end)
4828 *end = '\0';
4829 beg[-1] = '=';
4830 tem = beg - 1;
4831 parse_output_constraint (&tem, i, 0, 0,
4832 &mem_p, &reg_p, &inout_p);
4833 if (dst != str)
4834 *dst++ = ',';
4835 if (reg_p)
4837 memcpy (dst, buf, buflen);
4838 dst += buflen;
4840 else
4842 if (end)
4843 len = end - beg;
4844 else
4845 len = strlen (beg);
4846 memcpy (dst, beg, len);
4847 dst += len;
4849 if (end)
4850 beg = end + 1;
4851 else
4852 break;
4854 *dst = '\0';
4855 input = build_string (dst - str, str);
4857 else
4858 input = build_string (strlen (buf), buf);
4860 else
4861 input = build_string (constraint_len - 1, constraint + 1);
4863 free (p);
4865 input = build_tree_list (build_tree_list (NULL_TREE, input),
4866 unshare_expr (TREE_VALUE (link)));
4867 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4871 link_next = NULL_TREE;
4872 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4874 link_next = TREE_CHAIN (link);
4875 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4876 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4877 oconstraints, &allows_mem, &allows_reg);
4879 /* If we can't make copies, we can only accept memory. */
4880 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4882 if (allows_mem)
4883 allows_reg = 0;
4884 else
4886 error ("impossible constraint in %<asm%>");
4887 error ("non-memory input %d must stay in memory", i);
4888 return GS_ERROR;
4892 /* If the operand is a memory input, it should be an lvalue. */
4893 if (!allows_reg && allows_mem)
4895 tree inputv = TREE_VALUE (link);
4896 STRIP_NOPS (inputv);
4897 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
4898 || TREE_CODE (inputv) == PREINCREMENT_EXPR
4899 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
4900 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
4901 TREE_VALUE (link) = error_mark_node;
4902 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4903 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4904 mark_addressable (TREE_VALUE (link));
4905 if (tret == GS_ERROR)
4907 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
4908 input_location = EXPR_LOCATION (TREE_VALUE (link));
4909 error ("memory input %d is not directly addressable", i);
4910 ret = tret;
4913 else
4915 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4916 is_gimple_asm_val, fb_rvalue);
4917 if (tret == GS_ERROR)
4918 ret = tret;
4921 TREE_CHAIN (link) = NULL_TREE;
4922 vec_safe_push (inputs, link);
4925 link_next = NULL_TREE;
4926 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
4928 link_next = TREE_CHAIN (link);
4929 TREE_CHAIN (link) = NULL_TREE;
4930 vec_safe_push (clobbers, link);
4933 link_next = NULL_TREE;
4934 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
4936 link_next = TREE_CHAIN (link);
4937 TREE_CHAIN (link) = NULL_TREE;
4938 vec_safe_push (labels, link);
4941 /* Do not add ASMs with errors to the gimple IL stream. */
4942 if (ret != GS_ERROR)
4944 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
4945 inputs, outputs, clobbers, labels);
4947 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
4948 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
4950 gimplify_seq_add_stmt (pre_p, stmt);
4953 return ret;
4956 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4957 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4958 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4959 return to this function.
4961 FIXME should we complexify the prequeue handling instead? Or use flags
4962 for all the cleanups and let the optimizer tighten them up? The current
4963 code seems pretty fragile; it will break on a cleanup within any
4964 non-conditional nesting. But any such nesting would be broken, anyway;
4965 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4966 and continues out of it. We can do that at the RTL level, though, so
4967 having an optimizer to tighten up try/finally regions would be a Good
4968 Thing. */
4970 static enum gimplify_status
4971 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
4973 gimple_stmt_iterator iter;
4974 gimple_seq body_sequence = NULL;
4976 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4978 /* We only care about the number of conditions between the innermost
4979 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4980 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4981 int old_conds = gimplify_ctxp->conditions;
4982 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
4983 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
4984 gimplify_ctxp->conditions = 0;
4985 gimplify_ctxp->conditional_cleanups = NULL;
4986 gimplify_ctxp->in_cleanup_point_expr = true;
4988 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
4990 gimplify_ctxp->conditions = old_conds;
4991 gimplify_ctxp->conditional_cleanups = old_cleanups;
4992 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
4994 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
4996 gimple wce = gsi_stmt (iter);
4998 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5000 if (gsi_one_before_end_p (iter))
5002 /* Note that gsi_insert_seq_before and gsi_remove do not
5003 scan operands, unlike some other sequence mutators. */
5004 if (!gimple_wce_cleanup_eh_only (wce))
5005 gsi_insert_seq_before_without_update (&iter,
5006 gimple_wce_cleanup (wce),
5007 GSI_SAME_STMT);
5008 gsi_remove (&iter, true);
5009 break;
5011 else
5013 gimple gtry;
5014 gimple_seq seq;
5015 enum gimple_try_flags kind;
5017 if (gimple_wce_cleanup_eh_only (wce))
5018 kind = GIMPLE_TRY_CATCH;
5019 else
5020 kind = GIMPLE_TRY_FINALLY;
5021 seq = gsi_split_seq_after (iter);
5023 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5024 /* Do not use gsi_replace here, as it may scan operands.
5025 We want to do a simple structural modification only. */
5026 gsi_set_stmt (&iter, gtry);
5027 iter = gsi_start (gtry->gimple_try.eval);
5030 else
5031 gsi_next (&iter);
5034 gimplify_seq_add_seq (pre_p, body_sequence);
5035 if (temp)
5037 *expr_p = temp;
5038 return GS_OK;
5040 else
5042 *expr_p = NULL;
5043 return GS_ALL_DONE;
5047 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5048 is the cleanup action required. EH_ONLY is true if the cleanup should
5049 only be executed if an exception is thrown, not on normal exit. */
5051 static void
5052 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5054 gimple wce;
5055 gimple_seq cleanup_stmts = NULL;
5057 /* Errors can result in improperly nested cleanups, which results in
5058 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5059 if (seen_error ())
5060 return;
5062 if (gimple_conditional_context ())
5064 /* If we're in a conditional context, this is more complex. We only
5065 want to run the cleanup if we actually ran the initialization that
5066 necessitates it, but we want to run it after the end of the
5067 conditional context. So we wrap the try/finally around the
5068 condition and use a flag to determine whether or not to actually
5069 run the destructor. Thus
5071 test ? f(A()) : 0
5073 becomes (approximately)
5075 flag = 0;
5076 try {
5077 if (test) { A::A(temp); flag = 1; val = f(temp); }
5078 else { val = 0; }
5079 } finally {
5080 if (flag) A::~A(temp);
5084 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5085 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5086 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5088 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5089 gimplify_stmt (&cleanup, &cleanup_stmts);
5090 wce = gimple_build_wce (cleanup_stmts);
5092 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5093 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5094 gimplify_seq_add_stmt (pre_p, ftrue);
5096 /* Because of this manipulation, and the EH edges that jump
5097 threading cannot redirect, the temporary (VAR) will appear
5098 to be used uninitialized. Don't warn. */
5099 TREE_NO_WARNING (var) = 1;
5101 else
5103 gimplify_stmt (&cleanup, &cleanup_stmts);
5104 wce = gimple_build_wce (cleanup_stmts);
5105 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5106 gimplify_seq_add_stmt (pre_p, wce);
5110 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5112 static enum gimplify_status
5113 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5115 tree targ = *expr_p;
5116 tree temp = TARGET_EXPR_SLOT (targ);
5117 tree init = TARGET_EXPR_INITIAL (targ);
5118 enum gimplify_status ret;
5120 if (init)
5122 tree cleanup = NULL_TREE;
5124 /* TARGET_EXPR temps aren't part of the enclosing block, so add them
5125 to the temps list. Also handle variable-length TARGET_EXPRs. */
5126 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5128 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5129 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5130 gimplify_vla_decl (temp, pre_p);
5132 else
5133 gimple_add_tmp_var (temp);
5135 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5136 expression is supposed to initialize the slot. */
5137 if (VOID_TYPE_P (TREE_TYPE (init)))
5138 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5139 else
5141 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5142 init = init_expr;
5143 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5144 init = NULL;
5145 ggc_free (init_expr);
5147 if (ret == GS_ERROR)
5149 /* PR c++/28266 Make sure this is expanded only once. */
5150 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5151 return GS_ERROR;
5153 if (init)
5154 gimplify_and_add (init, pre_p);
5156 /* If needed, push the cleanup for the temp. */
5157 if (TARGET_EXPR_CLEANUP (targ))
5159 if (CLEANUP_EH_ONLY (targ))
5160 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5161 CLEANUP_EH_ONLY (targ), pre_p);
5162 else
5163 cleanup = TARGET_EXPR_CLEANUP (targ);
5166 /* Add a clobber for the temporary going out of scope, like
5167 gimplify_bind_expr. */
5168 if (gimplify_ctxp->in_cleanup_point_expr
5169 && needs_to_live_in_memory (temp)
5170 && flag_stack_reuse == SR_ALL)
5172 tree clobber = build_constructor (TREE_TYPE (temp),
5173 NULL);
5174 TREE_THIS_VOLATILE (clobber) = true;
5175 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5176 if (cleanup)
5177 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5178 clobber);
5179 else
5180 cleanup = clobber;
5183 if (cleanup)
5184 gimple_push_cleanup (temp, cleanup, false, pre_p);
5186 /* Only expand this once. */
5187 TREE_OPERAND (targ, 3) = init;
5188 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5190 else
5191 /* We should have expanded this before. */
5192 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5194 *expr_p = temp;
5195 return GS_OK;
5198 /* Gimplification of expression trees. */
5200 /* Gimplify an expression which appears at statement context. The
5201 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5202 NULL, a new sequence is allocated.
5204 Return true if we actually added a statement to the queue. */
5206 bool
5207 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5209 gimple_seq_node last;
5211 last = gimple_seq_last (*seq_p);
5212 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5213 return last != gimple_seq_last (*seq_p);
5216 /* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
5217 parallels. If entries already exist, force them to be some flavor of private.
5218 If there is no enclosing parallel, do nothing. */
5220 void
5221 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5223 splay_tree_node n;
5225 if (decl == NULL || !DECL_P (decl))
5226 return;
5230 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5231 if (n != NULL)
5233 if (n->value & GOVD_SHARED)
5234 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5235 else if (n->value & GOVD_MAP)
5236 n->value |= GOVD_MAP_TO_ONLY;
5237 else
5238 return;
5240 else if (ctx->region_type == ORT_TARGET)
5241 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5242 else if (ctx->region_type != ORT_WORKSHARE
5243 && ctx->region_type != ORT_SIMD
5244 && ctx->region_type != ORT_TARGET_DATA)
5245 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5247 ctx = ctx->outer_context;
5249 while (ctx);
5252 /* Similarly for each of the type sizes of TYPE. */
5254 static void
5255 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5257 if (type == NULL || type == error_mark_node)
5258 return;
5259 type = TYPE_MAIN_VARIANT (type);
5261 if (pointer_set_insert (ctx->privatized_types, type))
5262 return;
5264 switch (TREE_CODE (type))
5266 case INTEGER_TYPE:
5267 case ENUMERAL_TYPE:
5268 case BOOLEAN_TYPE:
5269 case REAL_TYPE:
5270 case FIXED_POINT_TYPE:
5271 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5272 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5273 break;
5275 case ARRAY_TYPE:
5276 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5277 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5278 break;
5280 case RECORD_TYPE:
5281 case UNION_TYPE:
5282 case QUAL_UNION_TYPE:
5284 tree field;
5285 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5286 if (TREE_CODE (field) == FIELD_DECL)
5288 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5289 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5292 break;
5294 case POINTER_TYPE:
5295 case REFERENCE_TYPE:
5296 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5297 break;
5299 default:
5300 break;
5303 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5304 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5305 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5308 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5310 static void
5311 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5313 splay_tree_node n;
5314 unsigned int nflags;
5315 tree t;
5317 if (error_operand_p (decl))
5318 return;
5320 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5321 there are constructors involved somewhere. */
5322 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5323 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5324 flags |= GOVD_SEEN;
5326 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5327 if (n != NULL && n->value != GOVD_ALIGNED)
5329 /* We shouldn't be re-adding the decl with the same data
5330 sharing class. */
5331 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5332 /* The only combination of data sharing classes we should see is
5333 FIRSTPRIVATE and LASTPRIVATE. */
5334 nflags = n->value | flags;
5335 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5336 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5337 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5338 n->value = nflags;
5339 return;
5342 /* When adding a variable-sized variable, we have to handle all sorts
5343 of additional bits of data: the pointer replacement variable, and
5344 the parameters of the type. */
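 /* A hedged sketch of the situation handled here (hypothetical user
    code, arbitrary names):

      void f (int n)
      {
        int a[n];
      #pragma omp parallel private (a)
        ...
      }

    A is variable-sized, so it is accessed through a pointer replacement
    variable (its DECL_VALUE_EXPR) and its size depends on N; the
    pointer and the gimplified size temporaries therefore get their own
    data-sharing entries below. */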
5345 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5347 /* Add the pointer replacement variable as PRIVATE if the variable
5348 replacement is private, else FIRSTPRIVATE since we'll need the
5349 address of the original variable either for SHARED, or for the
5350 copy into or out of the context. */
5351 if (!(flags & GOVD_LOCAL))
5353 nflags = flags & GOVD_MAP
5354 ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
5355 : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5356 nflags |= flags & GOVD_SEEN;
5357 t = DECL_VALUE_EXPR (decl);
5358 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5359 t = TREE_OPERAND (t, 0);
5360 gcc_assert (DECL_P (t));
5361 omp_add_variable (ctx, t, nflags);
5364 /* Add all of the variable and type parameters (which should have
5365 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5366 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5367 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5368 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5370 /* The variable-sized variable itself is never SHARED, only some form
5371 of PRIVATE. The sharing would take place via the pointer variable
5372 which we remapped above. */
5373 if (flags & GOVD_SHARED)
5374 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5375 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5377 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5378 alloca statement we generate for the variable, so make sure it
5379 is available. This isn't automatically needed for the SHARED
5380 case, since we won't be allocating local storage then.
5381 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5382 in which case omp_notice_variable will be called later
5383 on when it is gimplified. */
5384 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5385 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5386 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5388 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5389 && lang_hooks.decls.omp_privatize_by_reference (decl))
5391 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5393 /* Similar to the direct variable sized case above, we'll need the
5394 size of references being privatized. */
5395 if ((flags & GOVD_SHARED) == 0)
5397 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5398 if (TREE_CODE (t) != INTEGER_CST)
5399 omp_notice_variable (ctx, t, true);
5403 if (n != NULL)
5404 n->value |= flags;
5405 else
5406 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5409 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5410 This just prints out diagnostics about threadprivate variable uses
5411 in target regions and untied tasks. If DECL2 is non-NULL, prevent
5412 this warning on that variable. */
5414 static bool
5415 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5416 tree decl2)
5418 splay_tree_node n;
5419 struct gimplify_omp_ctx *octx;
5421 for (octx = ctx; octx; octx = octx->outer_context)
5422 if (octx->region_type == ORT_TARGET)
5424 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5425 if (n == NULL)
5427 error ("threadprivate variable %qE used in target region",
5428 DECL_NAME (decl));
5429 error_at (octx->location, "enclosing target region");
5430 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5432 if (decl2)
5433 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5436 if (ctx->region_type != ORT_UNTIED_TASK)
5437 return false;
5438 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5439 if (n == NULL)
5441 error ("threadprivate variable %qE used in untied task",
5442 DECL_NAME (decl));
5443 error_at (ctx->location, "enclosing task");
5444 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5446 if (decl2)
5447 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5448 return false;
5451 /* Record the fact that DECL was used within the OpenMP context CTX.
5452 IN_CODE is true when real code uses DECL, and false when we should
5453 merely emit default(none) errors. Return true if DECL is going to
5454 be remapped and thus DECL shouldn't be gimplified into its
5455 DECL_VALUE_EXPR (if any). */
5457 static bool
5458 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5460 splay_tree_node n;
5461 unsigned flags = in_code ? GOVD_SEEN : 0;
5462 bool ret = false, shared;
5464 if (error_operand_p (decl))
5465 return false;
5467 /* Threadprivate variables are predetermined. */
5468 if (is_global_var (decl))
5470 if (DECL_THREAD_LOCAL_P (decl))
5471 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5473 if (DECL_HAS_VALUE_EXPR_P (decl))
5475 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5477 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5478 return omp_notice_threadprivate_variable (ctx, decl, value);
5482 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5483 if (ctx->region_type == ORT_TARGET)
5485 if (n == NULL)
5487 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5489 error ("%qD referenced in target region does not have "
5490 "a mappable type", decl);
5491 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5493 else
5494 omp_add_variable (ctx, decl, GOVD_MAP | flags);
5496 else
5497 n->value |= flags;
5498 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5499 goto do_outer;
5502 if (n == NULL)
5504 enum omp_clause_default_kind default_kind, kind;
5505 struct gimplify_omp_ctx *octx;
5507 if (ctx->region_type == ORT_WORKSHARE
5508 || ctx->region_type == ORT_SIMD
5509 || ctx->region_type == ORT_TARGET_DATA)
5510 goto do_outer;
5512 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5513 remapped firstprivate instead of shared. To some extent this is
5514 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5515 default_kind = ctx->default_kind;
5516 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5517 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5518 default_kind = kind;
5520 switch (default_kind)
5522 case OMP_CLAUSE_DEFAULT_NONE:
5523 if ((ctx->region_type & ORT_TASK) != 0)
5525 error ("%qE not specified in enclosing task",
5526 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5527 error_at (ctx->location, "enclosing task");
5529 else if (ctx->region_type == ORT_TEAMS)
5531 error ("%qE not specified in enclosing teams construct",
5532 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5533 error_at (ctx->location, "enclosing teams construct");
5535 else
5537 error ("%qE not specified in enclosing parallel",
5538 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5539 error_at (ctx->location, "enclosing parallel");
5541 /* FALLTHRU */
5542 case OMP_CLAUSE_DEFAULT_SHARED:
5543 flags |= GOVD_SHARED;
5544 break;
5545 case OMP_CLAUSE_DEFAULT_PRIVATE:
5546 flags |= GOVD_PRIVATE;
5547 break;
5548 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5549 flags |= GOVD_FIRSTPRIVATE;
5550 break;
5551 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5552 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5553 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5554 if (ctx->outer_context)
5555 omp_notice_variable (ctx->outer_context, decl, in_code);
5556 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5558 splay_tree_node n2;
5560 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5561 continue;
5562 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5563 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5565 flags |= GOVD_FIRSTPRIVATE;
5566 break;
5568 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5569 break;
5571 if (flags & GOVD_FIRSTPRIVATE)
5572 break;
5573 if (octx == NULL
5574 && (TREE_CODE (decl) == PARM_DECL
5575 || (!is_global_var (decl)
5576 && DECL_CONTEXT (decl) == current_function_decl)))
5578 flags |= GOVD_FIRSTPRIVATE;
5579 break;
5581 flags |= GOVD_SHARED;
5582 break;
5583 default:
5584 gcc_unreachable ();
5587 if ((flags & GOVD_PRIVATE)
5588 && lang_hooks.decls.omp_private_outer_ref (decl))
5589 flags |= GOVD_PRIVATE_OUTER_REF;
5591 omp_add_variable (ctx, decl, flags);
5593 shared = (flags & GOVD_SHARED) != 0;
5594 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5595 goto do_outer;
5598 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5599 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5600 && DECL_SIZE (decl)
5601 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5603 splay_tree_node n2;
5604 tree t = DECL_VALUE_EXPR (decl);
5605 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5606 t = TREE_OPERAND (t, 0);
5607 gcc_assert (DECL_P (t));
5608 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5609 n2->value |= GOVD_SEEN;
5612 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5613 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5615 /* If nothing changed, there's nothing left to do. */
5616 if ((n->value & flags) == flags)
5617 return ret;
5618 flags |= n->value;
5619 n->value = flags;
5621 do_outer:
5622 /* If the variable is private in the current context, then we don't
5623 need to propagate anything to an outer context. */
5624 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5625 return ret;
5626 if (ctx->outer_context
5627 && omp_notice_variable (ctx->outer_context, decl, in_code))
5628 return true;
5629 return ret;
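/* Editorial sketch, not part of the original gimplify.c: the
   OMP_CLAUSE_DEFAULT_NONE arm above is what rejects code such as
   (names hypothetical)

     void f (void)
     {
       int x = 0;
       #pragma omp parallel default(none)
       x = 1;          // "'x' not specified in enclosing parallel"
                       // followed by "enclosing parallel" at the directive
     }

   Under default(shared) the same reference is instead recorded as
   GOVD_SHARED | GOVD_SEEN and turned into an implicit shared clause later
   by gimplify_adjust_omp_clauses_1.  */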
5632 /* Verify that DECL is private within CTX. If there's specific information
5633 to the contrary in the innermost scope, generate an error. */
5635 static bool
5636 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, bool simd)
5638 splay_tree_node n;
5640 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5641 if (n != NULL)
5643 if (n->value & GOVD_SHARED)
5645 if (ctx == gimplify_omp_ctxp)
5647 if (simd)
5648 error ("iteration variable %qE is predetermined linear",
5649 DECL_NAME (decl));
5650 else
5651 error ("iteration variable %qE should be private",
5652 DECL_NAME (decl));
5653 n->value = GOVD_PRIVATE;
5654 return true;
5656 else
5657 return false;
5659 else if ((n->value & GOVD_EXPLICIT) != 0
5660 && (ctx == gimplify_omp_ctxp
5661 || (ctx->region_type == ORT_COMBINED_PARALLEL
5662 && gimplify_omp_ctxp->outer_context == ctx)))
5664 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5665 error ("iteration variable %qE should not be firstprivate",
5666 DECL_NAME (decl));
5667 else if ((n->value & GOVD_REDUCTION) != 0)
5668 error ("iteration variable %qE should not be reduction",
5669 DECL_NAME (decl));
5670 else if (simd && (n->value & GOVD_LASTPRIVATE) != 0)
5671 error ("iteration variable %qE should not be lastprivate",
5672 DECL_NAME (decl));
5673 else if (simd && (n->value & GOVD_PRIVATE) != 0)
5674 error ("iteration variable %qE should not be private",
5675 DECL_NAME (decl));
5676 else if (simd && (n->value & GOVD_LINEAR) != 0)
5677 error ("iteration variable %qE is predetermined linear",
5678 DECL_NAME (decl));
5680 return (ctx == gimplify_omp_ctxp
5681 || (ctx->region_type == ORT_COMBINED_PARALLEL
5682 && gimplify_omp_ctxp->outer_context == ctx));
5685 if (ctx->region_type != ORT_WORKSHARE
5686 && ctx->region_type != ORT_SIMD)
5687 return false;
5688 else if (ctx->outer_context)
5689 return omp_is_private (ctx->outer_context, decl, simd);
5690 return false;
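/* Editorial sketch, not part of the original gimplify.c: gimplify_omp_for
   calls omp_is_private on each iteration variable, and the GOVD_EXPLICIT
   branch above is what rejects clauses that conflict with the variable's
   predetermined privatization, e.g. (names hypothetical)

     #pragma omp for firstprivate(i)   // "iteration variable 'i' should not
     for (i = 0; i < n; i++)           //  be firstprivate"
       a[i] = 0;

   The simd-specific checks similarly reject explicit private, lastprivate
   and linear clauses on the iteration variable of an OMP_SIMD loop.  */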
5693 /* Return true if DECL is private within a parallel region
5694 that binds to the current construct's context, or appears in
5695 such a region's REDUCTION clause. */
5697 static bool
5698 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5700 splay_tree_node n;
5704 ctx = ctx->outer_context;
5705 if (ctx == NULL)
5706 return !(is_global_var (decl)
5707 /* References might be private, but might be shared too. */
5708 || lang_hooks.decls.omp_privatize_by_reference (decl));
5710 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
5711 continue;
5713 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5714 if (n != NULL)
5715 return (n->value & GOVD_SHARED) == 0;
5717 while (ctx->region_type == ORT_WORKSHARE
5718 || ctx->region_type == ORT_SIMD);
5719 return false;
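/* Editorial sketch, not part of the original gimplify.c: omp_check_private
   backs the "%s variable %qE is private in outer context" diagnostic issued
   from gimplify_scan_omp_clauses below, e.g. for code along these lines
   (names hypothetical):

     #pragma omp parallel private(x)
     {
       #pragma omp single firstprivate(x)   // "firstprivate variable 'x' is
       x = 0;                               //  private in outer context"
     }
*/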
5722 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5723 omp context and noticing the referenced variables in enclosing contexts. */
5725 static void
5726 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5727 enum omp_region_type region_type)
5729 struct gimplify_omp_ctx *ctx, *outer_ctx;
5730 struct gimplify_ctx gctx;
5731 tree c;
5733 ctx = new_omp_context (region_type);
5734 outer_ctx = ctx->outer_context;
5736 while ((c = *list_p) != NULL)
5738 bool remove = false;
5739 bool notice_outer = true;
5740 const char *check_non_private = NULL;
5741 unsigned int flags;
5742 tree decl;
5744 switch (OMP_CLAUSE_CODE (c))
5746 case OMP_CLAUSE_PRIVATE:
5747 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5748 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5750 flags |= GOVD_PRIVATE_OUTER_REF;
5751 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5753 else
5754 notice_outer = false;
5755 goto do_add;
5756 case OMP_CLAUSE_SHARED:
5757 flags = GOVD_SHARED | GOVD_EXPLICIT;
5758 goto do_add;
5759 case OMP_CLAUSE_FIRSTPRIVATE:
5760 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5761 check_non_private = "firstprivate";
5762 goto do_add;
5763 case OMP_CLAUSE_LASTPRIVATE:
5764 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5765 check_non_private = "lastprivate";
5766 goto do_add;
5767 case OMP_CLAUSE_REDUCTION:
5768 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5769 check_non_private = "reduction";
5770 goto do_add;
5771 case OMP_CLAUSE_LINEAR:
5772 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
5773 is_gimple_val, fb_rvalue) == GS_ERROR)
5775 remove = true;
5776 break;
5778 flags = GOVD_LINEAR | GOVD_EXPLICIT;
5779 goto do_add;
5781 case OMP_CLAUSE_MAP:
5782 if (OMP_CLAUSE_SIZE (c)
5783 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
5784 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
5786 remove = true;
5787 break;
5789 decl = OMP_CLAUSE_DECL (c);
5790 if (!DECL_P (decl))
5792 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
5793 NULL, is_gimple_lvalue, fb_lvalue)
5794 == GS_ERROR)
5796 remove = true;
5797 break;
5799 break;
5801 flags = GOVD_MAP | GOVD_EXPLICIT;
5802 goto do_add;
5804 case OMP_CLAUSE_DEPEND:
5805 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
5807 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
5808 NULL, is_gimple_val, fb_rvalue);
5809 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5811 if (error_operand_p (OMP_CLAUSE_DECL (c)))
5813 remove = true;
5814 break;
5816 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
5817 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
5818 is_gimple_val, fb_rvalue) == GS_ERROR)
5820 remove = true;
5821 break;
5823 break;
5825 case OMP_CLAUSE_TO:
5826 case OMP_CLAUSE_FROM:
5827 if (OMP_CLAUSE_SIZE (c)
5828 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
5829 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
5831 remove = true;
5832 break;
5834 decl = OMP_CLAUSE_DECL (c);
5835 if (error_operand_p (decl))
5837 remove = true;
5838 break;
5840 if (!DECL_P (decl))
5842 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
5843 NULL, is_gimple_lvalue, fb_lvalue)
5844 == GS_ERROR)
5846 remove = true;
5847 break;
5849 break;
5851 goto do_notice;
5853 do_add:
5854 decl = OMP_CLAUSE_DECL (c);
5855 if (error_operand_p (decl))
5857 remove = true;
5858 break;
5860 omp_add_variable (ctx, decl, flags);
5861 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5862 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5864 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5865 GOVD_LOCAL | GOVD_SEEN);
5866 gimplify_omp_ctxp = ctx;
5867 push_gimplify_context (&gctx);
5869 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5870 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5872 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5873 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5874 pop_gimplify_context
5875 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5876 push_gimplify_context (&gctx);
5877 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5878 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5879 pop_gimplify_context
5880 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5881 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5882 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5884 gimplify_omp_ctxp = outer_ctx;
5886 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5887 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5889 gimplify_omp_ctxp = ctx;
5890 push_gimplify_context (&gctx);
5891 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5893 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5894 NULL, NULL);
5895 TREE_SIDE_EFFECTS (bind) = 1;
5896 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5897 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5899 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5900 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5901 pop_gimplify_context
5902 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5903 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5905 gimplify_omp_ctxp = outer_ctx;
5907 if (notice_outer)
5908 goto do_notice;
5909 break;
5911 case OMP_CLAUSE_COPYIN:
5912 case OMP_CLAUSE_COPYPRIVATE:
5913 decl = OMP_CLAUSE_DECL (c);
5914 if (error_operand_p (decl))
5916 remove = true;
5917 break;
5919 do_notice:
5920 if (outer_ctx)
5921 omp_notice_variable (outer_ctx, decl, true);
5922 if (check_non_private
5923 && region_type == ORT_WORKSHARE
5924 && omp_check_private (ctx, decl))
5926 error ("%s variable %qE is private in outer context",
5927 check_non_private, DECL_NAME (decl));
5928 remove = true;
5930 break;
5932 case OMP_CLAUSE_FINAL:
5933 case OMP_CLAUSE_IF:
5934 OMP_CLAUSE_OPERAND (c, 0)
5935 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5936 /* Fall through. */
5938 case OMP_CLAUSE_SCHEDULE:
5939 case OMP_CLAUSE_NUM_THREADS:
5940 case OMP_CLAUSE_NUM_TEAMS:
5941 case OMP_CLAUSE_THREAD_LIMIT:
5942 case OMP_CLAUSE_DIST_SCHEDULE:
5943 case OMP_CLAUSE_DEVICE:
5944 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5945 is_gimple_val, fb_rvalue) == GS_ERROR)
5946 remove = true;
5947 break;
5949 case OMP_CLAUSE_NOWAIT:
5950 case OMP_CLAUSE_ORDERED:
5951 case OMP_CLAUSE_UNTIED:
5952 case OMP_CLAUSE_COLLAPSE:
5953 case OMP_CLAUSE_MERGEABLE:
5954 case OMP_CLAUSE_PROC_BIND:
5955 case OMP_CLAUSE_SAFELEN:
5956 break;
5958 case OMP_CLAUSE_ALIGNED:
5959 decl = OMP_CLAUSE_DECL (c);
5960 if (error_operand_p (decl))
5962 remove = true;
5963 break;
5965 if (!is_global_var (decl)
5966 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
5967 omp_add_variable (ctx, decl, GOVD_ALIGNED);
5968 break;
5970 case OMP_CLAUSE_DEFAULT:
5971 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5972 break;
5974 default:
5975 gcc_unreachable ();
5978 if (remove)
5979 *list_p = OMP_CLAUSE_CHAIN (c);
5980 else
5981 list_p = &OMP_CLAUSE_CHAIN (c);
5984 gimplify_omp_ctxp = ctx;
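/* Editorial sketch, not part of the original gimplify.c: a concrete example
   of the mappings the switch above installs.  For a directive like
   (names hypothetical)

     #pragma omp parallel shared(a) firstprivate(b) reduction(+:s)

   the new context's splay tree records

     a -> GOVD_SHARED       | GOVD_EXPLICIT
     b -> GOVD_FIRSTPRIVATE | GOVD_EXPLICIT
     s -> GOVD_REDUCTION    | GOVD_SEEN | GOVD_EXPLICIT

   and each variable is also noticed in the outer context via do_notice.  */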
5987 /* Remove PRIVATE, SHARED, and FIRSTPRIVATE clauses for variables that were
5988 not actually used within the context; add implicit clauses for those that were. */
5990 static int
5991 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5993 tree *list_p = (tree *) data;
5994 tree decl = (tree) n->key;
5995 unsigned flags = n->value;
5996 enum omp_clause_code code;
5997 tree clause;
5998 bool private_debug;
6000 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6001 return 0;
6002 if ((flags & GOVD_SEEN) == 0)
6003 return 0;
6004 if (flags & GOVD_DEBUG_PRIVATE)
6006 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6007 private_debug = true;
6009 else if (flags & GOVD_MAP)
6010 private_debug = false;
6011 else
6012 private_debug
6013 = lang_hooks.decls.omp_private_debug_clause (decl,
6014 !!(flags & GOVD_SHARED));
6015 if (private_debug)
6016 code = OMP_CLAUSE_PRIVATE;
6017 else if (flags & GOVD_MAP)
6018 code = OMP_CLAUSE_MAP;
6019 else if (flags & GOVD_SHARED)
6021 if (is_global_var (decl))
6023 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6024 while (ctx != NULL)
6026 splay_tree_node on
6027 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6028 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6029 | GOVD_PRIVATE | GOVD_REDUCTION
6030 | GOVD_LINEAR)) != 0)
6031 break;
6032 ctx = ctx->outer_context;
6034 if (ctx == NULL)
6035 return 0;
6037 code = OMP_CLAUSE_SHARED;
6039 else if (flags & GOVD_PRIVATE)
6040 code = OMP_CLAUSE_PRIVATE;
6041 else if (flags & GOVD_FIRSTPRIVATE)
6042 code = OMP_CLAUSE_FIRSTPRIVATE;
6043 else if (flags & GOVD_LASTPRIVATE)
6044 code = OMP_CLAUSE_LASTPRIVATE;
6045 else if (flags & GOVD_ALIGNED)
6046 return 0;
6047 else
6048 gcc_unreachable ();
6050 clause = build_omp_clause (input_location, code);
6051 OMP_CLAUSE_DECL (clause) = decl;
6052 OMP_CLAUSE_CHAIN (clause) = *list_p;
6053 if (private_debug)
6054 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6055 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6056 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6057 else if (code == OMP_CLAUSE_MAP)
6059 OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
6060 ? OMP_CLAUSE_MAP_TO
6061 : OMP_CLAUSE_MAP_TOFROM;
6062 if (DECL_SIZE (decl)
6063 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6065 tree decl2 = DECL_VALUE_EXPR (decl);
6066 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6067 decl2 = TREE_OPERAND (decl2, 0);
6068 gcc_assert (DECL_P (decl2));
6069 tree mem = build_simple_mem_ref (decl2);
6070 OMP_CLAUSE_DECL (clause) = mem;
6071 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6072 if (gimplify_omp_ctxp->outer_context)
6074 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6075 omp_notice_variable (ctx, decl2, true);
6076 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6078 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6079 OMP_CLAUSE_MAP);
6080 OMP_CLAUSE_DECL (nc) = decl;
6081 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6082 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6083 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6084 OMP_CLAUSE_CHAIN (clause) = nc;
6087 *list_p = clause;
6088 lang_hooks.decls.omp_finish_clause (clause);
6090 return 0;
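/* Editorial sketch, not part of the original gimplify.c: for an implicitly
   mapped variable-sized decl the OMP_CLAUSE_MAP handling above builds two
   clauses, mirroring what gimplify_adjust_omp_clauses below does for
   explicit map clauses:

     OMP_CLAUSE_MAP  decl = *ptr  size = TYPE_SIZE_UNIT (type)  kind = TOFROM (or TO)
     OMP_CLAUSE_MAP  decl = var   size = 0                      kind = POINTER

   where ptr is the pointer taken from the variable's DECL_VALUE_EXPR.  A
   typical C source trigger would be a VLA referenced inside a
   "#pragma omp target" region (illustrative only).  */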
6093 static void
6094 gimplify_adjust_omp_clauses (tree *list_p)
6096 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6097 tree c, decl;
6099 while ((c = *list_p) != NULL)
6101 splay_tree_node n;
6102 bool remove = false;
6104 switch (OMP_CLAUSE_CODE (c))
6106 case OMP_CLAUSE_PRIVATE:
6107 case OMP_CLAUSE_SHARED:
6108 case OMP_CLAUSE_FIRSTPRIVATE:
6109 case OMP_CLAUSE_LINEAR:
6110 decl = OMP_CLAUSE_DECL (c);
6111 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6112 remove = !(n->value & GOVD_SEEN);
6113 if (! remove)
6115 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6116 if ((n->value & GOVD_DEBUG_PRIVATE)
6117 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6119 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6120 || ((n->value & GOVD_DATA_SHARE_CLASS)
6121 == GOVD_PRIVATE));
6122 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6123 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6125 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6126 && ctx->outer_context
6127 && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6128 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6129 && !is_global_var (decl))
6131 if (ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
6133 n = splay_tree_lookup (ctx->outer_context->variables,
6134 (splay_tree_key) decl);
6135 if (n == NULL
6136 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
6138 int flags = OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6139 ? GOVD_LASTPRIVATE : GOVD_SHARED;
6140 if (n == NULL)
6141 omp_add_variable (ctx->outer_context, decl,
6142 flags | GOVD_SEEN);
6143 else
6144 n->value |= flags | GOVD_SEEN;
6147 else
6148 omp_notice_variable (ctx->outer_context, decl, true);
6151 break;
6153 case OMP_CLAUSE_LASTPRIVATE:
6154 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6155 accurately reflect the presence of a FIRSTPRIVATE clause. */
6156 decl = OMP_CLAUSE_DECL (c);
6157 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6158 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6159 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6160 break;
6162 case OMP_CLAUSE_ALIGNED:
6163 decl = OMP_CLAUSE_DECL (c);
6164 if (!is_global_var (decl))
6166 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6167 remove = n == NULL || !(n->value & GOVD_SEEN);
6168 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6170 struct gimplify_omp_ctx *octx;
6171 if (n != NULL
6172 && (n->value & (GOVD_DATA_SHARE_CLASS
6173 & ~GOVD_FIRSTPRIVATE)))
6174 remove = true;
6175 else
6176 for (octx = ctx->outer_context; octx;
6177 octx = octx->outer_context)
6179 n = splay_tree_lookup (octx->variables,
6180 (splay_tree_key) decl);
6181 if (n == NULL)
6182 continue;
6183 if (n->value & GOVD_LOCAL)
6184 break;
6185 /* We have to avoid assigning a shared variable
6186 to itself when trying to add
6187 __builtin_assume_aligned. */
6188 if (n->value & GOVD_SHARED)
6190 remove = true;
6191 break;
6196 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6198 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6199 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6200 remove = true;
6202 break;
6204 case OMP_CLAUSE_MAP:
6205 decl = OMP_CLAUSE_DECL (c);
6206 if (!DECL_P (decl))
6207 break;
6208 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6209 if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6210 remove = true;
6211 else if (DECL_SIZE (decl)
6212 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6213 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
6215 tree decl2 = DECL_VALUE_EXPR (decl);
6216 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6217 decl2 = TREE_OPERAND (decl2, 0);
6218 gcc_assert (DECL_P (decl2));
6219 tree mem = build_simple_mem_ref (decl2);
6220 OMP_CLAUSE_DECL (c) = mem;
6221 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6222 if (ctx->outer_context)
6224 omp_notice_variable (ctx->outer_context, decl2, true);
6225 omp_notice_variable (ctx->outer_context,
6226 OMP_CLAUSE_SIZE (c), true);
6228 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6229 OMP_CLAUSE_MAP);
6230 OMP_CLAUSE_DECL (nc) = decl;
6231 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6232 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6233 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6234 OMP_CLAUSE_CHAIN (c) = nc;
6235 c = nc;
6237 break;
6239 case OMP_CLAUSE_TO:
6240 case OMP_CLAUSE_FROM:
6241 decl = OMP_CLAUSE_DECL (c);
6242 if (!DECL_P (decl))
6243 break;
6244 if (DECL_SIZE (decl)
6245 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6247 tree decl2 = DECL_VALUE_EXPR (decl);
6248 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6249 decl2 = TREE_OPERAND (decl2, 0);
6250 gcc_assert (DECL_P (decl2));
6251 tree mem = build_simple_mem_ref (decl2);
6252 OMP_CLAUSE_DECL (c) = mem;
6253 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6254 if (ctx->outer_context)
6256 omp_notice_variable (ctx->outer_context, decl2, true);
6257 omp_notice_variable (ctx->outer_context,
6258 OMP_CLAUSE_SIZE (c), true);
6261 break;
6263 case OMP_CLAUSE_REDUCTION:
6264 case OMP_CLAUSE_COPYIN:
6265 case OMP_CLAUSE_COPYPRIVATE:
6266 case OMP_CLAUSE_IF:
6267 case OMP_CLAUSE_NUM_THREADS:
6268 case OMP_CLAUSE_NUM_TEAMS:
6269 case OMP_CLAUSE_THREAD_LIMIT:
6270 case OMP_CLAUSE_DIST_SCHEDULE:
6271 case OMP_CLAUSE_DEVICE:
6272 case OMP_CLAUSE_SCHEDULE:
6273 case OMP_CLAUSE_NOWAIT:
6274 case OMP_CLAUSE_ORDERED:
6275 case OMP_CLAUSE_DEFAULT:
6276 case OMP_CLAUSE_UNTIED:
6277 case OMP_CLAUSE_COLLAPSE:
6278 case OMP_CLAUSE_FINAL:
6279 case OMP_CLAUSE_MERGEABLE:
6280 case OMP_CLAUSE_PROC_BIND:
6281 case OMP_CLAUSE_SAFELEN:
6282 case OMP_CLAUSE_DEPEND:
6283 break;
6285 default:
6286 gcc_unreachable ();
6289 if (remove)
6290 *list_p = OMP_CLAUSE_CHAIN (c);
6291 else
6292 list_p = &OMP_CLAUSE_CHAIN (c);
6295 /* Add in any implicit data sharing. */
6296 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
6298 gimplify_omp_ctxp = ctx->outer_context;
6299 delete_omp_context (ctx);
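/* Editorial sketch, not part of the original gimplify.c: putting the two
   halves above together on a small example (names hypothetical):

     void f (void)
     {
       int a = 0, b = 1, unused = 2;
       #pragma omp parallel shared(a) private(unused)
       a = b;
     }

   The explicit private(unused) clause is dropped because 'unused' never
   gets GOVD_SEEN, shared(a) is kept, and the splay_tree_foreach walk
   appends an implicit shared clause for 'b', so the resulting
   GIMPLE_OMP_PARALLEL carries shared(a) plus an implicit shared(b).  */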
6302 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6303 gimplification of the body, as well as scanning the body for used
6304 variables. We need to do this scan now, because variable-sized
6305 decls will be decomposed during gimplification. */
6307 static void
6308 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6310 tree expr = *expr_p;
6311 gimple g;
6312 gimple_seq body = NULL;
6313 struct gimplify_ctx gctx;
6315 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6316 OMP_PARALLEL_COMBINED (expr)
6317 ? ORT_COMBINED_PARALLEL
6318 : ORT_PARALLEL);
6320 push_gimplify_context (&gctx);
6322 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6323 if (gimple_code (g) == GIMPLE_BIND)
6324 pop_gimplify_context (g);
6325 else
6326 pop_gimplify_context (NULL);
6328 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6330 g = gimple_build_omp_parallel (body,
6331 OMP_PARALLEL_CLAUSES (expr),
6332 NULL_TREE, NULL_TREE);
6333 if (OMP_PARALLEL_COMBINED (expr))
6334 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6335 gimplify_seq_add_stmt (pre_p, g);
6336 *expr_p = NULL_TREE;
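/* Editorial sketch, not part of the original gimplify.c: gimplify_omp_parallel
   and gimplify_omp_task below follow the same shape -- scan the clauses,
   gimplify the body under a fresh gimplify context, adjust the clauses, then
   wrap the body in the corresponding GIMPLE statement.  For (hypothetical)

     #pragma omp parallel num_threads (4)
     foo ();

   the statement emitted on PRE_P is, roughly, a GIMPLE_OMP_PARALLEL carrying
   the adjusted clause list and a body containing the gimplified call to the
   hypothetical function foo, with GF_OMP_PARALLEL_COMBINED set only for
   combined constructs.  */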
6339 /* Gimplify the contents of an OMP_TASK statement. This involves
6340 gimplification of the body, as well as scanning the body for used
6341 variables. We need to do this scan now, because variable-sized
6342 decls will be decomposed during gimplification. */
6344 static void
6345 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6347 tree expr = *expr_p;
6348 gimple g;
6349 gimple_seq body = NULL;
6350 struct gimplify_ctx gctx;
6352 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6353 find_omp_clause (OMP_TASK_CLAUSES (expr),
6354 OMP_CLAUSE_UNTIED)
6355 ? ORT_UNTIED_TASK : ORT_TASK);
6357 push_gimplify_context (&gctx);
6359 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6360 if (gimple_code (g) == GIMPLE_BIND)
6361 pop_gimplify_context (g);
6362 else
6363 pop_gimplify_context (NULL);
6365 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6367 g = gimple_build_omp_task (body,
6368 OMP_TASK_CLAUSES (expr),
6369 NULL_TREE, NULL_TREE,
6370 NULL_TREE, NULL_TREE, NULL_TREE);
6371 gimplify_seq_add_stmt (pre_p, g);
6372 *expr_p = NULL_TREE;
6375 /* Helper function of gimplify_omp_for: find an OMP_FOR or OMP_SIMD
6376 with non-NULL OMP_FOR_INIT. */
6378 static tree
6379 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6381 *walk_subtrees = 0;
6382 switch (TREE_CODE (*tp))
6384 case OMP_FOR:
6385 *walk_subtrees = 1;
6386 /* FALLTHRU */
6387 case OMP_SIMD:
6388 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6389 return *tp;
6390 break;
6391 case BIND_EXPR:
6392 case STATEMENT_LIST:
6393 case OMP_PARALLEL:
6394 *walk_subtrees = 1;
6395 break;
6396 default:
6397 break;
6399 return NULL_TREE;
6402 /* Gimplify the gross structure of an OMP_FOR statement. */
6404 static enum gimplify_status
6405 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6407 tree for_stmt, orig_for_stmt, decl, var, t;
6408 enum gimplify_status ret = GS_ALL_DONE;
6409 enum gimplify_status tret;
6410 gimple gfor;
6411 gimple_seq for_body, for_pre_body;
6412 int i;
6413 bool simd;
6414 bitmap has_decl_expr = NULL;
6416 orig_for_stmt = for_stmt = *expr_p;
6418 simd = TREE_CODE (for_stmt) == OMP_SIMD;
6419 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6420 simd ? ORT_SIMD : ORT_WORKSHARE);
6422 /* Handle OMP_FOR_INIT. */
6423 for_pre_body = NULL;
6424 if (simd && OMP_FOR_PRE_BODY (for_stmt))
6426 has_decl_expr = BITMAP_ALLOC (NULL);
6427 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6428 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
6429 == VAR_DECL)
6431 t = OMP_FOR_PRE_BODY (for_stmt);
6432 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6434 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6436 tree_stmt_iterator si;
6437 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6438 tsi_next (&si))
6440 t = tsi_stmt (si);
6441 if (TREE_CODE (t) == DECL_EXPR
6442 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6443 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6447 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6448 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6450 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6452 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6453 NULL, NULL);
6454 gcc_assert (for_stmt != NULL_TREE);
6455 gimplify_omp_ctxp->combined_loop = true;
6458 for_body = NULL;
6459 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6460 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6461 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6462 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6463 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6465 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6466 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6467 decl = TREE_OPERAND (t, 0);
6468 gcc_assert (DECL_P (decl));
6469 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6470 || POINTER_TYPE_P (TREE_TYPE (decl)));
6472 /* Make sure the iteration variable is private. */
6473 tree c = NULL_TREE;
6474 if (orig_for_stmt != for_stmt)
6475 /* Do this only on innermost construct for combined ones. */;
6476 else if (simd)
6478 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6479 (splay_tree_key)decl);
6480 omp_is_private (gimplify_omp_ctxp, decl, simd);
6481 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6482 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6483 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6485 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6486 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6487 if (has_decl_expr
6488 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6489 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6490 OMP_CLAUSE_DECL (c) = decl;
6491 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6492 OMP_FOR_CLAUSES (for_stmt) = c;
6493 omp_add_variable (gimplify_omp_ctxp, decl,
6494 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6496 else
6498 bool lastprivate
6499 = (!has_decl_expr
6500 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6501 c = build_omp_clause (input_location,
6502 lastprivate ? OMP_CLAUSE_LASTPRIVATE
6503 : OMP_CLAUSE_PRIVATE);
6504 OMP_CLAUSE_DECL (c) = decl;
6505 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6506 omp_add_variable (gimplify_omp_ctxp, decl,
6507 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6508 | GOVD_SEEN);
6509 c = NULL_TREE;
6512 else if (omp_is_private (gimplify_omp_ctxp, decl, simd))
6513 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6514 else
6515 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6517 /* If DECL is not a gimple register, create a temporary variable to act
6518 as an iteration counter. This is valid, since DECL cannot be
6519 modified in the body of the loop. */
6520 if (orig_for_stmt != for_stmt)
6521 var = decl;
6522 else if (!is_gimple_reg (decl))
6524 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6525 TREE_OPERAND (t, 0) = var;
6527 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6529 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6531 else
6532 var = decl;
6534 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6535 is_gimple_val, fb_rvalue);
6536 ret = MIN (ret, tret);
6537 if (ret == GS_ERROR)
6538 return ret;
6540 /* Handle OMP_FOR_COND. */
6541 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6542 gcc_assert (COMPARISON_CLASS_P (t));
6543 gcc_assert (TREE_OPERAND (t, 0) == decl);
6545 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6546 is_gimple_val, fb_rvalue);
6547 ret = MIN (ret, tret);
6549 /* Handle OMP_FOR_INCR. */
6550 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6551 switch (TREE_CODE (t))
6553 case PREINCREMENT_EXPR:
6554 case POSTINCREMENT_EXPR:
6555 if (orig_for_stmt != for_stmt)
6556 break;
6557 t = build_int_cst (TREE_TYPE (decl), 1);
6558 if (c)
6559 OMP_CLAUSE_LINEAR_STEP (c) = t;
6560 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6561 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6562 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6563 break;
6565 case PREDECREMENT_EXPR:
6566 case POSTDECREMENT_EXPR:
6567 if (orig_for_stmt != for_stmt)
6568 break;
6569 t = build_int_cst (TREE_TYPE (decl), -1);
6570 if (c)
6571 OMP_CLAUSE_LINEAR_STEP (c) = t;
6572 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6573 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6574 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6575 break;
6577 case MODIFY_EXPR:
6578 gcc_assert (TREE_OPERAND (t, 0) == decl);
6579 TREE_OPERAND (t, 0) = var;
6581 t = TREE_OPERAND (t, 1);
6582 switch (TREE_CODE (t))
6584 case PLUS_EXPR:
6585 if (TREE_OPERAND (t, 1) == decl)
6587 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6588 TREE_OPERAND (t, 0) = var;
6589 break;
6592 /* Fallthru. */
6593 case MINUS_EXPR:
6594 case POINTER_PLUS_EXPR:
6595 gcc_assert (TREE_OPERAND (t, 0) == decl);
6596 TREE_OPERAND (t, 0) = var;
6597 break;
6598 default:
6599 gcc_unreachable ();
6602 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6603 is_gimple_val, fb_rvalue);
6604 ret = MIN (ret, tret);
6605 if (c)
6607 OMP_CLAUSE_LINEAR_STEP (c) = TREE_OPERAND (t, 1);
6608 if (TREE_CODE (t) == MINUS_EXPR)
6610 t = TREE_OPERAND (t, 1);
6611 OMP_CLAUSE_LINEAR_STEP (c)
6612 = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
6613 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
6614 &for_pre_body, NULL,
6615 is_gimple_val, fb_rvalue);
6616 ret = MIN (ret, tret);
6619 break;
6621 default:
6622 gcc_unreachable ();
6625 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6626 && orig_for_stmt == for_stmt)
6628 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6629 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6630 && OMP_CLAUSE_DECL (c) == decl
6631 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6633 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6634 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6635 gcc_assert (TREE_OPERAND (t, 0) == var);
6636 t = TREE_OPERAND (t, 1);
6637 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6638 || TREE_CODE (t) == MINUS_EXPR
6639 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6640 gcc_assert (TREE_OPERAND (t, 0) == var);
6641 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6642 TREE_OPERAND (t, 1));
6643 gimplify_assign (decl, t,
6644 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6649 BITMAP_FREE (has_decl_expr);
6651 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
6653 if (orig_for_stmt != for_stmt)
6654 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6656 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6657 decl = TREE_OPERAND (t, 0);
6658 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6659 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6660 TREE_OPERAND (t, 0) = var;
6661 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6662 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
6663 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
6666 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt));
6668 int kind;
6669 switch (TREE_CODE (orig_for_stmt))
6671 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
6672 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
6673 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
6674 default:
6675 gcc_unreachable ();
6677 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
6678 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6679 for_pre_body);
6680 if (orig_for_stmt != for_stmt)
6681 gimple_omp_for_set_combined_p (gfor, true);
6682 if (gimplify_omp_ctxp
6683 && (gimplify_omp_ctxp->combined_loop
6684 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
6685 && gimplify_omp_ctxp->outer_context
6686 && gimplify_omp_ctxp->outer_context->combined_loop)))
6688 gimple_omp_for_set_combined_into_p (gfor, true);
6689 if (gimplify_omp_ctxp->combined_loop)
6690 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
6691 else
6692 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
6695 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6697 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6698 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6699 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6700 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6701 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6702 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6703 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6704 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6707 gimplify_seq_add_stmt (pre_p, gfor);
6708 if (ret != GS_ALL_DONE)
6709 return GS_ERROR;
6710 *expr_p = NULL_TREE;
6711 return GS_ALL_DONE;
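/* Editorial sketch, not part of the original gimplify.c: for a single-nest
   simd loop whose iteration variable carries no explicit clause, the code
   above adds an implicit linear clause with OMP_CLAUSE_LINEAR_NO_COPYIN set
   and the step taken from OMP_FOR_INCR.  For example (names hypothetical)

     #pragma omp simd
     for (int i = 0; i < n; i++)
       a[i] = b[i];

   ends up with an implicit linear clause on 'i' with step 1;
   OMP_CLAUSE_LINEAR_NO_COPYOUT is also set because the iteration variable
   was declared in the loop's pre-body (its DECL_EXPR appears in
   OMP_FOR_PRE_BODY).  */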
6714 /* Gimplify the gross structure of other OpenMP constructs.
6715 In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
6716 and OMP_TEAMS. */
6718 static void
6719 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6721 tree expr = *expr_p;
6722 gimple stmt;
6723 gimple_seq body = NULL;
6724 enum omp_region_type ort = ORT_WORKSHARE;
6726 switch (TREE_CODE (expr))
6728 case OMP_SECTIONS:
6729 case OMP_SINGLE:
6730 break;
6731 case OMP_TARGET:
6732 ort = ORT_TARGET;
6733 break;
6734 case OMP_TARGET_DATA:
6735 ort = ORT_TARGET_DATA;
6736 break;
6737 case OMP_TEAMS:
6738 ort = ORT_TEAMS;
6739 break;
6740 default:
6741 gcc_unreachable ();
6743 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
6744 if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
6746 struct gimplify_ctx gctx;
6747 push_gimplify_context (&gctx);
6748 gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
6749 if (gimple_code (g) == GIMPLE_BIND)
6750 pop_gimplify_context (g);
6751 else
6752 pop_gimplify_context (NULL);
6753 if (ort == ORT_TARGET_DATA)
6755 gimple_seq cleanup = NULL;
6756 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
6757 g = gimple_build_call (fn, 0);
6758 gimple_seq_add_stmt (&cleanup, g);
6759 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
6760 body = NULL;
6761 gimple_seq_add_stmt (&body, g);
6764 else
6765 gimplify_and_add (OMP_BODY (expr), &body);
6766 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6768 switch (TREE_CODE (expr))
6770 case OMP_SECTIONS:
6771 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6772 break;
6773 case OMP_SINGLE:
6774 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6775 break;
6776 case OMP_TARGET:
6777 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
6778 OMP_CLAUSES (expr));
6779 break;
6780 case OMP_TARGET_DATA:
6781 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
6782 OMP_CLAUSES (expr));
6783 break;
6784 case OMP_TEAMS:
6785 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
6786 break;
6787 default:
6788 gcc_unreachable ();
6791 gimplify_seq_add_stmt (pre_p, stmt);
6792 *expr_p = NULL_TREE;
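/* Editorial sketch, not part of the original gimplify.c: for
   "#pragma omp target data" the body gimplified above is wrapped in a
   GIMPLE_TRY_FINALLY whose cleanup calls the GOMP_target_end_data builtin,
   so the mapped data is unmapped on any exit from the body.  Roughly
   (illustrative rendering of the gimple_build_try above):

     try
       {
         ... gimplified body ...
       }
     finally
       {
         GOMP_target_end_data ();
       }
*/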
6795 /* Gimplify the gross structure of the OpenMP target update construct. */
6797 static void
6798 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
6800 tree expr = *expr_p;
6801 gimple stmt;
6803 gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
6804 ORT_WORKSHARE);
6805 gimplify_adjust_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr));
6806 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
6807 OMP_TARGET_UPDATE_CLAUSES (expr));
6809 gimplify_seq_add_stmt (pre_p, stmt);
6810 *expr_p = NULL_TREE;
6813 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6814 stabilized the lhs of the atomic operation as *ADDR. Return true if
6815 EXPR is this stabilized form. */
6817 static bool
6818 goa_lhs_expr_p (tree expr, tree addr)
6820 /* Also include casts to other type variants. The C front end is fond
6821 of adding these for e.g. volatile variables. This is like
6822 STRIP_TYPE_NOPS but includes the main variant lookup. */
6823 STRIP_USELESS_TYPE_CONVERSION (expr);
6825 if (TREE_CODE (expr) == INDIRECT_REF)
6827 expr = TREE_OPERAND (expr, 0);
6828 while (expr != addr
6829 && (CONVERT_EXPR_P (expr)
6830 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6831 && TREE_CODE (expr) == TREE_CODE (addr)
6832 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6834 expr = TREE_OPERAND (expr, 0);
6835 addr = TREE_OPERAND (addr, 0);
6837 if (expr == addr)
6838 return true;
6839 return (TREE_CODE (addr) == ADDR_EXPR
6840 && TREE_CODE (expr) == ADDR_EXPR
6841 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6843 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6844 return true;
6845 return false;
6848 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
6849 expression does not involve the lhs, evaluate it into a temporary.
6850 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
6851 or -1 if an error was encountered. */
6853 static int
6854 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6855 tree lhs_var)
6857 tree expr = *expr_p;
6858 int saw_lhs;
6860 if (goa_lhs_expr_p (expr, lhs_addr))
6862 *expr_p = lhs_var;
6863 return 1;
6865 if (is_gimple_val (expr))
6866 return 0;
6868 saw_lhs = 0;
6869 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6871 case tcc_binary:
6872 case tcc_comparison:
6873 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6874 lhs_var);
6875 case tcc_unary:
6876 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6877 lhs_var);
6878 break;
6879 case tcc_expression:
6880 switch (TREE_CODE (expr))
6882 case TRUTH_ANDIF_EXPR:
6883 case TRUTH_ORIF_EXPR:
6884 case TRUTH_AND_EXPR:
6885 case TRUTH_OR_EXPR:
6886 case TRUTH_XOR_EXPR:
6887 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6888 lhs_addr, lhs_var);
6889 case TRUTH_NOT_EXPR:
6890 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6891 lhs_addr, lhs_var);
6892 break;
6893 case COMPOUND_EXPR:
6894 /* Break out any preevaluations from cp_build_modify_expr. */
6895 for (; TREE_CODE (expr) == COMPOUND_EXPR;
6896 expr = TREE_OPERAND (expr, 1))
6897 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
6898 *expr_p = expr;
6899 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
6900 default:
6901 break;
6903 break;
6904 default:
6905 break;
6908 if (saw_lhs == 0)
6910 enum gimplify_status gs;
6911 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6912 if (gs != GS_ALL_DONE)
6913 saw_lhs = -1;
6916 return saw_lhs;
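/* Editorial sketch, not part of the original gimplify.c: in an atomic
   update such as (names hypothetical)

     #pragma omp atomic
     x = x + foo ();

   goa_stabilize_expr evaluates the call to foo () into a fresh temporary on
   PRE_P before the atomic load/store pair is emitted, and replaces every
   occurrence of the lhs itself with the tmp_load register created by
   gimplify_omp_atomic, so the stored rhs becomes "tmp_load + <temp>".  */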
6919 /* Gimplify an OMP_ATOMIC statement. */
6921 static enum gimplify_status
6922 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6924 tree addr = TREE_OPERAND (*expr_p, 0);
6925 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
6926 ? NULL : TREE_OPERAND (*expr_p, 1);
6927 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6928 tree tmp_load;
6929 gimple loadstmt, storestmt;
6931 tmp_load = create_tmp_reg (type, NULL);
6932 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6933 return GS_ERROR;
6935 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6936 != GS_ALL_DONE)
6937 return GS_ERROR;
6939 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
6940 gimplify_seq_add_stmt (pre_p, loadstmt);
6941 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6942 != GS_ALL_DONE)
6943 return GS_ERROR;
6945 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
6946 rhs = tmp_load;
6947 storestmt = gimple_build_omp_atomic_store (rhs);
6948 gimplify_seq_add_stmt (pre_p, storestmt);
6949 if (OMP_ATOMIC_SEQ_CST (*expr_p))
6951 gimple_omp_atomic_set_seq_cst (loadstmt);
6952 gimple_omp_atomic_set_seq_cst (storestmt);
6954 switch (TREE_CODE (*expr_p))
6956 case OMP_ATOMIC_READ:
6957 case OMP_ATOMIC_CAPTURE_OLD:
6958 *expr_p = tmp_load;
6959 gimple_omp_atomic_set_need_value (loadstmt);
6960 break;
6961 case OMP_ATOMIC_CAPTURE_NEW:
6962 *expr_p = rhs;
6963 gimple_omp_atomic_set_need_value (storestmt);
6964 break;
6965 default:
6966 *expr_p = NULL;
6967 break;
6970 return GS_ALL_DONE;
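/* Editorial sketch, not part of the original gimplify.c: the capture
   variants above differ only in which value the surrounding expression sees
   (names hypothetical):

     #pragma omp atomic capture
     v = x++;        // OMP_ATOMIC_CAPTURE_OLD:  *expr_p = tmp_load (old value)

     #pragma omp atomic capture
     v = ++x;        // OMP_ATOMIC_CAPTURE_NEW:  *expr_p = rhs (new value)

     #pragma omp atomic read
     v = x;          // OMP_ATOMIC_READ:         rhs is just the loaded tmp_load

   In every case a GIMPLE_OMP_ATOMIC_LOAD / GIMPLE_OMP_ATOMIC_STORE pair is
   emitted, with the seq_cst flag copied from OMP_ATOMIC_SEQ_CST.  */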
6973 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
6974 body, and adding some EH bits. */
6976 static enum gimplify_status
6977 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
6979 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
6980 gimple g;
6981 gimple_seq body = NULL;
6982 struct gimplify_ctx gctx;
6983 int subcode = 0;
6985 /* Wrap the transaction body in a BIND_EXPR so we have a context
6986 in which to put decls for OpenMP. */
6987 if (TREE_CODE (tbody) != BIND_EXPR)
6989 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
6990 TREE_SIDE_EFFECTS (bind) = 1;
6991 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
6992 TRANSACTION_EXPR_BODY (expr) = bind;
6995 push_gimplify_context (&gctx);
6996 temp = voidify_wrapper_expr (*expr_p, NULL);
6998 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
6999 pop_gimplify_context (g);
7001 g = gimple_build_transaction (body, NULL);
7002 if (TRANSACTION_EXPR_OUTER (expr))
7003 subcode = GTMA_IS_OUTER;
7004 else if (TRANSACTION_EXPR_RELAXED (expr))
7005 subcode = GTMA_IS_RELAXED;
7006 gimple_transaction_set_subcode (g, subcode);
7008 gimplify_seq_add_stmt (pre_p, g);
7010 if (temp)
7012 *expr_p = temp;
7013 return GS_OK;
7016 *expr_p = NULL_TREE;
7017 return GS_ALL_DONE;
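/* Editorial sketch, not part of the original gimplify.c: for a TM construct
   such as (names hypothetical)

     __transaction_relaxed { counter++; }

   the body is gimplified inside its own BIND_EXPR and wrapped in a
   GIMPLE_TRANSACTION whose subcode records GTMA_IS_RELAXED; an outer
   __transaction_atomic [[outer]] body would get GTMA_IS_OUTER instead.  */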
7020 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
7021 expression produces a value to be used as an operand inside a GIMPLE
7022 statement, the value will be stored back in *EXPR_P. This value will
7023 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7024 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7025 emitted in PRE_P and POST_P.
7027 Additionally, this process may overwrite parts of the input
7028 expression during gimplification. Ideally, it should be
7029 possible to do non-destructive gimplification.
7031 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7032 the expression needs to evaluate to a value to be used as
7033 an operand in a GIMPLE statement, this value will be stored in
7034 *EXPR_P on exit. This happens when the caller specifies one
7035 of fb_lvalue or fb_rvalue fallback flags.
7037 PRE_P will contain the sequence of GIMPLE statements corresponding
7038 to the evaluation of EXPR and all the side-effects that must
7039 be executed before the main expression. On exit, the last
7040 statement of PRE_P is the core statement being gimplified. For
7041 instance, when gimplifying 'if (++a)' the last statement in
7042 PRE_P will be 'if (t.1)' where t.1 is the result of
7043 pre-incrementing 'a'.
7045 POST_P will contain the sequence of GIMPLE statements corresponding
7046 to the evaluation of all the side-effects that must be executed
7047 after the main expression. If this is NULL, the post
7048 side-effects are stored at the end of PRE_P.
7050 The reason why the output is split in two is to handle post
7051 side-effects explicitly. In some cases, an expression may have
7052 inner and outer post side-effects which need to be emitted in
7053 an order different from the one given by the recursive
7054 traversal. For instance, for the expression (*p--)++ the post
7055 side-effects of '--' must actually occur *after* the post
7056 side-effects of '++'. However, gimplification will first visit
7057 the inner expression, so if a separate POST sequence was not
7058 used, the resulting sequence would be:
7060 1 t.1 = *p
7061 2 p = p - 1
7062 3 t.2 = t.1 + 1
7063 4 *p = t.2
7065 However, the post-decrement operation in line #2 must not be
7066 evaluated until after the store to *p at line #4, so the
7067 correct sequence should be:
7069 1 t.1 = *p
7070 2 t.2 = t.1 + 1
7071 3 *p = t.2
7072 4 p = p - 1
7074 So, by specifying a separate post queue, it is possible
7075 to emit the post side-effects in the correct order.
7076 If POST_P is NULL, an internal queue will be used. Before
7077 returning to the caller, the sequence POST_P is appended to
7078 the main output sequence PRE_P.
7080 GIMPLE_TEST_F points to a function that takes a tree T and
7081 returns nonzero if T is in the GIMPLE form requested by the
7082 caller. The GIMPLE predicates are in gimple.c.
7084 FALLBACK tells the function what sort of a temporary we want if
7085 gimplification cannot produce an expression that complies with
7086 GIMPLE_TEST_F.
7088 fb_none means that no temporary should be generated
7089 fb_rvalue means that an rvalue is OK to generate
7090 fb_lvalue means that an lvalue is OK to generate
7091 fb_either means that either is OK, but an lvalue is preferable.
7092 fb_mayfail means that gimplification may fail (in which case
7093 GS_ERROR will be returned)
7095 The return value is either GS_ERROR or GS_ALL_DONE, since this
7096 function iterates until EXPR is completely gimplified or an error
7097 occurs. */
7099 enum gimplify_status
7100 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7101 bool (*gimple_test_f) (tree), fallback_t fallback)
7103 tree tmp;
7104 gimple_seq internal_pre = NULL;
7105 gimple_seq internal_post = NULL;
7106 tree save_expr;
7107 bool is_statement;
7108 location_t saved_location;
7109 enum gimplify_status ret;
7110 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7112 save_expr = *expr_p;
7113 if (save_expr == NULL_TREE)
7114 return GS_ALL_DONE;
7116 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7117 is_statement = gimple_test_f == is_gimple_stmt;
7118 if (is_statement)
7119 gcc_assert (pre_p);
7121 /* Consistency checks. */
7122 if (gimple_test_f == is_gimple_reg)
7123 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7124 else if (gimple_test_f == is_gimple_val
7125 || gimple_test_f == is_gimple_call_addr
7126 || gimple_test_f == is_gimple_condexpr
7127 || gimple_test_f == is_gimple_mem_rhs
7128 || gimple_test_f == is_gimple_mem_rhs_or_call
7129 || gimple_test_f == is_gimple_reg_rhs
7130 || gimple_test_f == is_gimple_reg_rhs_or_call
7131 || gimple_test_f == is_gimple_asm_val
7132 || gimple_test_f == is_gimple_mem_ref_addr)
7133 gcc_assert (fallback & fb_rvalue);
7134 else if (gimple_test_f == is_gimple_min_lval
7135 || gimple_test_f == is_gimple_lvalue)
7136 gcc_assert (fallback & fb_lvalue);
7137 else if (gimple_test_f == is_gimple_addressable)
7138 gcc_assert (fallback & fb_either);
7139 else if (gimple_test_f == is_gimple_stmt)
7140 gcc_assert (fallback == fb_none);
7141 else
7143 /* We should have recognized the GIMPLE_TEST_F predicate to
7144 know what kind of fallback to use in case a temporary is
7145 needed to hold the value or address of *EXPR_P. */
7146 gcc_unreachable ();
7149 /* We used to check the predicate here and return immediately if it
7150 succeeds. This is wrong; the design is for gimplification to be
7151 idempotent, and for the predicates to only test for valid forms, not
7152 whether they are fully simplified. */
7153 if (pre_p == NULL)
7154 pre_p = &internal_pre;
7156 if (post_p == NULL)
7157 post_p = &internal_post;
7159 /* Remember the last statements added to PRE_P and POST_P. Every
7160 new statement added by the gimplification helpers needs to be
7161 annotated with location information. To centralize the
7162 responsibility, we remember the last statement that had been
7163 added to both queues before gimplifying *EXPR_P. If
7164 gimplification produces new statements in PRE_P and POST_P, those
7165 statements will be annotated with the same location information
7166 as *EXPR_P. */
7167 pre_last_gsi = gsi_last (*pre_p);
7168 post_last_gsi = gsi_last (*post_p);
7170 saved_location = input_location;
7171 if (save_expr != error_mark_node
7172 && EXPR_HAS_LOCATION (*expr_p))
7173 input_location = EXPR_LOCATION (*expr_p);
7175 /* Loop over the specific gimplifiers until the toplevel node
7176 remains the same. */
7179 /* Strip away as many useless type conversions as possible
7180 at the toplevel. */
7181 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7183 /* Remember the expr. */
7184 save_expr = *expr_p;
7186 /* Die, die, die, my darling. */
7187 if (save_expr == error_mark_node
7188 || (TREE_TYPE (save_expr)
7189 && TREE_TYPE (save_expr) == error_mark_node))
7191 ret = GS_ERROR;
7192 break;
7195 /* Do any language-specific gimplification. */
7196 ret = ((enum gimplify_status)
7197 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7198 if (ret == GS_OK)
7200 if (*expr_p == NULL_TREE)
7201 break;
7202 if (*expr_p != save_expr)
7203 continue;
7205 else if (ret != GS_UNHANDLED)
7206 break;
7208 /* Make sure that all the cases set 'ret' appropriately. */
7209 ret = GS_UNHANDLED;
7210 switch (TREE_CODE (*expr_p))
7212 /* First deal with the special cases. */
7214 case POSTINCREMENT_EXPR:
7215 case POSTDECREMENT_EXPR:
7216 case PREINCREMENT_EXPR:
7217 case PREDECREMENT_EXPR:
7218 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7219 fallback != fb_none,
7220 TREE_TYPE (*expr_p));
7221 break;
7223 case ARRAY_REF:
7224 case ARRAY_RANGE_REF:
7225 case REALPART_EXPR:
7226 case IMAGPART_EXPR:
7227 case COMPONENT_REF:
7228 case VIEW_CONVERT_EXPR:
7229 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7230 fallback ? fallback : fb_rvalue);
7231 break;
7233 case COND_EXPR:
7234 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7236 /* C99 code may assign to an array in a structure value of a
7237 conditional expression, and this has undefined behavior
7238 only on execution, so create a temporary if an lvalue is
7239 required. */
7240 if (fallback == fb_lvalue)
7242 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7243 mark_addressable (*expr_p);
7244 ret = GS_OK;
7246 break;
7248 case CILK_SPAWN_STMT:
7249 gcc_assert
7250 (fn_contains_cilk_spawn_p (cfun)
7251 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p));
7252 if (!seen_error ())
7254 ret = (enum gimplify_status)
7255 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p,
7256 post_p);
7257 break;
7259 /* If errors are seen, then just process it as a CALL_EXPR. */
7261 case CALL_EXPR:
7262 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7264 /* C99 code may assign to an array in a structure returned
7265 from a function, and this has undefined behavior only on
7266 execution, so create a temporary if an lvalue is
7267 required. */
7268 if (fallback == fb_lvalue)
7270 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7271 mark_addressable (*expr_p);
7272 ret = GS_OK;
7274 break;
7276 case TREE_LIST:
7277 gcc_unreachable ();
7279 case COMPOUND_EXPR:
7280 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7281 break;
7283 case COMPOUND_LITERAL_EXPR:
7284 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7285 gimple_test_f, fallback);
7286 break;
7288 case MODIFY_EXPR:
7289 case INIT_EXPR:
7290 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7291 fallback != fb_none);
7292 break;
7294 case TRUTH_ANDIF_EXPR:
7295 case TRUTH_ORIF_EXPR:
7297 /* Preserve the original type of the expression and the
7298 source location of the outer expression. */
7299 tree org_type = TREE_TYPE (*expr_p);
7300 *expr_p = gimple_boolify (*expr_p);
7301 *expr_p = build3_loc (input_location, COND_EXPR,
7302 org_type, *expr_p,
7303 fold_convert_loc
7304 (input_location,
7305 org_type, boolean_true_node),
7306 fold_convert_loc
7307 (input_location,
7308 org_type, boolean_false_node));
7309 ret = GS_OK;
7310 break;
7313 case TRUTH_NOT_EXPR:
7315 tree type = TREE_TYPE (*expr_p);
7316 /* The parsers are careful to generate TRUTH_NOT_EXPR
7317 only with operands that are always zero or one.
7318 We do not fold here but handle the only interesting case
7319 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7320 *expr_p = gimple_boolify (*expr_p);
7321 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7322 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7323 TREE_TYPE (*expr_p),
7324 TREE_OPERAND (*expr_p, 0));
7325 else
7326 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7327 TREE_TYPE (*expr_p),
7328 TREE_OPERAND (*expr_p, 0),
7329 build_int_cst (TREE_TYPE (*expr_p), 1));
7330 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7331 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7332 ret = GS_OK;
7333 break;
7336 case ADDR_EXPR:
7337 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7338 break;
7340 case ANNOTATE_EXPR:
7342 tree cond = TREE_OPERAND (*expr_p, 0);
7343 tree id = TREE_OPERAND (*expr_p, 1);
7344 tree tmp = create_tmp_var_raw (TREE_TYPE (cond), NULL);
7345 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
7346 gimple call = gimple_build_call_internal (IFN_ANNOTATE, 2,
7347 cond, id);
7348 gimple_call_set_lhs (call, tmp);
7349 gimplify_seq_add_stmt (pre_p, call);
7350 *expr_p = tmp;
7351 ret = GS_ALL_DONE;
7352 break;
7355 case VA_ARG_EXPR:
7356 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7357 break;
7359 CASE_CONVERT:
7360 if (IS_EMPTY_STMT (*expr_p))
7362 ret = GS_ALL_DONE;
7363 break;
7366 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7367 || fallback == fb_none)
7369 /* Just strip a conversion to void (or in void context) and
7370 try again. */
7371 *expr_p = TREE_OPERAND (*expr_p, 0);
7372 ret = GS_OK;
7373 break;
7376 ret = gimplify_conversion (expr_p);
7377 if (ret == GS_ERROR)
7378 break;
7379 if (*expr_p != save_expr)
7380 break;
7381 /* FALLTHRU */
7383 case FIX_TRUNC_EXPR:
7384 /* unary_expr: ... | '(' cast ')' val | ... */
7385 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7386 is_gimple_val, fb_rvalue);
7387 recalculate_side_effects (*expr_p);
7388 break;
7390 case INDIRECT_REF:
7392 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7393 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7394 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7396 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7397 if (*expr_p != save_expr)
7399 ret = GS_OK;
7400 break;
7403 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7404 is_gimple_reg, fb_rvalue);
7405 if (ret == GS_ERROR)
7406 break;
7408 recalculate_side_effects (*expr_p);
7409 *expr_p = fold_build2_loc (input_location, MEM_REF,
7410 TREE_TYPE (*expr_p),
7411 TREE_OPERAND (*expr_p, 0),
7412 build_int_cst (saved_ptr_type, 0));
7413 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7414 TREE_THIS_NOTRAP (*expr_p) = notrap;
7415 ret = GS_OK;
7416 break;
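/* Illustrative sketch (pseudo tree-dump syntax): once the pointer
   operand P has been gimplified into a register, a load such as

     ... = *p;

   is represented as the equivalent MEM_REF

     ... = MEM[(T *) p + 0];

   with the original volatility and no-trap flags copied over.  */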
7419 /* We arrive here through the various re-gimplification paths. */
7420 case MEM_REF:
7421 /* First try re-folding the whole thing. */
7422 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7423 TREE_OPERAND (*expr_p, 0),
7424 TREE_OPERAND (*expr_p, 1));
7425 if (tmp)
7427 *expr_p = tmp;
7428 recalculate_side_effects (*expr_p);
7429 ret = GS_OK;
7430 break;
7432 /* Avoid re-gimplifying the address operand if it is already
7433 in suitable form. Re-gimplifying would mark the address
7434 operand addressable. Always gimplify when not in SSA form
7435 as we still may have to gimplify decls with value-exprs. */
7436 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7437 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7439 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7440 is_gimple_mem_ref_addr, fb_rvalue);
7441 if (ret == GS_ERROR)
7442 break;
7444 recalculate_side_effects (*expr_p);
7445 ret = GS_ALL_DONE;
7446 break;
7448 /* Constants need not be gimplified. */
7449 case INTEGER_CST:
7450 case REAL_CST:
7451 case FIXED_CST:
7452 case STRING_CST:
7453 case COMPLEX_CST:
7454 case VECTOR_CST:
7455 /* Drop the overflow flag on constants; we do not want
7456 that in the GIMPLE IL. */
7457 if (TREE_OVERFLOW_P (*expr_p))
7458 *expr_p = drop_tree_overflow (*expr_p);
7459 ret = GS_ALL_DONE;
7460 break;
7462 case CONST_DECL:
7463 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7464 CONST_DECL node. Otherwise the decl is replaceable by its
7465 value. */
7466 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7467 if (fallback & fb_lvalue)
7468 ret = GS_ALL_DONE;
7469 else
7471 *expr_p = DECL_INITIAL (*expr_p);
7472 ret = GS_OK;
7474 break;
7476 case DECL_EXPR:
7477 ret = gimplify_decl_expr (expr_p, pre_p);
7478 break;
7480 case BIND_EXPR:
7481 ret = gimplify_bind_expr (expr_p, pre_p);
7482 break;
7484 case LOOP_EXPR:
7485 ret = gimplify_loop_expr (expr_p, pre_p);
7486 break;
7488 case SWITCH_EXPR:
7489 ret = gimplify_switch_expr (expr_p, pre_p);
7490 break;
7492 case EXIT_EXPR:
7493 ret = gimplify_exit_expr (expr_p);
7494 break;
7496 case GOTO_EXPR:
7497 /* If the target is not a LABEL_DECL, then it is a computed jump
7498 and the target needs to be gimplified. */
7499 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7501 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7502 NULL, is_gimple_val, fb_rvalue);
7503 if (ret == GS_ERROR)
7504 break;
7506 gimplify_seq_add_stmt (pre_p,
7507 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7508 ret = GS_ALL_DONE;
7509 break;
7511 case PREDICT_EXPR:
7512 gimplify_seq_add_stmt (pre_p,
7513 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7514 PREDICT_EXPR_OUTCOME (*expr_p)));
7515 ret = GS_ALL_DONE;
7516 break;
7518 case LABEL_EXPR:
7519 ret = GS_ALL_DONE;
7520 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7521 == current_function_decl);
7522 gimplify_seq_add_stmt (pre_p,
7523 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7524 break;
7526 case CASE_LABEL_EXPR:
7527 ret = gimplify_case_label_expr (expr_p, pre_p);
7528 break;
7530 case RETURN_EXPR:
7531 ret = gimplify_return_expr (*expr_p, pre_p);
7532 break;
7534 case CONSTRUCTOR:
7535 /* Don't reduce this in place; let gimplify_init_constructor work its
7536 magic. But if we're only elaborating this for side effects,
7537 gimplify any element that has side-effects. */
7538 if (fallback == fb_none)
7540 unsigned HOST_WIDE_INT ix;
7541 tree val;
7542 tree temp = NULL_TREE;
7543 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7544 if (TREE_SIDE_EFFECTS (val))
7545 append_to_statement_list (val, &temp);
7547 *expr_p = temp;
7548 ret = temp ? GS_OK : GS_ALL_DONE;
7550 /* C99 code may assign to an array in a constructed
7551 structure or union, and this has undefined behavior only
7552 on execution, so create a temporary if an lvalue is
7553 required. */
7554 else if (fallback == fb_lvalue)
7556 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7557 mark_addressable (*expr_p);
7558 ret = GS_OK;
7560 else
7561 ret = GS_ALL_DONE;
7562 break;
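/* Illustrative sketch (hypothetical C source): when the constructor
   is only elaborated for side effects, e.g.

     (void) (struct S) { .a = f (), .b = 3 };

   only the elements with side effects survive, so the whole
   expression reduces to a statement list containing just the call
   to f.  */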
7564 /* The following are special cases that are not handled by the
7565 original GIMPLE grammar. */
7567 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7568 eliminated. */
7569 case SAVE_EXPR:
7570 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7571 break;
7573 case BIT_FIELD_REF:
7574 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7575 post_p, is_gimple_lvalue, fb_either);
7576 recalculate_side_effects (*expr_p);
7577 break;
7579 case TARGET_MEM_REF:
7581 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7583 if (TMR_BASE (*expr_p))
7584 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7585 post_p, is_gimple_mem_ref_addr, fb_either);
7586 if (TMR_INDEX (*expr_p))
7587 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7588 post_p, is_gimple_val, fb_rvalue);
7589 if (TMR_INDEX2 (*expr_p))
7590 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7591 post_p, is_gimple_val, fb_rvalue);
7592 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7593 ret = MIN (r0, r1);
7595 break;
7597 case NON_LVALUE_EXPR:
7598 /* This should have been stripped above. */
7599 gcc_unreachable ();
7601 case ASM_EXPR:
7602 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7603 break;
7605 case TRY_FINALLY_EXPR:
7606 case TRY_CATCH_EXPR:
7608 gimple_seq eval, cleanup;
7609 gimple try_;
7611 /* Calls to destructors are generated automatically in the FINALLY/CATCH
7612 block and should have UNKNOWN_LOCATION. However, gimplify_call_expr
7613 resets a call's location to input_location when it finds that the
7614 statement's location is unknown. To prevent that for these destructor
7615 calls, set input_location to UNKNOWN_LOCATION here.
7616 Note that this only affects the destructor calls in the FINALLY/CATCH
7617 block; input_location is automatically restored to its original value
7618 by the end of gimplify_expr. */
7619 input_location = UNKNOWN_LOCATION;
7620 eval = cleanup = NULL;
7621 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7622 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7623 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7624 if (gimple_seq_empty_p (cleanup))
7626 gimple_seq_add_seq (pre_p, eval);
7627 ret = GS_ALL_DONE;
7628 break;
7630 try_ = gimple_build_try (eval, cleanup,
7631 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7632 ? GIMPLE_TRY_FINALLY
7633 : GIMPLE_TRY_CATCH);
7634 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
7635 gimple_set_location (try_, saved_location);
7636 else
7637 gimple_set_location (try_, EXPR_LOCATION (save_expr));
7638 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7639 gimple_try_set_catch_is_cleanup (try_,
7640 TRY_CATCH_IS_CLEANUP (*expr_p));
7641 gimplify_seq_add_stmt (pre_p, try_);
7642 ret = GS_ALL_DONE;
7643 break;
7646 case CLEANUP_POINT_EXPR:
7647 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7648 break;
7650 case TARGET_EXPR:
7651 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7652 break;
7654 case CATCH_EXPR:
7656 gimple c;
7657 gimple_seq handler = NULL;
7658 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7659 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7660 gimplify_seq_add_stmt (pre_p, c);
7661 ret = GS_ALL_DONE;
7662 break;
7665 case EH_FILTER_EXPR:
7667 gimple ehf;
7668 gimple_seq failure = NULL;
7670 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7671 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
7672 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
7673 gimplify_seq_add_stmt (pre_p, ehf);
7674 ret = GS_ALL_DONE;
7675 break;
7678 case OBJ_TYPE_REF:
7680 enum gimplify_status r0, r1;
7681 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7682 post_p, is_gimple_val, fb_rvalue);
7683 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7684 post_p, is_gimple_val, fb_rvalue);
7685 TREE_SIDE_EFFECTS (*expr_p) = 0;
7686 ret = MIN (r0, r1);
7688 break;
7690 case LABEL_DECL:
7691 /* We get here when taking the address of a label. We mark
7692 the label as "forced", meaning it can never be removed and
7693 is a potential target of any computed goto. */
7694 FORCED_LABEL (*expr_p) = 1;
7695 ret = GS_ALL_DONE;
7696 break;
7698 case STATEMENT_LIST:
7699 ret = gimplify_statement_list (expr_p, pre_p);
7700 break;
7702 case WITH_SIZE_EXPR:
7704 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7705 post_p == &internal_post ? NULL : post_p,
7706 gimple_test_f, fallback);
7707 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7708 is_gimple_val, fb_rvalue);
7709 ret = GS_ALL_DONE;
7711 break;
7713 case VAR_DECL:
7714 case PARM_DECL:
7715 ret = gimplify_var_or_parm_decl (expr_p);
7716 break;
7718 case RESULT_DECL:
7719 /* When within an OpenMP context, notice uses of variables. */
7720 if (gimplify_omp_ctxp)
7721 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7722 ret = GS_ALL_DONE;
7723 break;
7725 case SSA_NAME:
7726 /* Allow callbacks into the gimplifier during optimization. */
7727 ret = GS_ALL_DONE;
7728 break;
7730 case OMP_PARALLEL:
7731 gimplify_omp_parallel (expr_p, pre_p);
7732 ret = GS_ALL_DONE;
7733 break;
7735 case OMP_TASK:
7736 gimplify_omp_task (expr_p, pre_p);
7737 ret = GS_ALL_DONE;
7738 break;
7740 case OMP_FOR:
7741 case OMP_SIMD:
7742 case OMP_DISTRIBUTE:
7743 ret = gimplify_omp_for (expr_p, pre_p);
7744 break;
7746 case OMP_SECTIONS:
7747 case OMP_SINGLE:
7748 case OMP_TARGET:
7749 case OMP_TARGET_DATA:
7750 case OMP_TEAMS:
7751 gimplify_omp_workshare (expr_p, pre_p);
7752 ret = GS_ALL_DONE;
7753 break;
7755 case OMP_TARGET_UPDATE:
7756 gimplify_omp_target_update (expr_p, pre_p);
7757 ret = GS_ALL_DONE;
7758 break;
7760 case OMP_SECTION:
7761 case OMP_MASTER:
7762 case OMP_TASKGROUP:
7763 case OMP_ORDERED:
7764 case OMP_CRITICAL:
7766 gimple_seq body = NULL;
7767 gimple g;
7769 gimplify_and_add (OMP_BODY (*expr_p), &body);
7770 switch (TREE_CODE (*expr_p))
7772 case OMP_SECTION:
7773 g = gimple_build_omp_section (body);
7774 break;
7775 case OMP_MASTER:
7776 g = gimple_build_omp_master (body);
7777 break;
7778 case OMP_TASKGROUP:
7780 gimple_seq cleanup = NULL;
7781 tree fn
7782 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
7783 g = gimple_build_call (fn, 0);
7784 gimple_seq_add_stmt (&cleanup, g);
7785 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7786 body = NULL;
7787 gimple_seq_add_stmt (&body, g);
7788 g = gimple_build_omp_taskgroup (body);
7790 break;
7791 case OMP_ORDERED:
7792 g = gimple_build_omp_ordered (body);
7793 break;
7794 case OMP_CRITICAL:
7795 g = gimple_build_omp_critical (body,
7796 OMP_CRITICAL_NAME (*expr_p));
7797 break;
7798 default:
7799 gcc_unreachable ();
7801 gimplify_seq_add_stmt (pre_p, g);
7802 ret = GS_ALL_DONE;
7803 break;
7806 case OMP_ATOMIC:
7807 case OMP_ATOMIC_READ:
7808 case OMP_ATOMIC_CAPTURE_OLD:
7809 case OMP_ATOMIC_CAPTURE_NEW:
7810 ret = gimplify_omp_atomic (expr_p, pre_p);
7811 break;
7813 case TRANSACTION_EXPR:
7814 ret = gimplify_transaction (expr_p, pre_p);
7815 break;
7817 case TRUTH_AND_EXPR:
7818 case TRUTH_OR_EXPR:
7819 case TRUTH_XOR_EXPR:
7821 tree orig_type = TREE_TYPE (*expr_p);
7822 tree new_type, xop0, xop1;
7823 *expr_p = gimple_boolify (*expr_p);
7824 new_type = TREE_TYPE (*expr_p);
7825 if (!useless_type_conversion_p (orig_type, new_type))
7827 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
7828 ret = GS_OK;
7829 break;
7832 /* Boolified binary truth expressions are semantically equivalent
7833 to bitwise binary expressions. Canonicalize them to the
7834 bitwise variant. */
7835 switch (TREE_CODE (*expr_p))
7837 case TRUTH_AND_EXPR:
7838 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7839 break;
7840 case TRUTH_OR_EXPR:
7841 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7842 break;
7843 case TRUTH_XOR_EXPR:
7844 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7845 break;
7846 default:
7847 break;
7849 /* Now make sure that the operands have types compatible with the
7850 expression's new_type. */
7851 xop0 = TREE_OPERAND (*expr_p, 0);
7852 xop1 = TREE_OPERAND (*expr_p, 1);
7853 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
7854 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
7855 new_type,
7856 xop0);
7857 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
7858 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
7859 new_type,
7860 xop1);
7861 /* Continue classified as tcc_binary. */
7862 goto expr_2;
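/* Illustrative sketch, not part of the sources: a non-short-circuit
   boolean operation such as TRUTH_AND_EXPR <a, b> is canonicalized
   into BIT_AND_EXPR on the boolified operands, so e.g. with
   _Bool a, b the GIMPLE result is simply

     tmp = a & b;

   possibly followed by a conversion back to the original type.  */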
7865 case FMA_EXPR:
7866 case VEC_COND_EXPR:
7867 case VEC_PERM_EXPR:
7868 /* Classified as tcc_expression. */
7869 goto expr_3;
7871 case POINTER_PLUS_EXPR:
7873 enum gimplify_status r0, r1;
7874 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7875 post_p, is_gimple_val, fb_rvalue);
7876 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7877 post_p, is_gimple_val, fb_rvalue);
7878 recalculate_side_effects (*expr_p);
7879 ret = MIN (r0, r1);
7880 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
7881 after gimplifying operands - this is similar to how
7882 it would be folding all gimplified stmts on creation
7883 to have them canonicalized, which is what we eventually
7884 should do anyway. */
7885 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7886 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
7888 *expr_p = build_fold_addr_expr_with_type_loc
7889 (input_location,
7890 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
7891 TREE_OPERAND (*expr_p, 0),
7892 fold_convert (ptr_type_node,
7893 TREE_OPERAND (*expr_p, 1))),
7894 TREE_TYPE (*expr_p));
7895 ret = MIN (ret, GS_OK);
7897 break;
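/* Illustrative sketch (pseudo tree-dump syntax): a POINTER_PLUS_EXPR
   whose base is an invariant address and whose offset is a constant,
   say &a p+ 16, is folded after gimplification of the operands into
   the invariant

     &MEM[(char *) &a + 16]

   so no separate pointer-addition statement needs to be emitted.  */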
7900 case CILK_SYNC_STMT:
7902 if (!fn_contains_cilk_spawn_p (cfun))
7904 error_at (EXPR_LOCATION (*expr_p),
7905 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
7906 ret = GS_ERROR;
7908 else
7910 gimplify_cilk_sync (expr_p, pre_p);
7911 ret = GS_ALL_DONE;
7913 break;
7916 default:
7917 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7919 case tcc_comparison:
7920 /* Handle comparison of non-scalar-mode aggregate objects
7921 with a call to memcmp. It would be nice to only have to do
7922 this for variable-sized objects, but then we'd have to allow
7923 the same nest of reference nodes we allow for MODIFY_EXPR and
7924 that's too complex.
7926 Compare scalar mode aggregates as scalar mode values. Using
7927 memcmp for them would be very inefficient at best, and is
7928 plain wrong if bitfields are involved. */
7930 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7932 /* Vector comparisons need no boolification. */
7933 if (TREE_CODE (type) == VECTOR_TYPE)
7934 goto expr_2;
7935 else if (!AGGREGATE_TYPE_P (type))
7937 tree org_type = TREE_TYPE (*expr_p);
7938 *expr_p = gimple_boolify (*expr_p);
7939 if (!useless_type_conversion_p (org_type,
7940 TREE_TYPE (*expr_p)))
7942 *expr_p = fold_convert_loc (input_location,
7943 org_type, *expr_p);
7944 ret = GS_OK;
7946 else
7947 goto expr_2;
7949 else if (TYPE_MODE (type) != BLKmode)
7950 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7951 else
7952 ret = gimplify_variable_sized_compare (expr_p);
7954 break;
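/* Illustrative sketch (hypothetical C source): for two structures
   S1 and S2 whose type has BLKmode, the comparison

     s1 == s2

   is rewritten via gimplify_variable_sized_compare into a memcmp of
   the two objects tested against zero, whereas an aggregate that
   fits in a scalar mode is compared directly as a value of that
   mode.  */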
7957 /* If *EXPR_P does not need to be special-cased, handle it
7958 according to its class. */
7959 case tcc_unary:
7960 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7961 post_p, is_gimple_val, fb_rvalue);
7962 break;
7964 case tcc_binary:
7965 expr_2:
7967 enum gimplify_status r0, r1;
7969 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7970 post_p, is_gimple_val, fb_rvalue);
7971 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7972 post_p, is_gimple_val, fb_rvalue);
7974 ret = MIN (r0, r1);
7975 break;
7978 expr_3:
7980 enum gimplify_status r0, r1, r2;
7982 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7983 post_p, is_gimple_val, fb_rvalue);
7984 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7985 post_p, is_gimple_val, fb_rvalue);
7986 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7987 post_p, is_gimple_val, fb_rvalue);
7989 ret = MIN (MIN (r0, r1), r2);
7990 break;
7993 case tcc_declaration:
7994 case tcc_constant:
7995 ret = GS_ALL_DONE;
7996 goto dont_recalculate;
7998 default:
7999 gcc_unreachable ();
8002 recalculate_side_effects (*expr_p);
8004 dont_recalculate:
8005 break;
8008 gcc_assert (*expr_p || ret != GS_OK);
8010 while (ret == GS_OK);
8012 /* If we encountered an error_mark somewhere nested inside, either
8013 stub out the statement or propagate the error back out. */
8014 if (ret == GS_ERROR)
8016 if (is_statement)
8017 *expr_p = NULL;
8018 goto out;
8021 /* This was only valid as a return value from the langhook, which
8022 we handled. Make sure it doesn't escape from any other context. */
8023 gcc_assert (ret != GS_UNHANDLED);
8025 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8027 /* We aren't looking for a value, and we don't have a valid
8028 statement. If it doesn't have side-effects, throw it away. */
8029 if (!TREE_SIDE_EFFECTS (*expr_p))
8030 *expr_p = NULL;
8031 else if (!TREE_THIS_VOLATILE (*expr_p))
8033 /* This is probably a _REF that contains something nested that
8034 has side effects. Recurse through the operands to find it. */
8035 enum tree_code code = TREE_CODE (*expr_p);
8037 switch (code)
8039 case COMPONENT_REF:
8040 case REALPART_EXPR:
8041 case IMAGPART_EXPR:
8042 case VIEW_CONVERT_EXPR:
8043 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8044 gimple_test_f, fallback);
8045 break;
8047 case ARRAY_REF:
8048 case ARRAY_RANGE_REF:
8049 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8050 gimple_test_f, fallback);
8051 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8052 gimple_test_f, fallback);
8053 break;
8055 default:
8056 /* Anything else with side-effects must be converted to
8057 a valid statement before we get here. */
8058 gcc_unreachable ();
8061 *expr_p = NULL;
8063 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8064 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8066 /* Historically, the compiler has treated a bare reference
8067 to a non-BLKmode volatile lvalue as forcing a load. */
8068 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8070 /* Normally, we do not want to create a temporary for a
8071 TREE_ADDRESSABLE type because such a type should not be
8072 copied by bitwise-assignment. However, we make an
8073 exception here, as all we are doing here is ensuring that
8074 we read the bytes that make up the type. We use
8075 create_tmp_var_raw because create_tmp_var will abort when
8076 given a TREE_ADDRESSABLE type. */
8077 tree tmp = create_tmp_var_raw (type, "vol");
8078 gimple_add_tmp_var (tmp);
8079 gimplify_assign (tmp, *expr_p, pre_p);
8080 *expr_p = NULL;
8082 else
8083 /* We can't do anything useful with a volatile reference to
8084 an incomplete type, so just throw it away. Likewise for
8085 a BLKmode type, since any implicit inner load should
8086 already have been turned into an explicit one by the
8087 gimplification process. */
8088 *expr_p = NULL;
8091 /* If we are gimplifying at the statement level, we're done. Tack
8092 everything together and return. */
8093 if (fallback == fb_none || is_statement)
8095 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8096 it out for GC to reclaim it. */
8097 *expr_p = NULL_TREE;
8099 if (!gimple_seq_empty_p (internal_pre)
8100 || !gimple_seq_empty_p (internal_post))
8102 gimplify_seq_add_seq (&internal_pre, internal_post);
8103 gimplify_seq_add_seq (pre_p, internal_pre);
8106 /* The result of gimplifying *EXPR_P is going to be the last few
8107 statements in *PRE_P and *POST_P. Add location information
8108 to all the statements that were added by the gimplification
8109 helpers. */
8110 if (!gimple_seq_empty_p (*pre_p))
8111 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8113 if (!gimple_seq_empty_p (*post_p))
8114 annotate_all_with_location_after (*post_p, post_last_gsi,
8115 input_location);
8117 goto out;
8120 #ifdef ENABLE_GIMPLE_CHECKING
8121 if (*expr_p)
8123 enum tree_code code = TREE_CODE (*expr_p);
8124 /* These expressions should already be in gimple IR form. */
8125 gcc_assert (code != MODIFY_EXPR
8126 && code != ASM_EXPR
8127 && code != BIND_EXPR
8128 && code != CATCH_EXPR
8129 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8130 && code != EH_FILTER_EXPR
8131 && code != GOTO_EXPR
8132 && code != LABEL_EXPR
8133 && code != LOOP_EXPR
8134 && code != SWITCH_EXPR
8135 && code != TRY_FINALLY_EXPR
8136 && code != OMP_CRITICAL
8137 && code != OMP_FOR
8138 && code != OMP_MASTER
8139 && code != OMP_TASKGROUP
8140 && code != OMP_ORDERED
8141 && code != OMP_PARALLEL
8142 && code != OMP_SECTIONS
8143 && code != OMP_SECTION
8144 && code != OMP_SINGLE);
8146 #endif
8148 /* Otherwise we're gimplifying a subexpression, so the resulting
8149 value is interesting. If it's a valid operand that matches
8150 GIMPLE_TEST_F, we're done. Unless we are handling some
8151 post-effects internally; if that's the case, we need to copy into
8152 a temporary before adding the post-effects to POST_P. */
8153 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
8154 goto out;
8156 /* Otherwise, we need to create a new temporary for the gimplified
8157 expression. */
8159 /* We can't return an lvalue if we have an internal postqueue. The
8160 object the lvalue refers to would (probably) be modified by the
8161 postqueue; we need to copy the value out first, which means an
8162 rvalue. */
8163 if ((fallback & fb_lvalue)
8164 && gimple_seq_empty_p (internal_post)
8165 && is_gimple_addressable (*expr_p))
8167 /* An lvalue will do. Take the address of the expression, store it
8168 in a temporary, and replace the expression with an INDIRECT_REF of
8169 that temporary. */
8170 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8171 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8172 *expr_p = build_simple_mem_ref (tmp);
8174 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8176 /* An rvalue will do. Assign the gimplified expression into a
8177 new temporary TMP and replace the original expression with
8178 TMP. First, make sure that the expression has a type so that
8179 it can be assigned into a temporary. */
8180 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
8181 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
8183 else
8185 #ifdef ENABLE_GIMPLE_CHECKING
8186 if (!(fallback & fb_mayfail))
8188 fprintf (stderr, "gimplification failed:\n");
8189 print_generic_expr (stderr, *expr_p, 0);
8190 debug_tree (*expr_p);
8191 internal_error ("gimplification failed");
8193 #endif
8194 gcc_assert (fallback & fb_mayfail);
8196 /* If this is an asm statement, and the user asked for the
8197 impossible, don't die. Fail and let gimplify_asm_expr
8198 issue an error. */
8199 ret = GS_ERROR;
8200 goto out;
8203 /* Make sure the temporary matches our predicate. */
8204 gcc_assert ((*gimple_test_f) (*expr_p));
8206 if (!gimple_seq_empty_p (internal_post))
8208 annotate_all_with_location (internal_post, input_location);
8209 gimplify_seq_add_seq (pre_p, internal_post);
8212 out:
8213 input_location = saved_location;
8214 return ret;
8217 /* Look through TYPE for variable-sized objects and gimplify each such
8218 size that we find. Add to LIST_P any statements generated. */
8220 void
8221 gimplify_type_sizes (tree type, gimple_seq *list_p)
8223 tree field, t;
8225 if (type == NULL || type == error_mark_node)
8226 return;
8228 /* We first do the main variant, then copy into any other variants. */
8229 type = TYPE_MAIN_VARIANT (type);
8231 /* Avoid infinite recursion. */
8232 if (TYPE_SIZES_GIMPLIFIED (type))
8233 return;
8235 TYPE_SIZES_GIMPLIFIED (type) = 1;
8237 switch (TREE_CODE (type))
8239 case INTEGER_TYPE:
8240 case ENUMERAL_TYPE:
8241 case BOOLEAN_TYPE:
8242 case REAL_TYPE:
8243 case FIXED_POINT_TYPE:
8244 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8245 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
8247 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8249 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8250 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
8252 break;
8254 case ARRAY_TYPE:
8255 /* These types may not have declarations, so handle them here. */
8256 gimplify_type_sizes (TREE_TYPE (type), list_p);
8257 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
8258 /* Ensure VLA bounds aren't removed: at -O0 they should be variables
8259 with assigned stack slots, and at -O1+ with -g they should be
8260 tracked by VTA. */
8261 if (!(TYPE_NAME (type)
8262 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8263 && DECL_IGNORED_P (TYPE_NAME (type)))
8264 && TYPE_DOMAIN (type)
8265 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8267 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8268 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8269 DECL_IGNORED_P (t) = 0;
8270 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8271 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8272 DECL_IGNORED_P (t) = 0;
8274 break;
8276 case RECORD_TYPE:
8277 case UNION_TYPE:
8278 case QUAL_UNION_TYPE:
8279 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8280 if (TREE_CODE (field) == FIELD_DECL)
8282 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
8283 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8284 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8285 gimplify_type_sizes (TREE_TYPE (field), list_p);
8287 break;
8289 case POINTER_TYPE:
8290 case REFERENCE_TYPE:
8291 /* We used to recurse on the pointed-to type here, which turned out to
8292 be incorrect because its definition might refer to variables not
8293 yet initialized at this point if a forward declaration is involved.
8295 It was actually useful for anonymous pointed-to types to ensure
8296 that the sizes evaluation dominates every possible later use of the
8297 values. Restricting to such types here would be safe since there
8298 is no possible forward declaration around, but would introduce an
8299 undesirable middle-end semantic to anonymity. We then defer to
8300 front-ends the responsibility of ensuring that the sizes are
8301 evaluated both early and late enough, e.g. by attaching artificial
8302 type declarations to the tree. */
8303 break;
8305 default:
8306 break;
8309 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8310 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
8312 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8314 TYPE_SIZE (t) = TYPE_SIZE (type);
8315 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8316 TYPE_SIZES_GIMPLIFIED (t) = 1;
8320 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8321 a size or position, has had all of its SAVE_EXPRs evaluated.
8322 We add any required statements to *STMT_P. */
8324 void
8325 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
8327 tree expr = *expr_p;
8329 /* We don't do anything if the value isn't there, is constant, or contains
8330 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
8331 a VAR_DECL: for a VAR_DECL from another function, the gimplifier
8332 would want to replace it with a new variable, which would cause problems
8333 if this type is from outside the function, so it's OK to leave it as-is here. */
8334 if (is_gimple_sizepos (expr))
8335 return;
8337 *expr_p = unshare_expr (expr);
8339 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
8342 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8343 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8344 is true, also gimplify the parameters. */
8346 gimple
8347 gimplify_body (tree fndecl, bool do_parms)
8349 location_t saved_location = input_location;
8350 gimple_seq parm_stmts, seq;
8351 gimple outer_bind;
8352 struct gimplify_ctx gctx;
8353 struct cgraph_node *cgn;
8355 timevar_push (TV_TREE_GIMPLIFY);
8357 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8358 gimplification. */
8359 default_rtl_profile ();
8361 gcc_assert (gimplify_ctxp == NULL);
8362 push_gimplify_context (&gctx);
8364 if (flag_openmp)
8366 gcc_assert (gimplify_omp_ctxp == NULL);
8367 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
8368 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
8371 /* Unshare most shared trees in the body and in that of any nested functions.
8372 It would seem we don't have to do this for nested functions because
8373 they are supposed to be output and then the outer function gimplified
8374 first, but the g++ front end doesn't always do it that way. */
8375 unshare_body (fndecl);
8376 unvisit_body (fndecl);
8378 cgn = cgraph_get_node (fndecl);
8379 if (cgn && cgn->origin)
8380 nonlocal_vlas = pointer_set_create ();
8382 /* Make sure input_location isn't set to something weird. */
8383 input_location = DECL_SOURCE_LOCATION (fndecl);
8385 /* Resolve callee-copies. This has to be done before processing
8386 the body so that DECL_VALUE_EXPR gets processed correctly. */
8387 parm_stmts = do_parms ? gimplify_parameters () : NULL;
8389 /* Gimplify the function's body. */
8390 seq = NULL;
8391 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
8392 outer_bind = gimple_seq_first_stmt (seq);
8393 if (!outer_bind)
8395 outer_bind = gimple_build_nop ();
8396 gimplify_seq_add_stmt (&seq, outer_bind);
8399 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8400 not the case, wrap everything in a GIMPLE_BIND to make it so. */
8401 if (gimple_code (outer_bind) == GIMPLE_BIND
8402 && gimple_seq_first (seq) == gimple_seq_last (seq))
8404 else
8405 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8407 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8409 /* If we had callee-copies statements, insert them at the beginning
8410 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
8411 if (!gimple_seq_empty_p (parm_stmts))
8413 tree parm;
8415 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8416 gimple_bind_set_body (outer_bind, parm_stmts);
8418 for (parm = DECL_ARGUMENTS (current_function_decl);
8419 parm; parm = DECL_CHAIN (parm))
8420 if (DECL_HAS_VALUE_EXPR_P (parm))
8422 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8423 DECL_IGNORED_P (parm) = 0;
8427 if (nonlocal_vlas)
8429 pointer_set_destroy (nonlocal_vlas);
8430 nonlocal_vlas = NULL;
8433 if ((flag_openmp || flag_openmp_simd) && gimplify_omp_ctxp)
8435 delete_omp_context (gimplify_omp_ctxp);
8436 gimplify_omp_ctxp = NULL;
8439 pop_gimplify_context (outer_bind);
8440 gcc_assert (gimplify_ctxp == NULL);
8442 #ifdef ENABLE_CHECKING
8443 if (!seen_error ())
8444 verify_gimple_in_seq (gimple_bind_body (outer_bind));
8445 #endif
8447 timevar_pop (TV_TREE_GIMPLIFY);
8448 input_location = saved_location;
8450 return outer_bind;
8453 typedef char *char_p; /* For DEF_VEC_P. */
8455 /* Return whether we should exclude FNDECL from instrumentation. */
8457 static bool
8458 flag_instrument_functions_exclude_p (tree fndecl)
8460 vec<char_p> *v;
8462 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8463 if (v && v->length () > 0)
8465 const char *name;
8466 int i;
8467 char *s;
8469 name = lang_hooks.decl_printable_name (fndecl, 0);
8470 FOR_EACH_VEC_ELT (*v, i, s)
8471 if (strstr (name, s) != NULL)
8472 return true;
8475 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8476 if (v && v->length () > 0)
8478 const char *name;
8479 int i;
8480 char *s;
8482 name = DECL_SOURCE_FILE (fndecl);
8483 FOR_EACH_VEC_ELT (*v, i, s)
8484 if (strstr (name, s) != NULL)
8485 return true;
8488 return false;
8491 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
8492 node for the function we want to gimplify.
8494 The resulting sequence of GIMPLE statements for the body of FNDECL
8495 is attached to FNDECL with gimple_set_body. */
8497 void
8498 gimplify_function_tree (tree fndecl)
8500 tree parm, ret;
8501 gimple_seq seq;
8502 gimple bind;
8504 gcc_assert (!gimple_body (fndecl));
8506 if (DECL_STRUCT_FUNCTION (fndecl))
8507 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8508 else
8509 push_struct_function (fndecl);
8511 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
8513 /* Preliminarily mark non-addressed complex variables as eligible
8514 for promotion to gimple registers. We'll transform their uses
8515 as we find them. */
8516 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8517 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
8518 && !TREE_THIS_VOLATILE (parm)
8519 && !needs_to_live_in_memory (parm))
8520 DECL_GIMPLE_REG_P (parm) = 1;
8523 ret = DECL_RESULT (fndecl);
8524 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
8525 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
8526 && !needs_to_live_in_memory (ret))
8527 DECL_GIMPLE_REG_P (ret) = 1;
8529 bind = gimplify_body (fndecl, true);
8531 /* The tree body of the function is no longer needed, replace it
8532 with the new GIMPLE body. */
8533 seq = NULL;
8534 gimple_seq_add_stmt (&seq, bind);
8535 gimple_set_body (fndecl, seq);
8537 /* If we're instrumenting function entry/exit, then prepend the call to
8538 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8539 catch the exit hook. */
8540 /* ??? Add some way to ignore exceptions for this TFE. */
8541 if (flag_instrument_function_entry_exit
8542 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8543 && !flag_instrument_functions_exclude_p (fndecl))
8545 tree x;
8546 gimple new_bind;
8547 gimple tf;
8548 gimple_seq cleanup = NULL, body = NULL;
8549 tree tmp_var;
8550 gimple call;
8552 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8553 call = gimple_build_call (x, 1, integer_zero_node);
8554 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8555 gimple_call_set_lhs (call, tmp_var);
8556 gimplify_seq_add_stmt (&cleanup, call);
8557 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
8558 call = gimple_build_call (x, 2,
8559 build_fold_addr_expr (current_function_decl),
8560 tmp_var);
8561 gimplify_seq_add_stmt (&cleanup, call);
8562 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
8564 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8565 call = gimple_build_call (x, 1, integer_zero_node);
8566 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8567 gimple_call_set_lhs (call, tmp_var);
8568 gimplify_seq_add_stmt (&body, call);
8569 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
8570 call = gimple_build_call (x, 2,
8571 build_fold_addr_expr (current_function_decl),
8572 tmp_var);
8573 gimplify_seq_add_stmt (&body, call);
8574 gimplify_seq_add_stmt (&body, tf);
8575 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
8576 /* Clear the block for BIND, since it is no longer directly inside
8577 the function, but within a try block. */
8578 gimple_bind_set_block (bind, NULL);
8580 /* Replace the current function body with the body
8581 wrapped in the try/finally TF. */
8582 seq = NULL;
8583 gimple_seq_add_stmt (&seq, new_bind);
8584 gimple_set_body (fndecl, seq);
8587 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8588 cfun->curr_properties = PROP_gimple_any;
8590 pop_cfun ();
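/* Illustrative sketch (C-like pseudo code, not the exact GIMPLE):
   with -finstrument-functions the resulting body of FNDECL has the
   shape

     {
       void *return_addr = __builtin_return_address (0);
       __cyg_profile_func_enter (&fndecl, return_addr);
       try
         {
           <original GIMPLE_BIND of fndecl>
         }
       finally
         {
           return_addr = __builtin_return_address (0);
           __cyg_profile_func_exit (&fndecl, return_addr);
         }
     }
*/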
8593 /* Some transformations like inlining may invalidate the GIMPLE form
8594 for operands. This function traverses all the operands in STMT and
8595 gimplifies anything that is not a valid gimple operand. Any new
8596 GIMPLE statements are inserted before *GSI_P. */
8598 void
8599 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
8601 size_t i, num_ops;
8602 tree lhs;
8603 gimple_seq pre = NULL;
8604 gimple post_stmt = NULL;
8605 struct gimplify_ctx gctx;
8607 push_gimplify_context (&gctx);
8608 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8610 switch (gimple_code (stmt))
8612 case GIMPLE_COND:
8613 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
8614 is_gimple_val, fb_rvalue);
8615 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
8616 is_gimple_val, fb_rvalue);
8617 break;
8618 case GIMPLE_SWITCH:
8619 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
8620 is_gimple_val, fb_rvalue);
8621 break;
8622 case GIMPLE_OMP_ATOMIC_LOAD:
8623 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
8624 is_gimple_val, fb_rvalue);
8625 break;
8626 case GIMPLE_ASM:
8628 size_t i, noutputs = gimple_asm_noutputs (stmt);
8629 const char *constraint, **oconstraints;
8630 bool allows_mem, allows_reg, is_inout;
8632 oconstraints
8633 = (const char **) alloca ((noutputs) * sizeof (const char *));
8634 for (i = 0; i < noutputs; i++)
8636 tree op = gimple_asm_output_op (stmt, i);
8637 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8638 oconstraints[i] = constraint;
8639 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
8640 &allows_reg, &is_inout);
8641 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8642 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
8643 fb_lvalue | fb_mayfail);
8645 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
8647 tree op = gimple_asm_input_op (stmt, i);
8648 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8649 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
8650 oconstraints, &allows_mem, &allows_reg);
8651 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
8652 allows_reg = 0;
8653 if (!allows_reg && allows_mem)
8654 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8655 is_gimple_lvalue, fb_lvalue | fb_mayfail);
8656 else
8657 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8658 is_gimple_asm_val, fb_rvalue);
8661 break;
8662 default:
8663 /* NOTE: We start gimplifying operands from last to first to
8664 make sure that side-effects on the RHS of calls, assignments
8665 and ASMs are executed before the LHS. The ordering is not
8666 important for other statements. */
8667 num_ops = gimple_num_ops (stmt);
8668 for (i = num_ops; i > 0; i--)
8670 tree op = gimple_op (stmt, i - 1);
8671 if (op == NULL_TREE)
8672 continue;
8673 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
8674 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
8675 else if (i == 2
8676 && is_gimple_assign (stmt)
8677 && num_ops == 2
8678 && get_gimple_rhs_class (gimple_expr_code (stmt))
8679 == GIMPLE_SINGLE_RHS)
8680 gimplify_expr (&op, &pre, NULL,
8681 rhs_predicate_for (gimple_assign_lhs (stmt)),
8682 fb_rvalue);
8683 else if (i == 2 && is_gimple_call (stmt))
8685 if (TREE_CODE (op) == FUNCTION_DECL)
8686 continue;
8687 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
8689 else
8690 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
8691 gimple_set_op (stmt, i - 1, op);
8694 lhs = gimple_get_lhs (stmt);
8695 /* If the LHS changed in a way that requires a simple RHS,
8696 create a temporary. */
8697 if (lhs && !is_gimple_reg (lhs))
8699 bool need_temp = false;
8701 if (is_gimple_assign (stmt)
8702 && num_ops == 2
8703 && get_gimple_rhs_class (gimple_expr_code (stmt))
8704 == GIMPLE_SINGLE_RHS)
8705 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
8706 rhs_predicate_for (gimple_assign_lhs (stmt)),
8707 fb_rvalue);
8708 else if (is_gimple_reg (lhs))
8710 if (is_gimple_reg_type (TREE_TYPE (lhs)))
8712 if (is_gimple_call (stmt))
8714 i = gimple_call_flags (stmt);
8715 if ((i & ECF_LOOPING_CONST_OR_PURE)
8716 || !(i & (ECF_CONST | ECF_PURE)))
8717 need_temp = true;
8719 if (stmt_can_throw_internal (stmt))
8720 need_temp = true;
8723 else
8725 if (is_gimple_reg_type (TREE_TYPE (lhs)))
8726 need_temp = true;
8727 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
8729 if (is_gimple_call (stmt))
8731 tree fndecl = gimple_call_fndecl (stmt);
8733 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
8734 && !(fndecl && DECL_RESULT (fndecl)
8735 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
8736 need_temp = true;
8738 else
8739 need_temp = true;
8742 if (need_temp)
8744 tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
8745 if (gimple_in_ssa_p (cfun))
8746 temp = make_ssa_name (temp, NULL);
8747 gimple_set_lhs (stmt, temp);
8748 post_stmt = gimple_build_assign (lhs, temp);
8751 break;
8754 if (!gimple_seq_empty_p (pre))
8755 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
8756 if (post_stmt)
8757 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
8759 pop_gimplify_context (NULL);
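/* Usage sketch (hypothetical caller): after a transformation such as
   inlining has substituted an operand that is no longer valid
   GIMPLE, the statement can be repaired in place:

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     gimple_regimplify_operands (stmt, &gsi);

   Any statements needed to compute the new operands are inserted
   before STMT.  */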
8762 /* Expand EXPR to list of gimple statements STMTS. GIMPLE_TEST_F specifies
8763 the predicate that will hold for the result. If VAR is not NULL, make the
8764 base variable of the final destination be VAR if suitable. */
8766 tree
8767 force_gimple_operand_1 (tree expr, gimple_seq *stmts,
8768 gimple_predicate gimple_test_f, tree var)
8770 enum gimplify_status ret;
8771 struct gimplify_ctx gctx;
8772 location_t saved_location;
8774 *stmts = NULL;
8776 /* gimple_test_f might be more strict than is_gimple_val, make
8777 sure we pass both. Just checking gimple_test_f doesn't work
8778 because most gimple predicates do not work recursively. */
8779 if (is_gimple_val (expr)
8780 && (*gimple_test_f) (expr))
8781 return expr;
8783 push_gimplify_context (&gctx);
8784 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8785 gimplify_ctxp->allow_rhs_cond_expr = true;
8786 saved_location = input_location;
8787 input_location = UNKNOWN_LOCATION;
8789 if (var)
8791 if (gimplify_ctxp->into_ssa
8792 && is_gimple_reg (var))
8793 var = make_ssa_name (var, NULL);
8794 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
8797 if (TREE_CODE (expr) != MODIFY_EXPR
8798 && TREE_TYPE (expr) == void_type_node)
8800 gimplify_and_add (expr, stmts);
8801 expr = NULL_TREE;
8803 else
8805 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
8806 gcc_assert (ret != GS_ERROR);
8809 input_location = saved_location;
8810 pop_gimplify_context (NULL);
8812 return expr;
8815 /* Expand EXPR to list of gimple statements STMTS. If SIMPLE is true,
8816 force the result to be either ssa_name or an invariant, otherwise
8817 just force it to be a rhs expression. If VAR is not NULL, make the
8818 base variable of the final destination be VAR if suitable. */
8820 tree
8821 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
8823 return force_gimple_operand_1 (expr, stmts,
8824 simple ? is_gimple_val : is_gimple_reg_rhs,
8825 var);
8828 /* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
8829 and VAR. If some statements are produced, emits them at GSI.
8830 If BEFORE is true, the statements are inserted before GSI, otherwise
8831 they are inserted after it. M specifies the way GSI moves after
8832 insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values). */
8834 tree
8835 force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
8836 gimple_predicate gimple_test_f,
8837 tree var, bool before,
8838 enum gsi_iterator_update m)
8840 gimple_seq stmts;
8842 expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
8844 if (!gimple_seq_empty_p (stmts))
8846 if (before)
8847 gsi_insert_seq_before (gsi, stmts, m);
8848 else
8849 gsi_insert_seq_after (gsi, stmts, m);
8852 return expr;
8855 /* Invoke force_gimple_operand_1 for EXPR with parameter VAR.
8856 If SIMPLE is true, force the result to be either ssa_name or an invariant,
8857 otherwise just force it to be a rhs expression. If some statements are
8858 produced, emits them at GSI. If BEFORE is true, the statements are
8859 inserted before GSI, otherwise they are inserted after it. M specifies
8860 the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
8861 are the usual values). */
8863 tree
8864 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
8865 bool simple_p, tree var, bool before,
8866 enum gsi_iterator_update m)
8868 return force_gimple_operand_gsi_1 (gsi, expr,
8869 simple_p
8870 ? is_gimple_val : is_gimple_reg_rhs,
8871 var, before, m);
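/* Usage sketch (hypothetical caller, DECL and GSI assumed to exist):
   to materialize an address computation as a simple operand right
   before the statement at GSI:

     tree addr = build_fold_addr_expr (decl);
     addr = force_gimple_operand_gsi (&gsi, addr, true, NULL_TREE,
                                      true, GSI_SAME_STMT);

   ADDR is then either an SSA name or an invariant, and any helper
   statements have been inserted before GSI.  */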
8874 /* Return a dummy expression of type TYPE in order to keep going after an
8875 error. */
8877 static tree
8878 dummy_object (tree type)
8880 tree t = build_int_cst (build_pointer_type (type), 0);
8881 return build2 (MEM_REF, type, t, t);
8884 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
8885 builtin function, but a very special sort of operator. */
8887 enum gimplify_status
8888 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
8890 tree promoted_type, have_va_type;
8891 tree valist = TREE_OPERAND (*expr_p, 0);
8892 tree type = TREE_TYPE (*expr_p);
8893 tree t;
8894 location_t loc = EXPR_LOCATION (*expr_p);
8896 /* Verify that valist is of the proper type. */
8897 have_va_type = TREE_TYPE (valist);
8898 if (have_va_type == error_mark_node)
8899 return GS_ERROR;
8900 have_va_type = targetm.canonical_va_list_type (have_va_type);
8902 if (have_va_type == NULL_TREE)
8904 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
8905 return GS_ERROR;
8908 /* Generate a diagnostic for requesting data of a type that cannot
8909 be passed through `...' due to type promotion at the call site. */
8910 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
8911 != type)
8913 static bool gave_help;
8914 bool warned;
8916 /* Unfortunately, this is merely undefined, rather than a constraint
8917 violation, so we cannot make this an error. If this call is never
8918 executed, the program is still strictly conforming. */
8919 warned = warning_at (loc, 0,
8920 "%qT is promoted to %qT when passed through %<...%>",
8921 type, promoted_type);
8922 if (!gave_help && warned)
8924 gave_help = true;
8925 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
8926 promoted_type, type);
8929 /* We can, however, treat "undefined" any way we please.
8930 Call abort to encourage the user to fix the program. */
8931 if (warned)
8932 inform (loc, "if this code is reached, the program will abort");
8933 /* Before the abort, allow the evaluation of the va_list
8934 expression to exit or longjmp. */
8935 gimplify_and_add (valist, pre_p);
8936 t = build_call_expr_loc (loc,
8937 builtin_decl_implicit (BUILT_IN_TRAP), 0);
8938 gimplify_and_add (t, pre_p);
8940 /* This is dead code, but go ahead and finish so that the
8941 mode of the result comes out right. */
8942 *expr_p = dummy_object (type);
8943 return GS_ALL_DONE;
8945 else
8947 /* Make it easier for the backends by protecting the valist argument
8948 from multiple evaluations. */
8949 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
8951 /* For this case, the backends will be expecting a pointer to
8952 the element type of the ABI va_list (TREE_TYPE of have_va_type), but
8953 it's possible we've actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
8954 So fix it. */
8955 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8957 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
8958 valist = fold_convert_loc (loc, p1,
8959 build_fold_addr_expr_loc (loc, valist));
8962 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
8964 else
8965 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
8967 if (!targetm.gimplify_va_arg_expr)
8968 /* FIXME: Once most targets are converted we should merely
8969 assert this is non-null. */
8970 return GS_ALL_DONE;
8972 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
8973 return GS_OK;
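/* Illustrative sketch (hypothetical C source): a request such as

     short s = va_arg (ap, short);

   takes the warning path above, because short is promoted to int
   when passed through `...', and the expression is replaced by a
   trap followed by a dummy object of the requested type.  */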
8977 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
8979 DST/SRC are the destination and source respectively. You can pass
8980 ungimplified trees in DST or SRC, in which case they will be
8981 converted to a gimple operand if necessary.
8983 This function returns the newly created GIMPLE_ASSIGN tuple. */
8985 gimple
8986 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
8988 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8989 gimplify_and_add (t, seq_p);
8990 ggc_free (t);
8991 return gimple_seq_last_stmt (*seq_p);
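/* Usage sketch (hypothetical caller; DST, SRC and GSI assumed to
   exist):

     gimple_seq seq = NULL;
     gimple assign = gimplify_assign (dst, src, &seq);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   SEQ ends with the returned GIMPLE_ASSIGN and also contains any
   statements needed to gimplify DST and SRC.  */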