/* Source: gcc/gimplify.c from official-gcc.git,
   blob 5c71d2c360501ed7c26eeae23ef93077e0a7163a.  */
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tree.h"
27 #include "expr.h"
28 #include "gimple.h"
29 #include "gimplify.h"
30 #include "gimple-iterator.h"
31 #include "stringpool.h"
32 #include "calls.h"
33 #include "varasm.h"
34 #include "stor-layout.h"
35 #include "stmt.h"
36 #include "print-tree.h"
37 #include "tree-iterator.h"
38 #include "tree-inline.h"
39 #include "tree-pretty-print.h"
40 #include "langhooks.h"
41 #include "bitmap.h"
42 #include "gimple-ssa.h"
43 #include "cgraph.h"
44 #include "tree-cfg.h"
45 #include "tree-ssanames.h"
46 #include "tree-ssa.h"
47 #include "diagnostic-core.h"
48 #include "target.h"
49 #include "splay-tree.h"
50 #include "omp-low.h"
51 #include "gimple-low.h"
52 #include "cilk.h"
54 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
55 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
/* Per-variable data-sharing flags recorded in an OpenMP gimplification
   context.  The low bits are an exclusive-ish sharing class; the rest
   are orthogonal modifier flags OR'ed on top.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,
  GOVD_MAP_TO_ONLY = 8192,

  /* Mask selecting the flags that determine a variable's sharing class.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of OpenMP region currently being gimplified.  Values are bit
   patterns: e.g. ORT_COMBINED_PARALLEL is ORT_PARALLEL with the low
   "combined" bit set, and (region_type & ORT_TASK) tests for tasks.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_TARGET_DATA = 16,
  ORT_TARGET = 32
};
92 struct gimplify_omp_ctx
94 struct gimplify_omp_ctx *outer_context;
95 splay_tree variables;
96 struct pointer_set_t *privatized_types;
97 location_t location;
98 enum omp_clause_default_kind default_kind;
99 enum omp_region_type region_type;
100 bool combined_loop;
/* Current gimplification context and current OpenMP context, maintained
   as stacks via their prev/outer pointers.  */
struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
107 /* Forward declaration. */
108 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
110 /* Shorter alias name for the above function for use in gimplify.c
111 only. */
113 static inline void
114 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
116 gimple_seq_add_stmt_without_update (seq_p, gs);
119 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
120 NULL, a new sequence is allocated. This function is
121 similar to gimple_seq_add_seq, but does not scan the operands.
122 During gimplification, we need to manipulate statement sequences
123 before the def/use vectors have been constructed. */
125 static void
126 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
128 gimple_stmt_iterator si;
130 if (src == NULL)
131 return;
133 si = gsi_last (*dst_p);
134 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
137 /* Set up a context for the gimplifier. */
139 void
140 push_gimplify_context (struct gimplify_ctx *c)
142 memset (c, '\0', sizeof (*c));
143 c->prev_context = gimplify_ctxp;
144 gimplify_ctxp = c;
147 /* Tear down a context for the gimplifier. If BODY is non-null, then
148 put the temporaries into the outer BIND_EXPR. Otherwise, put them
149 in the local_decls.
151 BODY is not a sequence, but the first tuple in a sequence. */
153 void
154 pop_gimplify_context (gimple body)
156 struct gimplify_ctx *c = gimplify_ctxp;
158 gcc_assert (c
159 && (!c->bind_expr_stack.exists ()
160 || c->bind_expr_stack.is_empty ()));
161 c->bind_expr_stack.release ();
162 gimplify_ctxp = c->prev_context;
164 if (body)
165 declare_vars (c->temps, body, false);
166 else
167 record_vars (c->temps);
169 if (c->temp_htab.is_created ())
170 c->temp_htab.dispose ();
173 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
175 static void
176 gimple_push_bind_expr (gimple gimple_bind)
178 gimplify_ctxp->bind_expr_stack.reserve (8);
179 gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
182 /* Pop the first element off the stack of bindings. */
184 static void
185 gimple_pop_bind_expr (void)
187 gimplify_ctxp->bind_expr_stack.pop ();
190 /* Return the first element of the stack of bindings. */
192 gimple
193 gimple_current_bind_expr (void)
195 return gimplify_ctxp->bind_expr_stack.last ();
198 /* Return the stack of bindings created during gimplification. */
200 vec<gimple>
201 gimple_bind_expr_stack (void)
203 return gimplify_ctxp->bind_expr_stack;
206 /* Return true iff there is a COND_EXPR between us and the innermost
207 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
209 static bool
210 gimple_conditional_context (void)
212 return gimplify_ctxp->conditions > 0;
215 /* Note that we've entered a COND_EXPR. */
217 static void
218 gimple_push_condition (void)
220 #ifdef ENABLE_GIMPLE_CHECKING
221 if (gimplify_ctxp->conditions == 0)
222 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
223 #endif
224 ++(gimplify_ctxp->conditions);
227 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
228 now, add any conditional cleanups we've seen to the prequeue. */
230 static void
231 gimple_pop_condition (gimple_seq *pre_p)
233 int conds = --(gimplify_ctxp->conditions);
235 gcc_assert (conds >= 0);
236 if (conds == 0)
238 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
239 gimplify_ctxp->conditional_cleanups = NULL;
243 /* A stable comparison routine for use with splay trees and DECLs. */
245 static int
246 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
248 tree a = (tree) xa;
249 tree b = (tree) xb;
251 return DECL_UID (a) - DECL_UID (b);
254 /* Create a new omp construct that deals with variable remapping. */
256 static struct gimplify_omp_ctx *
257 new_omp_context (enum omp_region_type region_type)
259 struct gimplify_omp_ctx *c;
261 c = XCNEW (struct gimplify_omp_ctx);
262 c->outer_context = gimplify_omp_ctxp;
263 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
264 c->privatized_types = pointer_set_create ();
265 c->location = input_location;
266 c->region_type = region_type;
267 if ((region_type & ORT_TASK) == 0)
268 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
269 else
270 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
272 return c;
275 /* Destroy an omp construct that deals with variable remapping. */
277 static void
278 delete_omp_context (struct gimplify_omp_ctx *c)
280 splay_tree_delete (c->variables);
281 pointer_set_destroy (c->privatized_types);
282 XDELETE (c);
285 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
286 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
288 /* Both gimplify the statement T and append it to *SEQ_P. This function
289 behaves exactly as gimplify_stmt, but you don't have to pass T as a
290 reference. */
292 void
293 gimplify_and_add (tree t, gimple_seq *seq_p)
295 gimplify_stmt (&t, seq_p);
298 /* Gimplify statement T into sequence *SEQ_P, and return the first
299 tuple in the sequence of generated tuples for this statement.
300 Return NULL if gimplifying T produced no tuples. */
302 static gimple
303 gimplify_and_return_first (tree t, gimple_seq *seq_p)
305 gimple_stmt_iterator last = gsi_last (*seq_p);
307 gimplify_and_add (t, seq_p);
309 if (!gsi_end_p (last))
311 gsi_next (&last);
312 return gsi_stmt (last);
314 else
315 return gimple_seq_first_stmt (*seq_p);
318 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
319 LHS, or for a call argument. */
321 static bool
322 is_gimple_mem_rhs (tree t)
324 /* If we're dealing with a renamable type, either source or dest must be
325 a renamed variable. */
326 if (is_gimple_reg_type (TREE_TYPE (t)))
327 return is_gimple_val (t);
328 else
329 return is_gimple_val (t) || is_gimple_lvalue (t);
332 /* Return true if T is a CALL_EXPR or an expression that can be
333 assigned to a temporary. Note that this predicate should only be
334 used during gimplification. See the rationale for this in
335 gimplify_modify_expr. */
337 static bool
338 is_gimple_reg_rhs_or_call (tree t)
340 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
341 || TREE_CODE (t) == CALL_EXPR);
344 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
345 this predicate should only be used during gimplification. See the
346 rationale for this in gimplify_modify_expr. */
348 static bool
349 is_gimple_mem_rhs_or_call (tree t)
351 /* If we're dealing with a renamable type, either source or dest must be
352 a renamed variable. */
353 if (is_gimple_reg_type (TREE_TYPE (t)))
354 return is_gimple_val (t);
355 else
356 return (is_gimple_val (t) || is_gimple_lvalue (t)
357 || TREE_CODE (t) == CALL_EXPR);
360 /* Create a temporary with a name derived from VAL. Subroutine of
361 lookup_tmp_var; nobody else should call this function. */
363 static inline tree
364 create_tmp_from_val (tree val, bool is_formal)
366 /* Drop all qualifiers and address-space information from the value type. */
367 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
368 tree var = create_tmp_var (type, get_name (val));
369 if (is_formal
370 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
371 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
372 DECL_GIMPLE_REG_P (var) = 1;
373 return var;
376 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
377 an existing expression temporary. */
379 static tree
380 lookup_tmp_var (tree val, bool is_formal)
382 tree ret;
384 /* If not optimizing, never really reuse a temporary. local-alloc
385 won't allocate any variable that is used in more than one basic
386 block, which means it will go into memory, causing much extra
387 work in reload and final and poorer code generation, outweighing
388 the extra memory allocation here. */
389 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
390 ret = create_tmp_from_val (val, is_formal);
391 else
393 elt_t elt, *elt_p;
394 elt_t **slot;
396 elt.val = val;
397 if (!gimplify_ctxp->temp_htab.is_created ())
398 gimplify_ctxp->temp_htab.create (1000);
399 slot = gimplify_ctxp->temp_htab.find_slot (&elt, INSERT);
400 if (*slot == NULL)
402 elt_p = XNEW (elt_t);
403 elt_p->val = val;
404 elt_p->temp = ret = create_tmp_from_val (val, is_formal);
405 *slot = elt_p;
407 else
409 elt_p = *slot;
410 ret = elt_p->temp;
414 return ret;
417 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
419 static tree
420 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
421 bool is_formal)
423 tree t, mod;
425 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
426 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
427 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
428 fb_rvalue);
430 if (gimplify_ctxp->into_ssa
431 && is_gimple_reg_type (TREE_TYPE (val)))
432 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
433 else
434 t = lookup_tmp_var (val, is_formal);
436 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
438 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));
440 /* gimplify_modify_expr might want to reduce this further. */
441 gimplify_and_add (mod, pre_p);
442 ggc_free (mod);
444 return t;
447 /* Return a formal temporary variable initialized with VAL. PRE_P is as
448 in gimplify_expr. Only use this function if:
450 1) The value of the unfactored expression represented by VAL will not
451 change between the initialization and use of the temporary, and
452 2) The temporary will not be otherwise modified.
454 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
455 and #2 means it is inappropriate for && temps.
457 For other cases, use get_initialized_tmp_var instead. */
459 tree
460 get_formal_tmp_var (tree val, gimple_seq *pre_p)
462 return internal_get_tmp_var (val, pre_p, NULL, true);
465 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
466 are as in gimplify_expr. */
468 tree
469 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
471 return internal_get_tmp_var (val, pre_p, post_p, false);
474 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
475 generate debug info for them; otherwise don't. */
477 void
478 declare_vars (tree vars, gimple scope, bool debug_info)
480 tree last = vars;
481 if (last)
483 tree temps, block;
485 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
487 temps = nreverse (last);
489 block = gimple_bind_block (scope);
490 gcc_assert (!block || TREE_CODE (block) == BLOCK);
491 if (!block || !debug_info)
493 DECL_CHAIN (last) = gimple_bind_vars (scope);
494 gimple_bind_set_vars (scope, temps);
496 else
498 /* We need to attach the nodes both to the BIND_EXPR and to its
499 associated BLOCK for debugging purposes. The key point here
500 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
501 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
502 if (BLOCK_VARS (block))
503 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
504 else
506 gimple_bind_set_vars (scope,
507 chainon (gimple_bind_vars (scope), temps));
508 BLOCK_VARS (block) = temps;
514 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
515 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
516 no such upper bound can be obtained. */
518 static void
519 force_constant_size (tree var)
521 /* The only attempt we make is by querying the maximum size of objects
522 of the variable's type. */
524 HOST_WIDE_INT max_size;
526 gcc_assert (TREE_CODE (var) == VAR_DECL);
528 max_size = max_int_size_in_bytes (TREE_TYPE (var));
530 gcc_assert (max_size >= 0);
532 DECL_SIZE_UNIT (var)
533 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
534 DECL_SIZE (var)
535 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
538 /* Push the temporary variable TMP into the current binding. */
540 void
541 gimple_add_tmp_var (tree tmp)
543 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
545 /* Later processing assumes that the object size is constant, which might
546 not be true at this point. Force the use of a constant upper bound in
547 this case. */
548 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
549 force_constant_size (tmp);
551 DECL_CONTEXT (tmp) = current_function_decl;
552 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
554 if (gimplify_ctxp)
556 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
557 gimplify_ctxp->temps = tmp;
559 /* Mark temporaries local within the nearest enclosing parallel. */
560 if (gimplify_omp_ctxp)
562 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
563 while (ctx
564 && (ctx->region_type == ORT_WORKSHARE
565 || ctx->region_type == ORT_SIMD))
566 ctx = ctx->outer_context;
567 if (ctx)
568 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
571 else if (cfun)
572 record_vars (tmp);
573 else
575 gimple_seq body_seq;
577 /* This case is for nested functions. We need to expose the locals
578 they create. */
579 body_seq = gimple_body (current_function_decl);
580 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
586 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
587 nodes that are referenced more than once in GENERIC functions. This is
588 necessary because gimplification (translation into GIMPLE) is performed
589 by modifying tree nodes in-place, so gimplication of a shared node in a
590 first context could generate an invalid GIMPLE form in a second context.
592 This is achieved with a simple mark/copy/unmark algorithm that walks the
593 GENERIC representation top-down, marks nodes with TREE_VISITED the first
594 time it encounters them, duplicates them if they already have TREE_VISITED
595 set, and finally removes the TREE_VISITED marks it has set.
597 The algorithm works only at the function level, i.e. it generates a GENERIC
598 representation of a function with no nodes shared within the function when
599 passed a GENERIC function (except for nodes that are allowed to be shared).
601 At the global level, it is also necessary to unshare tree nodes that are
602 referenced in more than one function, for the same aforementioned reason.
603 This requires some cooperation from the front-end. There are 2 strategies:
605 1. Manual unsharing. The front-end needs to call unshare_expr on every
606 expression that might end up being shared across functions.
608 2. Deep unsharing. This is an extension of regular unsharing. Instead
609 of calling unshare_expr on expressions that might be shared across
610 functions, the front-end pre-marks them with TREE_VISITED. This will
611 ensure that they are unshared on the first reference within functions
612 when the regular unsharing algorithm runs. The counterpart is that
613 this algorithm must look deeper than for manual unsharing, which is
614 specified by LANG_HOOKS_DEEP_UNSHARING.
616 If there are only few specific cases of node sharing across functions, it is
617 probably easier for a front-end to unshare the expressions manually. On the
618 contrary, if the expressions generated at the global level are as widespread
619 as expressions generated within functions, deep unsharing is very likely the
620 way to go. */
622 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
623 These nodes model computations that must be done once. If we were to
624 unshare something like SAVE_EXPR(i++), the gimplification process would
625 create wrong code. However, if DATA is non-null, it must hold a pointer
626 set that is used to unshare the subtrees of these nodes. */
628 static tree
629 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
631 tree t = *tp;
632 enum tree_code code = TREE_CODE (t);
634 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
635 copy their subtrees if we can make sure to do it only once. */
636 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
638 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
640 else
641 *walk_subtrees = 0;
644 /* Stop at types, decls, constants like copy_tree_r. */
645 else if (TREE_CODE_CLASS (code) == tcc_type
646 || TREE_CODE_CLASS (code) == tcc_declaration
647 || TREE_CODE_CLASS (code) == tcc_constant
648 /* We can't do anything sensible with a BLOCK used as an
649 expression, but we also can't just die when we see it
650 because of non-expression uses. So we avert our eyes
651 and cross our fingers. Silly Java. */
652 || code == BLOCK)
653 *walk_subtrees = 0;
655 /* Cope with the statement expression extension. */
656 else if (code == STATEMENT_LIST)
659 /* Leave the bulk of the work to copy_tree_r itself. */
660 else
661 copy_tree_r (tp, walk_subtrees, NULL);
663 return NULL_TREE;
666 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
667 If *TP has been visited already, then *TP is deeply copied by calling
668 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
670 static tree
671 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
673 tree t = *tp;
674 enum tree_code code = TREE_CODE (t);
676 /* Skip types, decls, and constants. But we do want to look at their
677 types and the bounds of types. Mark them as visited so we properly
678 unmark their subtrees on the unmark pass. If we've already seen them,
679 don't look down further. */
680 if (TREE_CODE_CLASS (code) == tcc_type
681 || TREE_CODE_CLASS (code) == tcc_declaration
682 || TREE_CODE_CLASS (code) == tcc_constant)
684 if (TREE_VISITED (t))
685 *walk_subtrees = 0;
686 else
687 TREE_VISITED (t) = 1;
690 /* If this node has been visited already, unshare it and don't look
691 any deeper. */
692 else if (TREE_VISITED (t))
694 walk_tree (tp, mostly_copy_tree_r, data, NULL);
695 *walk_subtrees = 0;
698 /* Otherwise, mark the node as visited and keep looking. */
699 else
700 TREE_VISITED (t) = 1;
702 return NULL_TREE;
705 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
706 copy_if_shared_r callback unmodified. */
708 static inline void
709 copy_if_shared (tree *tp, void *data)
711 walk_tree (tp, copy_if_shared_r, data, NULL);
714 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
715 any nested functions. */
717 static void
718 unshare_body (tree fndecl)
720 struct cgraph_node *cgn = cgraph_get_node (fndecl);
721 /* If the language requires deep unsharing, we need a pointer set to make
722 sure we don't repeatedly unshare subtrees of unshareable nodes. */
723 struct pointer_set_t *visited
724 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
726 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
727 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
728 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
730 if (visited)
731 pointer_set_destroy (visited);
733 if (cgn)
734 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
735 unshare_body (cgn->decl);
738 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
739 Subtrees are walked until the first unvisited node is encountered. */
741 static tree
742 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
744 tree t = *tp;
746 /* If this node has been visited, unmark it and keep looking. */
747 if (TREE_VISITED (t))
748 TREE_VISITED (t) = 0;
750 /* Otherwise, don't look any deeper. */
751 else
752 *walk_subtrees = 0;
754 return NULL_TREE;
757 /* Unmark the visited trees rooted at *TP. */
759 static inline void
760 unmark_visited (tree *tp)
762 walk_tree (tp, unmark_visited_r, NULL, NULL);
765 /* Likewise, but mark all trees as not visited. */
767 static void
768 unvisit_body (tree fndecl)
770 struct cgraph_node *cgn = cgraph_get_node (fndecl);
772 unmark_visited (&DECL_SAVED_TREE (fndecl));
773 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
774 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
776 if (cgn)
777 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
778 unvisit_body (cgn->decl);
781 /* Unconditionally make an unshared copy of EXPR. This is used when using
782 stored expressions which span multiple functions, such as BINFO_VTABLE,
783 as the normal unsharing process can't tell that they're shared. */
785 tree
786 unshare_expr (tree expr)
788 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
789 return expr;
792 /* Worker for unshare_expr_without_location. */
794 static tree
795 prune_expr_location (tree *tp, int *walk_subtrees, void *)
797 if (EXPR_P (*tp))
798 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
799 else
800 *walk_subtrees = 0;
801 return NULL_TREE;
804 /* Similar to unshare_expr but also prune all expression locations
805 from EXPR. */
807 tree
808 unshare_expr_without_location (tree expr)
810 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
811 if (EXPR_P (expr))
812 walk_tree (&expr, prune_expr_location, NULL, NULL);
813 return expr;
816 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
817 contain statements and have a value. Assign its value to a temporary
818 and give it void_type_node. Return the temporary, or NULL_TREE if
819 WRAPPER was already void. */
821 tree
822 voidify_wrapper_expr (tree wrapper, tree temp)
824 tree type = TREE_TYPE (wrapper);
825 if (type && !VOID_TYPE_P (type))
827 tree *p;
829 /* Set p to point to the body of the wrapper. Loop until we find
830 something that isn't a wrapper. */
831 for (p = &wrapper; p && *p; )
833 switch (TREE_CODE (*p))
835 case BIND_EXPR:
836 TREE_SIDE_EFFECTS (*p) = 1;
837 TREE_TYPE (*p) = void_type_node;
838 /* For a BIND_EXPR, the body is operand 1. */
839 p = &BIND_EXPR_BODY (*p);
840 break;
842 case CLEANUP_POINT_EXPR:
843 case TRY_FINALLY_EXPR:
844 case TRY_CATCH_EXPR:
845 TREE_SIDE_EFFECTS (*p) = 1;
846 TREE_TYPE (*p) = void_type_node;
847 p = &TREE_OPERAND (*p, 0);
848 break;
850 case STATEMENT_LIST:
852 tree_stmt_iterator i = tsi_last (*p);
853 TREE_SIDE_EFFECTS (*p) = 1;
854 TREE_TYPE (*p) = void_type_node;
855 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
857 break;
859 case COMPOUND_EXPR:
860 /* Advance to the last statement. Set all container types to
861 void. */
862 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
864 TREE_SIDE_EFFECTS (*p) = 1;
865 TREE_TYPE (*p) = void_type_node;
867 break;
869 case TRANSACTION_EXPR:
870 TREE_SIDE_EFFECTS (*p) = 1;
871 TREE_TYPE (*p) = void_type_node;
872 p = &TRANSACTION_EXPR_BODY (*p);
873 break;
875 default:
876 /* Assume that any tree upon which voidify_wrapper_expr is
877 directly called is a wrapper, and that its body is op0. */
878 if (p == &wrapper)
880 TREE_SIDE_EFFECTS (*p) = 1;
881 TREE_TYPE (*p) = void_type_node;
882 p = &TREE_OPERAND (*p, 0);
883 break;
885 goto out;
889 out:
890 if (p == NULL || IS_EMPTY_STMT (*p))
891 temp = NULL_TREE;
892 else if (temp)
894 /* The wrapper is on the RHS of an assignment that we're pushing
895 down. */
896 gcc_assert (TREE_CODE (temp) == INIT_EXPR
897 || TREE_CODE (temp) == MODIFY_EXPR);
898 TREE_OPERAND (temp, 1) = *p;
899 *p = temp;
901 else
903 temp = create_tmp_var (type, "retval");
904 *p = build2 (INIT_EXPR, type, temp, *p);
907 return temp;
910 return NULL_TREE;
913 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
914 a temporary through which they communicate. */
916 static void
917 build_stack_save_restore (gimple *save, gimple *restore)
919 tree tmp_var;
921 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
922 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
923 gimple_call_set_lhs (*save, tmp_var);
925 *restore
926 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
927 1, tmp_var);
930 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
932 static enum gimplify_status
933 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
935 tree bind_expr = *expr_p;
936 bool old_save_stack = gimplify_ctxp->save_stack;
937 tree t;
938 gimple gimple_bind;
939 gimple_seq body, cleanup;
940 gimple stack_save;
942 tree temp = voidify_wrapper_expr (bind_expr, NULL);
944 /* Mark variables seen in this bind expr. */
945 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
947 if (TREE_CODE (t) == VAR_DECL)
949 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
951 /* Mark variable as local. */
952 if (ctx && !DECL_EXTERNAL (t)
953 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
954 || splay_tree_lookup (ctx->variables,
955 (splay_tree_key) t) == NULL))
956 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
958 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
960 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
961 cfun->has_local_explicit_reg_vars = true;
964 /* Preliminarily mark non-addressed complex variables as eligible
965 for promotion to gimple registers. We'll transform their uses
966 as we find them. */
967 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
968 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
969 && !TREE_THIS_VOLATILE (t)
970 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
971 && !needs_to_live_in_memory (t))
972 DECL_GIMPLE_REG_P (t) = 1;
975 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
976 BIND_EXPR_BLOCK (bind_expr));
977 gimple_push_bind_expr (gimple_bind);
979 gimplify_ctxp->save_stack = false;
981 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
982 body = NULL;
983 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
984 gimple_bind_set_body (gimple_bind, body);
986 cleanup = NULL;
987 stack_save = NULL;
988 if (gimplify_ctxp->save_stack)
990 gimple stack_restore;
992 /* Save stack on entry and restore it on exit. Add a try_finally
993 block to achieve this. */
994 build_stack_save_restore (&stack_save, &stack_restore);
996 gimplify_seq_add_stmt (&cleanup, stack_restore);
999 /* Add clobbers for all variables that go out of scope. */
1000 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1002 if (TREE_CODE (t) == VAR_DECL
1003 && !is_global_var (t)
1004 && DECL_CONTEXT (t) == current_function_decl
1005 && !DECL_HARD_REGISTER (t)
1006 && !TREE_THIS_VOLATILE (t)
1007 && !DECL_HAS_VALUE_EXPR_P (t)
1008 /* Only care for variables that have to be in memory. Others
1009 will be rewritten into SSA names, hence moved to the top-level. */
1010 && !is_gimple_reg (t)
1011 && flag_stack_reuse != SR_NONE)
1013 tree clobber = build_constructor (TREE_TYPE (t),
1014 NULL);
1015 TREE_THIS_VOLATILE (clobber) = 1;
1016 gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
1020 if (cleanup)
1022 gimple gs;
1023 gimple_seq new_body;
1025 new_body = NULL;
1026 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1027 GIMPLE_TRY_FINALLY);
1029 if (stack_save)
1030 gimplify_seq_add_stmt (&new_body, stack_save);
1031 gimplify_seq_add_stmt (&new_body, gs);
1032 gimple_bind_set_body (gimple_bind, new_body);
1035 gimplify_ctxp->save_stack = old_save_stack;
1036 gimple_pop_bind_expr ();
1038 gimplify_seq_add_stmt (pre_p, gimple_bind);
1040 if (temp)
1042 *expr_p = temp;
1043 return GS_OK;
1046 *expr_p = NULL_TREE;
1047 return GS_ALL_DONE;
1050 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1051 GIMPLE value, it is assigned to a new temporary and the statement is
1052 re-written to return the temporary.
1054 PRE_P points to the sequence where side effects that must happen before
1055 STMT should be stored. */
1057 static enum gimplify_status
1058 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1060 gimple ret;
1061 tree ret_expr = TREE_OPERAND (stmt, 0);
1062 tree result_decl, result;
1064 if (ret_expr == error_mark_node)
1065 return GS_ERROR;
1067 /* Implicit _Cilk_sync must be inserted right before any return statement
1068 if there is a _Cilk_spawn in the function. If the user has provided a
1069 _Cilk_sync, the optimizer should remove this duplicate one. */
1070 if (fn_contains_cilk_spawn_p (cfun))
1072 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1073 gimplify_and_add (impl_sync, pre_p);
1076 if (!ret_expr
1077 || TREE_CODE (ret_expr) == RESULT_DECL
1078 || ret_expr == error_mark_node)
1080 gimple ret = gimple_build_return (ret_expr);
1081 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1082 gimplify_seq_add_stmt (pre_p, ret);
1083 return GS_ALL_DONE;
1086 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1087 result_decl = NULL_TREE;
1088 else
1090 result_decl = TREE_OPERAND (ret_expr, 0);
1092 /* See through a return by reference. */
1093 if (TREE_CODE (result_decl) == INDIRECT_REF)
1094 result_decl = TREE_OPERAND (result_decl, 0);
1096 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1097 || TREE_CODE (ret_expr) == INIT_EXPR)
1098 && TREE_CODE (result_decl) == RESULT_DECL);
1101 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1102 Recall that aggregate_value_p is FALSE for any aggregate type that is
1103 returned in registers. If we're returning values in registers, then
1104 we don't want to extend the lifetime of the RESULT_DECL, particularly
1105 across another call. In addition, for those aggregates for which
1106 hard_function_value generates a PARALLEL, we'll die during normal
1107 expansion of structure assignments; there's special code in expand_return
1108 to handle this case that does not exist in expand_expr. */
1109 if (!result_decl)
1110 result = NULL_TREE;
1111 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1113 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1115 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1116 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1117 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1118 should be effectively allocated by the caller, i.e. all calls to
1119 this function must be subject to the Return Slot Optimization. */
1120 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1121 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1123 result = result_decl;
1125 else if (gimplify_ctxp->return_temp)
1126 result = gimplify_ctxp->return_temp;
1127 else
1129 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1131 /* ??? With complex control flow (usually involving abnormal edges),
1132 we can wind up warning about an uninitialized value for this. Due
1133 to how this variable is constructed and initialized, this is never
1134 true. Give up and never warn. */
1135 TREE_NO_WARNING (result) = 1;
1137 gimplify_ctxp->return_temp = result;
1140 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1141 Then gimplify the whole thing. */
1142 if (result != result_decl)
1143 TREE_OPERAND (ret_expr, 0) = result;
1145 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1147 ret = gimple_build_return (result);
1148 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1149 gimplify_seq_add_stmt (pre_p, ret);
1151 return GS_ALL_DONE;
1154 /* Gimplify a variable-length array DECL. */
1156 static void
1157 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1159 /* This is a variable-sized decl. Simplify its size and mark it
1160 for deferred expansion. */
1161 tree t, addr, ptr_type;
1163 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1164 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1166 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1167 if (DECL_HAS_VALUE_EXPR_P (decl))
1168 return;
1170 /* All occurrences of this decl in final gimplified code will be
1171 replaced by indirection. Setting DECL_VALUE_EXPR does two
1172 things: First, it lets the rest of the gimplifier know what
1173 replacement to use. Second, it lets the debug info know
1174 where to find the value. */
1175 ptr_type = build_pointer_type (TREE_TYPE (decl));
1176 addr = create_tmp_var (ptr_type, get_name (decl));
1177 DECL_IGNORED_P (addr) = 0;
1178 t = build_fold_indirect_ref (addr);
1179 TREE_THIS_NOTRAP (t) = 1;
1180 SET_DECL_VALUE_EXPR (decl, t);
1181 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1183 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1184 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1185 size_int (DECL_ALIGN (decl)));
1186 /* The call has been built for a variable-sized object. */
1187 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1188 t = fold_convert (ptr_type, t);
1189 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1191 gimplify_and_add (t, seq_p);
1193 /* Indicate that we need to restore the stack level when the
1194 enclosing BIND_EXPR is exited. */
1195 gimplify_ctxp->save_stack = true;
1198 /* A helper function to be called via walk_tree. Mark all labels under *TP
1199 as being forced. To be called for DECL_INITIAL of static variables. */
1201 static tree
1202 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1204 if (TYPE_P (*tp))
1205 *walk_subtrees = 0;
1206 if (TREE_CODE (*tp) == LABEL_DECL)
1207 FORCED_LABEL (*tp) = 1;
1209 return NULL_TREE;
1212 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1213 and initialization explicit. */
1215 static enum gimplify_status
1216 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1218 tree stmt = *stmt_p;
1219 tree decl = DECL_EXPR_DECL (stmt);
/* The DECL_EXPR itself is consumed here; it is replaced by nothing.  */
1221 *stmt_p = NULL_TREE;
1223 if (TREE_TYPE (decl) == error_mark_node)
1224 return GS_ERROR;
/* Gimplify any variable size expressions in the declared type first,
   so they are evaluated before the decl itself is used.  */
1226 if ((TREE_CODE (decl) == TYPE_DECL
1227 || TREE_CODE (decl) == VAR_DECL)
1228 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1229 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1231 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1232 in case its size expressions contain problematic nodes like CALL_EXPR. */
1233 if (TREE_CODE (decl) == TYPE_DECL
1234 && DECL_ORIGINAL_TYPE (decl)
1235 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1236 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1238 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1240 tree init = DECL_INITIAL (decl);
/* A decl whose size is not an INTEGER_CST is a VLA; additionally, with
   generic stack checking, large non-static locals are allocated
   dynamically as well.  */
1242 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1243 || (!TREE_STATIC (decl)
1244 && flag_stack_check == GENERIC_STACK_CHECK
1245 && compare_tree_int (DECL_SIZE_UNIT (decl),
1246 STACK_CHECK_MAX_VAR_SIZE) > 0))
1247 gimplify_vla_decl (decl, seq_p);
1249 /* Some front ends do not explicitly declare all anonymous
1250 artificial variables. We compensate here by declaring the
1251 variables, though it would be better if the front ends would
1252 explicitly declare them. */
1253 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1254 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1255 gimple_add_tmp_var (decl);
1257 if (init && init != error_mark_node)
1259 if (!TREE_STATIC (decl))
/* Lower the initializer to an explicit INIT_EXPR statement.  The
   original INIT_EXPR wrapper is dead after gimplification, hence the
   ggc_free; DECL_INITIAL is cleared so it is not emitted twice.  */
1261 DECL_INITIAL (decl) = NULL_TREE;
1262 init = build2 (INIT_EXPR, void_type_node, decl, init);
1263 gimplify_and_add (init, seq_p);
1264 ggc_free (init);
1266 else
1267 /* We must still examine initializers for static variables
1268 as they may contain a label address. */
1269 walk_tree (&init, force_labels_r, NULL, NULL);
1273 return GS_ALL_DONE;
1276 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1277 and replacing the LOOP_EXPR with goto, but if the loop contains an
1278 EXIT_EXPR, we need to append a label for it to jump to. */
1280 static enum gimplify_status
1281 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1283 tree saved_label = gimplify_ctxp->exit_label;
1284 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1286 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1288 gimplify_ctxp->exit_label = NULL_TREE;
1290 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1292 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1294 if (gimplify_ctxp->exit_label)
1295 gimplify_seq_add_stmt (pre_p,
1296 gimple_build_label (gimplify_ctxp->exit_label));
1298 gimplify_ctxp->exit_label = saved_label;
1300 *expr_p = NULL;
1301 return GS_ALL_DONE;
1304 /* Gimplify a statement list onto a sequence. These may be created either
1305 by an enlightened front-end, or by shortcut_cond_expr. */
1307 static enum gimplify_status
1308 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1310 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1312 tree_stmt_iterator i = tsi_start (*expr_p);
1314 while (!tsi_end_p (i))
1316 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1317 tsi_delink (&i);
1320 if (temp)
1322 *expr_p = temp;
1323 return GS_OK;
1326 return GS_ALL_DONE;
1330 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1331 branch to. */
1333 static enum gimplify_status
1334 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1336 tree switch_expr = *expr_p;
1337 gimple_seq switch_body_seq = NULL;
1338 enum gimplify_status ret;
/* The index type may be absent on the SWITCH_EXPR itself; fall back to
   the type of the switch condition.  */
1339 tree index_type = TREE_TYPE (switch_expr);
1340 if (index_type == NULL_TREE)
1341 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
1343 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1344 fb_rvalue);
1345 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1346 return ret;
1348 if (SWITCH_BODY (switch_expr))
1350 vec<tree> labels;
1351 vec<tree> saved_labels;
1352 tree default_case = NULL_TREE;
1353 gimple gimple_switch;
1355 /* If someone can be bothered to fill in the labels, they can
1356 be bothered to null out the body too. */
1357 gcc_assert (!SWITCH_LABELS (switch_expr));
1359 /* Save old labels, get new ones from body, then restore the old
1360 labels. Save all the things from the switch body to append after. */
1361 saved_labels = gimplify_ctxp->case_labels;
1362 gimplify_ctxp->case_labels.create (8);
1364 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1365 labels = gimplify_ctxp->case_labels;
1366 gimplify_ctxp->case_labels = saved_labels;
/* Sort/merge the collected case labels and extract the default.  */
1368 preprocess_case_label_vec_for_gimple (labels, index_type,
1369 &default_case);
/* GIMPLE_SWITCH requires a default label: if the body had none,
   synthesize an empty one and append its label after the body so the
   switch simply falls out.  */
1371 if (!default_case)
1373 gimple new_default;
1375 default_case
1376 = build_case_label (NULL_TREE, NULL_TREE,
1377 create_artificial_label (UNKNOWN_LOCATION));
1378 new_default = gimple_build_label (CASE_LABEL (default_case));
1379 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1382 gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
1383 default_case, labels);
1384 gimplify_seq_add_stmt (pre_p, gimple_switch);
1385 gimplify_seq_add_seq (pre_p, switch_body_seq);
1386 labels.release ();
1388 else
1389 gcc_assert (SWITCH_LABELS (switch_expr));
1391 return GS_ALL_DONE;
1394 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1396 static enum gimplify_status
1397 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1399 struct gimplify_ctx *ctxp;
1400 gimple gimple_label;
1402 /* Invalid OpenMP programs can play Duff's Device type games with
1403 #pragma omp parallel. At least in the C front end, we don't
1404 detect such invalid branches until after gimplification. */
1405 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1406 if (ctxp->case_labels.exists ())
1407 break;
1409 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1410 ctxp->case_labels.safe_push (*expr_p);
1411 gimplify_seq_add_stmt (pre_p, gimple_label);
1413 return GS_ALL_DONE;
1416 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1417 if necessary. */
1419 tree
1420 build_and_jump (tree *label_p)
1422 if (label_p == NULL)
1423 /* If there's nowhere to jump, just fall through. */
1424 return NULL_TREE;
1426 if (*label_p == NULL_TREE)
1428 tree label = create_artificial_label (UNKNOWN_LOCATION);
1429 *label_p = label;
1432 return build1 (GOTO_EXPR, void_type_node, *label_p);
1435 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1436 This also involves building a label to jump to and communicating it to
1437 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1439 static enum gimplify_status
1440 gimplify_exit_expr (tree *expr_p)
1442 tree cond = TREE_OPERAND (*expr_p, 0);
1443 tree expr;
1445 expr = build_and_jump (&gimplify_ctxp->exit_label);
1446 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1447 *expr_p = expr;
1449 return GS_OK;
1452 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1453 different from its canonical type, wrap the whole thing inside a
1454 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1455 type.
1457 The canonical type of a COMPONENT_REF is the type of the field being
1458 referenced--unless the field is a bit-field which can be read directly
1459 in a smaller mode, in which case the canonical type is the
1460 sign-appropriate type corresponding to that mode. */
1462 static void
1463 canonicalize_component_ref (tree *expr_p)
1465 tree expr = *expr_p;
1466 tree type;
1468 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1470 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1471 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1472 else
1473 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1475 /* One could argue that all the stuff below is not necessary for
1476 the non-bitfield case and declare it a FE error if type
1477 adjustment would be needed. */
1478 if (TREE_TYPE (expr) != type)
1480 #ifdef ENABLE_TYPES_CHECKING
1481 tree old_type = TREE_TYPE (expr);
1482 #endif
1483 int type_quals;
1485 /* We need to preserve qualifiers and propagate them from
1486 operand 0. */
1487 type_quals = TYPE_QUALS (type)
1488 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1489 if (TYPE_QUALS (type) != type_quals)
1490 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1492 /* Set the type of the COMPONENT_REF to the underlying type. */
1493 TREE_TYPE (expr) = type;
1495 #ifdef ENABLE_TYPES_CHECKING
1496 /* It is now a FE error, if the conversion from the canonical
1497 type to the original expression type is not useless. */
1498 gcc_assert (useless_type_conversion_p (old_type, type));
1499 #endif
1503 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1504 to foo, embed that change in the ADDR_EXPR by converting
1505 T array[U];
1506 (T *)&array
1508 &array[L]
1509 where L is the lower bound. For simplicity, only do this for constant
1510 lower bound.
1511 The constraint is that the type of &array[L] is trivially convertible
1512 to T *. */
1514 static void
1515 canonicalize_addr_expr (tree *expr_p)
1517 tree expr = *expr_p;
1518 tree addr_expr = TREE_OPERAND (expr, 0);
1519 tree datype, ddatype, pddatype;
1521 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1522 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1523 || TREE_CODE (addr_expr) != ADDR_EXPR)
1524 return;
1526 /* The addr_expr type should be a pointer to an array. */
1527 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1528 if (TREE_CODE (datype) != ARRAY_TYPE)
1529 return;
1531 /* The pointer to element type shall be trivially convertible to
1532 the expression pointer type. */
1533 ddatype = TREE_TYPE (datype);
1534 pddatype = build_pointer_type (ddatype);
1535 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1536 pddatype))
1537 return;
1539 /* The lower bound and element sizes must be constant. */
1540 if (!TYPE_SIZE_UNIT (ddatype)
1541 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1542 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1543 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1544 return;
1546 /* All checks succeeded. Build a new node to merge the cast. */
1547 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1548 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1549 NULL_TREE, NULL_TREE);
1550 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1552 /* We can have stripped a required restrict qualifier above. */
1553 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1554 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1557 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1558 underneath as appropriate. */
1560 static enum gimplify_status
1561 gimplify_conversion (tree *expr_p)
1563 location_t loc = EXPR_LOCATION (*expr_p);
1564 gcc_assert (CONVERT_EXPR_P (*expr_p));
1566 /* Then strip away all but the outermost conversion. */
1567 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1569 /* And remove the outermost conversion if it's useless. */
1570 if (tree_ssa_useless_type_conversion (*expr_p))
1571 *expr_p = TREE_OPERAND (*expr_p, 0);
1573 /* If we still have a conversion at the toplevel,
1574 then canonicalize some constructs. */
1575 if (CONVERT_EXPR_P (*expr_p))
1577 tree sub = TREE_OPERAND (*expr_p, 0);
1579 /* If a NOP conversion is changing the type of a COMPONENT_REF
1580 expression, then canonicalize its type now in order to expose more
1581 redundant conversions. */
1582 if (TREE_CODE (sub) == COMPONENT_REF)
1583 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1585 /* If a NOP conversion is changing a pointer to array of foo
1586 to a pointer to foo, embed that change in the ADDR_EXPR. */
1587 else if (TREE_CODE (sub) == ADDR_EXPR)
1588 canonicalize_addr_expr (expr_p);
1591 /* If we have a conversion to a non-register type force the
1592 use of a VIEW_CONVERT_EXPR instead. */
1593 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1594 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1595 TREE_OPERAND (*expr_p, 0));
1597 return GS_OK;
1600 /* Nonlocal VLAs seen in the current function. */
/* Used by gimplify_var_or_parm_decl: each referenced nonlocal VLA gets
   a local copy for debug info, inserted at most once per decl.  */
1601 static struct pointer_set_t *nonlocal_vlas;
1603 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1604 DECL_VALUE_EXPR, and it's worth re-examining things. */
1606 static enum gimplify_status
1607 gimplify_var_or_parm_decl (tree *expr_p)
1609 tree decl = *expr_p;
1611 /* ??? If this is a local variable, and it has not been seen in any
1612 outer BIND_EXPR, then it's probably the result of a duplicate
1613 declaration, for which we've already issued an error. It would
1614 be really nice if the front end wouldn't leak these at all.
1615 Currently the only known culprit is C++ destructors, as seen
1616 in g++.old-deja/g++.jason/binding.C. */
1617 if (TREE_CODE (decl) == VAR_DECL
1618 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1619 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1620 && decl_function_context (decl) == current_function_decl)
1622 gcc_assert (seen_error ());
1623 return GS_ERROR;
1626 /* When within an OpenMP context, notice uses of variables. */
1627 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1628 return GS_ALL_DONE;
1630 /* If the decl is an alias for another expression, substitute it now. */
1631 if (DECL_HAS_VALUE_EXPR_P (decl))
1633 tree value_expr = DECL_VALUE_EXPR (decl);
1635 /* For referenced nonlocal VLAs add a decl for debugging purposes
1636 to the current function. */
1637 if (TREE_CODE (decl) == VAR_DECL
1638 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1639 && nonlocal_vlas != NULL
1640 && TREE_CODE (value_expr) == INDIRECT_REF
1641 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1642 && decl_function_context (decl) != current_function_decl)
/* Walk out of any enclosing workshare/simd OpenMP regions; the debug
   copy is only added when we end up outside all such regions and the
   VLA has not been recorded yet (pointer_set_insert returns nonzero
   if it was already present).  */
1644 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1645 while (ctx
1646 && (ctx->region_type == ORT_WORKSHARE
1647 || ctx->region_type == ORT_SIMD))
1648 ctx = ctx->outer_context;
1649 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
/* Clone the decl, clear its RTL, and chain the copy into the
   outermost block of the current function so the debug info can
   refer to it; the copy shares the original's DECL_VALUE_EXPR.  */
1651 tree copy = copy_node (decl), block;
1653 lang_hooks.dup_lang_specific_decl (copy);
1654 SET_DECL_RTL (copy, 0);
1655 TREE_USED (copy) = 1;
1656 block = DECL_INITIAL (current_function_decl);
1657 DECL_CHAIN (copy) = BLOCK_VARS (block);
1658 BLOCK_VARS (block) = copy;
1659 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1660 DECL_HAS_VALUE_EXPR_P (copy) = 1;
/* Substitute an unshared copy of the value expression for the decl
   and ask the caller to re-gimplify the result.  */
1664 *expr_p = unshare_expr (value_expr);
1665 return GS_OK;
1668 return GS_ALL_DONE;
1671 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1672 node *EXPR_P.
1674 compound_lval
1675 : min_lval '[' val ']'
1676 | min_lval '.' ID
1677 | compound_lval '[' val ']'
1678 | compound_lval '.' ID
1680 This is not part of the original SIMPLE definition, which separates
1681 array and member references, but it seems reasonable to handle them
1682 together. Also, this way we don't run into problems with union
1683 aliasing; gcc requires that for accesses through a union to alias, the
1684 union reference must be explicit, which was not always the case when we
1685 were splitting up array and member refs.
1687 PRE_P points to the sequence where side effects that must happen before
1688 *EXPR_P should be stored.
1690 POST_P points to the sequence where side effects that must happen after
1691 *EXPR_P should be stored. */
1693 static enum gimplify_status
1694 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1695 fallback_t fallback)
1697 tree *p;
1698 enum gimplify_status ret = GS_ALL_DONE, tret;
1699 int i;
1700 location_t loc = EXPR_LOCATION (*expr_p);
1701 tree expr = *expr_p;
1703 /* Create a stack of the subexpressions so later we can walk them in
1704 order from inner to outer. */
1705 stack_vec<tree, 10> expr_stack;
1707 /* We can handle anything that get_inner_reference can deal with. */
1708 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1710 restart:
1711 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1712 if (TREE_CODE (*p) == INDIRECT_REF)
1713 *p = fold_indirect_ref_loc (loc, *p);
1715 if (handled_component_p (*p))
1717 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1718 additional COMPONENT_REFs. */
1719 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1720 && gimplify_var_or_parm_decl (p) == GS_OK)
1721 goto restart;
1722 else
1723 break;
1725 expr_stack.safe_push (*p);
1728 gcc_assert (expr_stack.length ());
1730 /* Now EXPR_STACK is a stack of pointers to all the refs we've
1731 walked through and P points to the innermost expression.
1733 Java requires that we elaborated nodes in source order. That
1734 means we must gimplify the inner expression followed by each of
1735 the indices, in order. But we can't gimplify the inner
1736 expression until we deal with any variable bounds, sizes, or
1737 positions in order to deal with PLACEHOLDER_EXPRs.
1739 So we do this in three steps. First we deal with the annotations
1740 for any variables in the components, then we gimplify the base,
1741 then we gimplify any indices, from left to right. */
1742 for (i = expr_stack.length () - 1; i >= 0; i--)
1744 tree t = expr_stack[i];
1746 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1748 /* Gimplify the low bound and element type size and put them into
1749 the ARRAY_REF. If these values are set, they have already been
1750 gimplified. */
/* Operand 2 of an ARRAY_REF caches the low bound; a NULL operand
   means the default, which only needs to be stored explicitly when
   it is not a gimple invariant.  */
1751 if (TREE_OPERAND (t, 2) == NULL_TREE)
1753 tree low = unshare_expr (array_ref_low_bound (t));
1754 if (!is_gimple_min_invariant (low))
1756 TREE_OPERAND (t, 2) = low;
1757 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1758 post_p, is_gimple_reg,
1759 fb_rvalue);
1760 ret = MIN (ret, tret);
1763 else
1765 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1766 is_gimple_reg, fb_rvalue);
1767 ret = MIN (ret, tret);
/* Operand 3 caches the element size divided by the element
   alignment (the division keeps the stored value smaller and is
   undone when the offset is computed).  */
1770 if (TREE_OPERAND (t, 3) == NULL_TREE)
1772 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1773 tree elmt_size = unshare_expr (array_ref_element_size (t));
1774 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1776 /* Divide the element size by the alignment of the element
1777 type (above). */
1778 elmt_size
1779 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
1781 if (!is_gimple_min_invariant (elmt_size))
1783 TREE_OPERAND (t, 3) = elmt_size;
1784 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
1785 post_p, is_gimple_reg,
1786 fb_rvalue);
1787 ret = MIN (ret, tret);
1790 else
1792 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1793 is_gimple_reg, fb_rvalue);
1794 ret = MIN (ret, tret);
1797 else if (TREE_CODE (t) == COMPONENT_REF)
1799 /* Set the field offset into T and gimplify it. */
/* Analogously, operand 2 of a COMPONENT_REF caches the variable
   field offset, divided by the field's offset alignment.  */
1800 if (TREE_OPERAND (t, 2) == NULL_TREE)
1802 tree offset = unshare_expr (component_ref_field_offset (t));
1803 tree field = TREE_OPERAND (t, 1);
1804 tree factor
1805 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1807 /* Divide the offset by its alignment. */
1808 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
1810 if (!is_gimple_min_invariant (offset))
1812 TREE_OPERAND (t, 2) = offset;
1813 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1814 post_p, is_gimple_reg,
1815 fb_rvalue);
1816 ret = MIN (ret, tret);
1819 else
1821 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1822 is_gimple_reg, fb_rvalue);
1823 ret = MIN (ret, tret);
1828 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1829 so as to match the min_lval predicate. Failure to do so may result
1830 in the creation of large aggregate temporaries. */
1831 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1832 fallback | fb_lvalue);
1833 ret = MIN (ret, tret);
1835 /* And finally, the indices and operands of ARRAY_REF. During this
1836 loop we also remove any useless conversions. */
1837 for (; expr_stack.length () > 0; )
1839 tree t = expr_stack.pop ();
1841 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1843 /* Gimplify the dimension. */
1844 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1846 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1847 is_gimple_val, fb_rvalue);
1848 ret = MIN (ret, tret);
1852 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1854 /* The innermost expression P may have originally had
1855 TREE_SIDE_EFFECTS set which would have caused all the outer
1856 expressions in *EXPR_P leading to P to also have had
1857 TREE_SIDE_EFFECTS set. */
1858 recalculate_side_effects (t);
1861 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1862 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1864 canonicalize_component_ref (expr_p);
1867 expr_stack.release ();
/* GS_ALL_DONE promises the tree was not changed; anything that rewrote
   *EXPR_P must have lowered RET below GS_ALL_DONE.  */
1869 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
1871 return ret;
1874 /* Gimplify the self modifying expression pointed to by EXPR_P
1875 (++, --, +=, -=).
1877 PRE_P points to the list where side effects that must happen before
1878 *EXPR_P should be stored.
1880 POST_P points to the list where side effects that must happen after
1881 *EXPR_P should be stored.
1883 WANT_VALUE is nonzero iff we want to use the value of this expression
1884 in another expression.
1886 ARITH_TYPE is the type the computation should be performed in. */
1888 enum gimplify_status
1889 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1890 bool want_value, tree arith_type)
1892 enum tree_code code;
1893 tree lhs, lvalue, rhs, t1;
1894 gimple_seq post = NULL, *orig_post_p = post_p;
1895 bool postfix;
1896 enum tree_code arith_code;
1897 enum gimplify_status ret;
1898 location_t loc = EXPR_LOCATION (*expr_p);
1900 code = TREE_CODE (*expr_p);
1902 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1903 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1905 /* Prefix or postfix? */
1906 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1907 /* Faster to treat as prefix if result is not used. */
1908 postfix = want_value;
1909 else
1910 postfix = false;
1912 /* For postfix, make sure the inner expression's post side effects
1913 are executed after side effects from this expression. */
/* POST_P is redirected to a local sequence which is re-appended to the
   caller's post queue (ORIG_POST_P) at the end.  */
1914 if (postfix)
1915 post_p = &post;
1917 /* Add or subtract? */
1918 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1919 arith_code = PLUS_EXPR;
1920 else
1921 arith_code = MINUS_EXPR;
1923 /* Gimplify the LHS into a GIMPLE lvalue. */
1924 lvalue = TREE_OPERAND (*expr_p, 0);
1925 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1926 if (ret == GS_ERROR)
1927 return ret;
1929 /* Extract the operands to the arithmetic operation. */
1930 lhs = lvalue;
1931 rhs = TREE_OPERAND (*expr_p, 1);
1933 /* For postfix operator, we evaluate the LHS to an rvalue and then use
1934 that as the result value and in the postqueue operation. */
/* The pre-modification value is captured in a temporary so it can be
   returned after the assignment below.  */
1935 if (postfix)
1937 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1938 if (ret == GS_ERROR)
1939 return ret;
1941 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
1944 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
/* GIMPLE has no POINTER_MINUS: a decrement is expressed as an addition
   of the negated (pointer-offset-type) offset.  */
1945 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
1947 rhs = convert_to_ptrofftype_loc (loc, rhs);
1948 if (arith_code == MINUS_EXPR)
1949 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
1950 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
1952 else
1953 t1 = fold_convert (TREE_TYPE (*expr_p),
1954 fold_build2 (arith_code, arith_type,
1955 fold_convert (arith_type, lhs),
1956 fold_convert (arith_type, rhs)));
1958 if (postfix)
/* Emit the assignment now and hand back the saved pre-modification
   value; the diverted post queue is appended to the caller's.  */
1960 gimplify_assign (lvalue, t1, pre_p);
1961 gimplify_seq_add_seq (orig_post_p, post);
1962 *expr_p = lhs;
1963 return GS_ALL_DONE;
1965 else
1967 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
1968 return GS_OK;
1972 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
1974 static void
1975 maybe_with_size_expr (tree *expr_p)
1977 tree expr = *expr_p;
1978 tree type = TREE_TYPE (expr);
1979 tree size;
1981 /* If we've already wrapped this or the type is error_mark_node, we can't do
1982 anything. */
1983 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1984 || type == error_mark_node)
1985 return;
1987 /* If the size isn't known or is a constant, we have nothing to do. */
1988 size = TYPE_SIZE_UNIT (type);
1989 if (!size || TREE_CODE (size) == INTEGER_CST)
1990 return;
1992 /* Otherwise, make a WITH_SIZE_EXPR. */
1993 size = unshare_expr (size);
1994 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
1995 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
1998 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
1999 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2000 the CALL_EXPR. */
2002 static enum gimplify_status
2003 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2005 bool (*test) (tree);
2006 fallback_t fb;
2008 /* In general, we allow lvalues for function arguments to avoid
2009 extra overhead of copying large aggregates out of even larger
2010 aggregates into temporaries only to copy the temporaries to
2011 the argument list. Make optimizers happy by pulling out to
2012 temporaries those types that fit in registers. */
2013 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2014 test = is_gimple_val, fb = fb_rvalue;
2015 else
2017 test = is_gimple_lvalue, fb = fb_either;
2018 /* Also strip a TARGET_EXPR that would force an extra copy. */
2019 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2021 tree init = TARGET_EXPR_INITIAL (*arg_p);
2022 if (init
2023 && !VOID_TYPE_P (TREE_TYPE (init)))
2024 *arg_p = init;
2028 /* If this is a variable sized type, we must remember the size. */
2029 maybe_with_size_expr (arg_p);
2031 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2032 /* Make sure arguments have the same location as the function call
2033 itself. */
2034 protected_set_expr_location (*arg_p, call_location);
2036 /* There is a sequence point before a function call. Side effects in
2037 the argument list must occur before the actual call. So, when
2038 gimplifying arguments, force gimplify_expr to use an internal
2039 post queue which is then appended to the end of PRE_P. */
2040 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2043 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2044 WANT_VALUE is true if the result of the call is desired. */
2046 static enum gimplify_status
2047 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2049 tree fndecl, parms, p, fnptrtype;
2050 enum gimplify_status ret;
2051 int i, nargs;
2052 gimple call;
2053 bool builtin_va_start_p = FALSE;
2054 location_t loc = EXPR_LOCATION (*expr_p);
2056 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2058 /* For reliable diagnostics during inlining, it is necessary that
2059 every call_expr be annotated with file and line. */
2060 if (! EXPR_HAS_LOCATION (*expr_p))
2061 SET_EXPR_LOCATION (*expr_p, input_location);
2063 if (fn_contains_cilk_spawn_p (cfun)
2064 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p)
2065 && !seen_error ())
2066 return (enum gimplify_status)
2067 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p, NULL);
2069 /* This may be a call to a builtin function.
2071 Builtin function calls may be transformed into different
2072 (and more efficient) builtin function calls under certain
2073 circumstances. Unfortunately, gimplification can muck things
2074 up enough that the builtin expanders are not aware that certain
2075 transformations are still valid.
2077 So we attempt transformation/gimplification of the call before
2078 we gimplify the CALL_EXPR. At this time we do not manage to
2079 transform all calls in the same manner as the expanders do, but
2080 we do transform most of them. */
2081 fndecl = get_callee_fndecl (*expr_p);
2082 if (fndecl
2083 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2084 switch (DECL_FUNCTION_CODE (fndecl))
2086 case BUILT_IN_VA_START:
2088 builtin_va_start_p = TRUE;
2089 if (call_expr_nargs (*expr_p) < 2)
2091 error ("too few arguments to function %<va_start%>");
2092 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2093 return GS_OK;
2096 if (fold_builtin_next_arg (*expr_p, true))
2098 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2099 return GS_OK;
2101 break;
2103 case BUILT_IN_LINE:
2105 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2106 *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2107 return GS_OK;
2109 case BUILT_IN_FILE:
2111 expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2112 *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2113 return GS_OK;
2115 case BUILT_IN_FUNCTION:
2117 const char *function;
2118 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2119 *expr_p = build_string_literal (strlen (function) + 1, function);
2120 return GS_OK;
2122 default:
2125 if (fndecl && DECL_BUILT_IN (fndecl))
2127 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2128 if (new_tree && new_tree != *expr_p)
2130 /* There was a transformation of this call which computes the
2131 same value, but in a more efficient way. Return and try
2132 again. */
2133 *expr_p = new_tree;
2134 return GS_OK;
2138 /* Remember the original function pointer type. */
2139 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2141 /* There is a sequence point before the call, so any side effects in
2142 the calling expression must occur before the actual call. Force
2143 gimplify_expr to use an internal post queue. */
2144 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2145 is_gimple_call_addr, fb_rvalue);
2147 nargs = call_expr_nargs (*expr_p);
2149 /* Get argument types for verification. */
2150 fndecl = get_callee_fndecl (*expr_p);
2151 parms = NULL_TREE;
2152 if (fndecl)
2153 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2154 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2155 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2157 if (fndecl && DECL_ARGUMENTS (fndecl))
2158 p = DECL_ARGUMENTS (fndecl);
2159 else if (parms)
2160 p = parms;
2161 else
2162 p = NULL_TREE;
2163 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2166 /* If the last argument is __builtin_va_arg_pack () and it is not
2167 passed as a named argument, decrease the number of CALL_EXPR
2168 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2169 if (!p
2170 && i < nargs
2171 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2173 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2174 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2176 if (last_arg_fndecl
2177 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2178 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2179 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2181 tree call = *expr_p;
2183 --nargs;
2184 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2185 CALL_EXPR_FN (call),
2186 nargs, CALL_EXPR_ARGP (call));
2188 /* Copy all CALL_EXPR flags, location and block, except
2189 CALL_EXPR_VA_ARG_PACK flag. */
2190 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2191 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2192 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2193 = CALL_EXPR_RETURN_SLOT_OPT (call);
2194 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2195 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2197 /* Set CALL_EXPR_VA_ARG_PACK. */
2198 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2202 /* Finally, gimplify the function arguments. */
2203 if (nargs > 0)
2205 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2206 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2207 PUSH_ARGS_REVERSED ? i-- : i++)
2209 enum gimplify_status t;
2211 /* Avoid gimplifying the second argument to va_start, which needs to
2212 be the plain PARM_DECL. */
2213 if ((i != 1) || !builtin_va_start_p)
2215 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2216 EXPR_LOCATION (*expr_p));
2218 if (t == GS_ERROR)
2219 ret = GS_ERROR;
2224 /* Verify the function result. */
2225 if (want_value && fndecl
2226 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2228 error_at (loc, "using result of function returning %<void%>");
2229 ret = GS_ERROR;
2232 /* Try this again in case gimplification exposed something. */
2233 if (ret != GS_ERROR)
2235 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2237 if (new_tree && new_tree != *expr_p)
2239 /* There was a transformation of this call which computes the
2240 same value, but in a more efficient way. Return and try
2241 again. */
2242 *expr_p = new_tree;
2243 return GS_OK;
2246 else
2248 *expr_p = error_mark_node;
2249 return GS_ERROR;
2252 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2253 decl. This allows us to eliminate redundant or useless
2254 calls to "const" functions. */
2255 if (TREE_CODE (*expr_p) == CALL_EXPR)
2257 int flags = call_expr_flags (*expr_p);
2258 if (flags & (ECF_CONST | ECF_PURE)
2259 /* An infinite loop is considered a side effect. */
2260 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2261 TREE_SIDE_EFFECTS (*expr_p) = 0;
2264 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2265 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2266 form and delegate the creation of a GIMPLE_CALL to
2267 gimplify_modify_expr. This is always possible because when
2268 WANT_VALUE is true, the caller wants the result of this call into
2269 a temporary, which means that we will emit an INIT_EXPR in
2270 internal_get_tmp_var which will then be handled by
2271 gimplify_modify_expr. */
2272 if (!want_value)
2274 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2275 have to do is replicate it as a GIMPLE_CALL tuple. */
2276 gimple_stmt_iterator gsi;
2277 call = gimple_build_call_from_tree (*expr_p);
2278 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2279 notice_special_calls (call);
2280 gimplify_seq_add_stmt (pre_p, call);
2281 gsi = gsi_last (*pre_p);
2282 /* Don't fold stmts inside of target construct. We'll do it
2283 during omplower pass instead. */
2284 struct gimplify_omp_ctx *ctx;
2285 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2286 if (ctx->region_type == ORT_TARGET)
2287 break;
2288 if (ctx == NULL)
2289 fold_stmt (&gsi);
2290 *expr_p = NULL_TREE;
2292 else
2293 /* Remember the original function type. */
2294 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2295 CALL_EXPR_FN (*expr_p));
2297 return ret;
2300 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2301 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2303 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2304 condition is true or false, respectively. If null, we should generate
2305 our own to skip over the evaluation of this specific expression.
2307 LOCUS is the source location of the COND_EXPR.
2309 This function is the tree equivalent of do_jump.
2311 shortcut_cond_r should only be called by shortcut_cond_expr. */
2313 static tree
2314 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2315 location_t locus)
2317 tree local_label = NULL_TREE;
2318 tree t, expr = NULL;
2320 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2321 retain the shortcut semantics. Just insert the gotos here;
2322 shortcut_cond_expr will append the real blocks later. */
2323 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2325 location_t new_locus;
2327 /* Turn if (a && b) into
2329 if (a); else goto no;
2330 if (b) goto yes; else goto no;
2331 (no:) */
2333 if (false_label_p == NULL)
2334 false_label_p = &local_label;
2336 /* Keep the original source location on the first 'if'. */
2337 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2338 append_to_statement_list (t, &expr);
2340 /* Set the source location of the && on the second 'if'. */
2341 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2342 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2343 new_locus);
2344 append_to_statement_list (t, &expr);
2346 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2348 location_t new_locus;
2350 /* Turn if (a || b) into
2352 if (a) goto yes;
2353 if (b) goto yes; else goto no;
2354 (yes:) */
2356 if (true_label_p == NULL)
2357 true_label_p = &local_label;
2359 /* Keep the original source location on the first 'if'. */
2360 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2361 append_to_statement_list (t, &expr);
2363 /* Set the source location of the || on the second 'if'. */
2364 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2365 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2366 new_locus);
2367 append_to_statement_list (t, &expr);
2369 else if (TREE_CODE (pred) == COND_EXPR
2370 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2371 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2373 location_t new_locus;
2375 /* As long as we're messing with gotos, turn if (a ? b : c) into
2376 if (a)
2377 if (b) goto yes; else goto no;
2378 else
2379 if (c) goto yes; else goto no;
2381 Don't do this if one of the arms has void type, which can happen
2382 in C++ when the arm is throw. */
2384 /* Keep the original source location on the first 'if'. Set the source
2385 location of the ? on the second 'if'. */
2386 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2387 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2388 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2389 false_label_p, locus),
2390 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2391 false_label_p, new_locus));
2393 else
2395 expr = build3 (COND_EXPR, void_type_node, pred,
2396 build_and_jump (true_label_p),
2397 build_and_jump (false_label_p));
2398 SET_EXPR_LOCATION (expr, locus);
2401 if (local_label)
2403 t = build1 (LABEL_EXPR, void_type_node, local_label);
2404 append_to_statement_list (t, &expr);
2407 return expr;
2410 /* Given a conditional expression EXPR with short-circuit boolean
2411 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2412 predicate apart into the equivalent sequence of conditionals. */
2414 static tree
2415 shortcut_cond_expr (tree expr)
2417 tree pred = TREE_OPERAND (expr, 0);
2418 tree then_ = TREE_OPERAND (expr, 1);
2419 tree else_ = TREE_OPERAND (expr, 2);
2420 tree true_label, false_label, end_label, t;
2421 tree *true_label_p;
2422 tree *false_label_p;
2423 bool emit_end, emit_false, jump_over_else;
2424 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2425 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2427 /* First do simple transformations. */
2428 if (!else_se)
2430 /* If there is no 'else', turn
2431 if (a && b) then c
2432 into
2433 if (a) if (b) then c. */
2434 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2436 /* Keep the original source location on the first 'if'. */
2437 location_t locus = EXPR_LOC_OR_HERE (expr);
2438 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2439 /* Set the source location of the && on the second 'if'. */
2440 if (EXPR_HAS_LOCATION (pred))
2441 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2442 then_ = shortcut_cond_expr (expr);
2443 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2444 pred = TREE_OPERAND (pred, 0);
2445 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2446 SET_EXPR_LOCATION (expr, locus);
2450 if (!then_se)
2452 /* If there is no 'then', turn
2453 if (a || b); else d
2454 into
2455 if (a); else if (b); else d. */
2456 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2458 /* Keep the original source location on the first 'if'. */
2459 location_t locus = EXPR_LOC_OR_HERE (expr);
2460 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2461 /* Set the source location of the || on the second 'if'. */
2462 if (EXPR_HAS_LOCATION (pred))
2463 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2464 else_ = shortcut_cond_expr (expr);
2465 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2466 pred = TREE_OPERAND (pred, 0);
2467 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2468 SET_EXPR_LOCATION (expr, locus);
2472 /* If we're done, great. */
2473 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2474 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2475 return expr;
2477 /* Otherwise we need to mess with gotos. Change
2478 if (a) c; else d;
2480 if (a); else goto no;
2481 c; goto end;
2482 no: d; end:
2483 and recursively gimplify the condition. */
2485 true_label = false_label = end_label = NULL_TREE;
2487 /* If our arms just jump somewhere, hijack those labels so we don't
2488 generate jumps to jumps. */
2490 if (then_
2491 && TREE_CODE (then_) == GOTO_EXPR
2492 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2494 true_label = GOTO_DESTINATION (then_);
2495 then_ = NULL;
2496 then_se = false;
2499 if (else_
2500 && TREE_CODE (else_) == GOTO_EXPR
2501 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2503 false_label = GOTO_DESTINATION (else_);
2504 else_ = NULL;
2505 else_se = false;
2508 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2509 if (true_label)
2510 true_label_p = &true_label;
2511 else
2512 true_label_p = NULL;
2514 /* The 'else' branch also needs a label if it contains interesting code. */
2515 if (false_label || else_se)
2516 false_label_p = &false_label;
2517 else
2518 false_label_p = NULL;
2520 /* If there was nothing else in our arms, just forward the label(s). */
2521 if (!then_se && !else_se)
2522 return shortcut_cond_r (pred, true_label_p, false_label_p,
2523 EXPR_LOC_OR_HERE (expr));
2525 /* If our last subexpression already has a terminal label, reuse it. */
2526 if (else_se)
2527 t = expr_last (else_);
2528 else if (then_se)
2529 t = expr_last (then_);
2530 else
2531 t = NULL;
2532 if (t && TREE_CODE (t) == LABEL_EXPR)
2533 end_label = LABEL_EXPR_LABEL (t);
2535 /* If we don't care about jumping to the 'else' branch, jump to the end
2536 if the condition is false. */
2537 if (!false_label_p)
2538 false_label_p = &end_label;
2540 /* We only want to emit these labels if we aren't hijacking them. */
2541 emit_end = (end_label == NULL_TREE);
2542 emit_false = (false_label == NULL_TREE);
2544 /* We only emit the jump over the else clause if we have to--if the
2545 then clause may fall through. Otherwise we can wind up with a
2546 useless jump and a useless label at the end of gimplified code,
2547 which will cause us to think that this conditional as a whole
2548 falls through even if it doesn't. If we then inline a function
2549 which ends with such a condition, that can cause us to issue an
2550 inappropriate warning about control reaching the end of a
2551 non-void function. */
2552 jump_over_else = block_may_fallthru (then_);
2554 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2555 EXPR_LOC_OR_HERE (expr));
2557 expr = NULL;
2558 append_to_statement_list (pred, &expr);
2560 append_to_statement_list (then_, &expr);
2561 if (else_se)
2563 if (jump_over_else)
2565 tree last = expr_last (expr);
2566 t = build_and_jump (&end_label);
2567 if (EXPR_HAS_LOCATION (last))
2568 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2569 append_to_statement_list (t, &expr);
2571 if (emit_false)
2573 t = build1 (LABEL_EXPR, void_type_node, false_label);
2574 append_to_statement_list (t, &expr);
2576 append_to_statement_list (else_, &expr);
2578 if (emit_end && end_label)
2580 t = build1 (LABEL_EXPR, void_type_node, end_label);
2581 append_to_statement_list (t, &expr);
2584 return expr;
2587 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2589 tree
2590 gimple_boolify (tree expr)
2592 tree type = TREE_TYPE (expr);
2593 location_t loc = EXPR_LOCATION (expr);
2595 if (TREE_CODE (expr) == NE_EXPR
2596 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2597 && integer_zerop (TREE_OPERAND (expr, 1)))
2599 tree call = TREE_OPERAND (expr, 0);
2600 tree fn = get_callee_fndecl (call);
2602 /* For __builtin_expect ((long) (x), y) recurse into x as well
2603 if x is truth_value_p. */
2604 if (fn
2605 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2606 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2607 && call_expr_nargs (call) == 2)
2609 tree arg = CALL_EXPR_ARG (call, 0);
2610 if (arg)
2612 if (TREE_CODE (arg) == NOP_EXPR
2613 && TREE_TYPE (arg) == TREE_TYPE (call))
2614 arg = TREE_OPERAND (arg, 0);
2615 if (truth_value_p (TREE_CODE (arg)))
2617 arg = gimple_boolify (arg);
2618 CALL_EXPR_ARG (call, 0)
2619 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2625 switch (TREE_CODE (expr))
2627 case TRUTH_AND_EXPR:
2628 case TRUTH_OR_EXPR:
2629 case TRUTH_XOR_EXPR:
2630 case TRUTH_ANDIF_EXPR:
2631 case TRUTH_ORIF_EXPR:
2632 /* Also boolify the arguments of truth exprs. */
2633 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2634 /* FALLTHRU */
2636 case TRUTH_NOT_EXPR:
2637 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2639 /* These expressions always produce boolean results. */
2640 if (TREE_CODE (type) != BOOLEAN_TYPE)
2641 TREE_TYPE (expr) = boolean_type_node;
2642 return expr;
2644 case ANNOTATE_EXPR:
2645 if ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1))
2646 == annot_expr_ivdep_kind)
2648 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2649 if (TREE_CODE (type) != BOOLEAN_TYPE)
2650 TREE_TYPE (expr) = boolean_type_node;
2651 return expr;
2653 /* FALLTHRU */
2655 default:
2656 if (COMPARISON_CLASS_P (expr))
2658 /* There expressions always prduce boolean results. */
2659 if (TREE_CODE (type) != BOOLEAN_TYPE)
2660 TREE_TYPE (expr) = boolean_type_node;
2661 return expr;
2663 /* Other expressions that get here must have boolean values, but
2664 might need to be converted to the appropriate mode. */
2665 if (TREE_CODE (type) == BOOLEAN_TYPE)
2666 return expr;
2667 return fold_convert_loc (loc, boolean_type_node, expr);
2671 /* Given a conditional expression *EXPR_P without side effects, gimplify
2672 its operands. New statements are inserted to PRE_P. */
2674 static enum gimplify_status
2675 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2677 tree expr = *expr_p, cond;
2678 enum gimplify_status ret, tret;
2679 enum tree_code code;
2681 cond = gimple_boolify (COND_EXPR_COND (expr));
2683 /* We need to handle && and || specially, as their gimplification
2684 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2685 code = TREE_CODE (cond);
2686 if (code == TRUTH_ANDIF_EXPR)
2687 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2688 else if (code == TRUTH_ORIF_EXPR)
2689 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2690 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2691 COND_EXPR_COND (*expr_p) = cond;
2693 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2694 is_gimple_val, fb_rvalue);
2695 ret = MIN (ret, tret);
2696 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2697 is_gimple_val, fb_rvalue);
2699 return MIN (ret, tret);
2702 /* Return true if evaluating EXPR could trap.
2703 EXPR is GENERIC, while tree_could_trap_p can be called
2704 only on GIMPLE. */
2706 static bool
2707 generic_expr_could_trap_p (tree expr)
2709 unsigned i, n;
2711 if (!expr || is_gimple_val (expr))
2712 return false;
2714 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2715 return true;
2717 n = TREE_OPERAND_LENGTH (expr);
2718 for (i = 0; i < n; i++)
2719 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2720 return true;
2722 return false;
2725 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2726 into
2728 if (p) if (p)
2729 t1 = a; a;
2730 else or else
2731 t1 = b; b;
2734 The second form is used when *EXPR_P is of type void.
2736 PRE_P points to the list where side effects that must happen before
2737 *EXPR_P should be stored. */
2739 static enum gimplify_status
2740 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2742 tree expr = *expr_p;
2743 tree type = TREE_TYPE (expr);
2744 location_t loc = EXPR_LOCATION (expr);
2745 tree tmp, arm1, arm2;
2746 enum gimplify_status ret;
2747 tree label_true, label_false, label_cont;
2748 bool have_then_clause_p, have_else_clause_p;
2749 gimple gimple_cond;
2750 enum tree_code pred_code;
2751 gimple_seq seq = NULL;
2753 /* If this COND_EXPR has a value, copy the values into a temporary within
2754 the arms. */
2755 if (!VOID_TYPE_P (type))
2757 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
2758 tree result;
2760 /* If either an rvalue is ok or we do not require an lvalue, create the
2761 temporary. But we cannot do that if the type is addressable. */
2762 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
2763 && !TREE_ADDRESSABLE (type))
2765 if (gimplify_ctxp->allow_rhs_cond_expr
2766 /* If either branch has side effects or could trap, it can't be
2767 evaluated unconditionally. */
2768 && !TREE_SIDE_EFFECTS (then_)
2769 && !generic_expr_could_trap_p (then_)
2770 && !TREE_SIDE_EFFECTS (else_)
2771 && !generic_expr_could_trap_p (else_))
2772 return gimplify_pure_cond_expr (expr_p, pre_p);
2774 tmp = create_tmp_var (type, "iftmp");
2775 result = tmp;
2778 /* Otherwise, only create and copy references to the values. */
2779 else
2781 type = build_pointer_type (type);
2783 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2784 then_ = build_fold_addr_expr_loc (loc, then_);
2786 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2787 else_ = build_fold_addr_expr_loc (loc, else_);
2789 expr
2790 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
2792 tmp = create_tmp_var (type, "iftmp");
2793 result = build_simple_mem_ref_loc (loc, tmp);
2796 /* Build the new then clause, `tmp = then_;'. But don't build the
2797 assignment if the value is void; in C++ it can be if it's a throw. */
2798 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2799 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
2801 /* Similarly, build the new else clause, `tmp = else_;'. */
2802 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2803 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
2805 TREE_TYPE (expr) = void_type_node;
2806 recalculate_side_effects (expr);
2808 /* Move the COND_EXPR to the prequeue. */
2809 gimplify_stmt (&expr, pre_p);
2811 *expr_p = result;
2812 return GS_ALL_DONE;
2815 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
2816 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
2817 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
2818 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
2820 /* Make sure the condition has BOOLEAN_TYPE. */
2821 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2823 /* Break apart && and || conditions. */
2824 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2825 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2827 expr = shortcut_cond_expr (expr);
2829 if (expr != *expr_p)
2831 *expr_p = expr;
2833 /* We can't rely on gimplify_expr to re-gimplify the expanded
2834 form properly, as cleanups might cause the target labels to be
2835 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2836 set up a conditional context. */
2837 gimple_push_condition ();
2838 gimplify_stmt (expr_p, &seq);
2839 gimple_pop_condition (pre_p);
2840 gimple_seq_add_seq (pre_p, seq);
2842 return GS_ALL_DONE;
2846 /* Now do the normal gimplification. */
2848 /* Gimplify condition. */
2849 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2850 fb_rvalue);
2851 if (ret == GS_ERROR)
2852 return GS_ERROR;
2853 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2855 gimple_push_condition ();
2857 have_then_clause_p = have_else_clause_p = false;
2858 if (TREE_OPERAND (expr, 1) != NULL
2859 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2860 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2861 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2862 == current_function_decl)
2863 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2864 have different locations, otherwise we end up with incorrect
2865 location information on the branches. */
2866 && (optimize
2867 || !EXPR_HAS_LOCATION (expr)
2868 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2869 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2871 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2872 have_then_clause_p = true;
2874 else
2875 label_true = create_artificial_label (UNKNOWN_LOCATION);
2876 if (TREE_OPERAND (expr, 2) != NULL
2877 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2878 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
2879 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
2880 == current_function_decl)
2881 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2882 have different locations, otherwise we end up with incorrect
2883 location information on the branches. */
2884 && (optimize
2885 || !EXPR_HAS_LOCATION (expr)
2886 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
2887 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
2889 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
2890 have_else_clause_p = true;
2892 else
2893 label_false = create_artificial_label (UNKNOWN_LOCATION);
2895 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
2896 &arm2);
2898 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
2899 label_false);
2901 gimplify_seq_add_stmt (&seq, gimple_cond);
2902 label_cont = NULL_TREE;
2903 if (!have_then_clause_p)
2905 /* For if (...) {} else { code; } put label_true after
2906 the else block. */
2907 if (TREE_OPERAND (expr, 1) == NULL_TREE
2908 && !have_else_clause_p
2909 && TREE_OPERAND (expr, 2) != NULL_TREE)
2910 label_cont = label_true;
2911 else
2913 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
2914 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
2915 /* For if (...) { code; } else {} or
2916 if (...) { code; } else goto label; or
2917 if (...) { code; return; } else { ... }
2918 label_cont isn't needed. */
2919 if (!have_else_clause_p
2920 && TREE_OPERAND (expr, 2) != NULL_TREE
2921 && gimple_seq_may_fallthru (seq))
2923 gimple g;
2924 label_cont = create_artificial_label (UNKNOWN_LOCATION);
2926 g = gimple_build_goto (label_cont);
2928 /* GIMPLE_COND's are very low level; they have embedded
2929 gotos. This particular embedded goto should not be marked
2930 with the location of the original COND_EXPR, as it would
2931 correspond to the COND_EXPR's condition, not the ELSE or the
2932 THEN arms. To avoid marking it with the wrong location, flag
2933 it as "no location". */
2934 gimple_set_do_not_emit_location (g);
2936 gimplify_seq_add_stmt (&seq, g);
2940 if (!have_else_clause_p)
2942 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
2943 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
2945 if (label_cont)
2946 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
2948 gimple_pop_condition (pre_p);
2949 gimple_seq_add_seq (pre_p, seq);
2951 if (ret == GS_ERROR)
2952 ; /* Do nothing. */
2953 else if (have_then_clause_p || have_else_clause_p)
2954 ret = GS_ALL_DONE;
2955 else
2957 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2958 expr = TREE_OPERAND (expr, 0);
2959 gimplify_stmt (&expr, pre_p);
2962 *expr_p = NULL;
2963 return ret;
2966 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
2967 to be marked addressable.
2969 We cannot rely on such an expression being directly markable if a temporary
2970 has been created by the gimplification. In this case, we create another
2971 temporary and initialize it with a copy, which will become a store after we
2972 mark it addressable. This can happen if the front-end passed us something
2973 that it could not mark addressable yet, like a Fortran pass-by-reference
2974 parameter (int) floatvar. */
2976 static void
2977 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
2979 while (handled_component_p (*expr_p))
2980 expr_p = &TREE_OPERAND (*expr_p, 0);
2981 if (is_gimple_reg (*expr_p))
2982 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
2985 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2986 a call to __builtin_memcpy. */
2988 static enum gimplify_status
2989 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
2990 gimple_seq *seq_p)
2992 tree t, to, to_ptr, from, from_ptr;
2993 gimple gs;
2994 location_t loc = EXPR_LOCATION (*expr_p);
2996 to = TREE_OPERAND (*expr_p, 0);
2997 from = TREE_OPERAND (*expr_p, 1);
2999 /* Mark the RHS addressable. Beware that it may not be possible to do so
3000 directly if a temporary has been created by the gimplification. */
3001 prepare_gimple_addressable (&from, seq_p);
3003 mark_addressable (from);
3004 from_ptr = build_fold_addr_expr_loc (loc, from);
3005 gimplify_arg (&from_ptr, seq_p, loc);
3007 mark_addressable (to);
3008 to_ptr = build_fold_addr_expr_loc (loc, to);
3009 gimplify_arg (&to_ptr, seq_p, loc);
3011 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3013 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3015 if (want_value)
3017 /* tmp = memcpy() */
3018 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3019 gimple_call_set_lhs (gs, t);
3020 gimplify_seq_add_stmt (seq_p, gs);
3022 *expr_p = build_simple_mem_ref (t);
3023 return GS_ALL_DONE;
3026 gimplify_seq_add_stmt (seq_p, gs);
3027 *expr_p = NULL;
3028 return GS_ALL_DONE;
3031 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3032 a call to __builtin_memset. In this case we know that the RHS is
3033 a CONSTRUCTOR with an empty element list. */
3035 static enum gimplify_status
3036 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3037 gimple_seq *seq_p)
3039 tree t, from, to, to_ptr;
3040 gimple gs;
3041 location_t loc = EXPR_LOCATION (*expr_p);
3043 /* Assert our assumptions, to abort instead of producing wrong code
3044 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3045 not be immediately exposed. */
3046 from = TREE_OPERAND (*expr_p, 1);
3047 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3048 from = TREE_OPERAND (from, 0);
3050 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3051 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3053 /* Now proceed. */
3054 to = TREE_OPERAND (*expr_p, 0);
3056 to_ptr = build_fold_addr_expr_loc (loc, to);
3057 gimplify_arg (&to_ptr, seq_p, loc);
3058 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3060 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3062 if (want_value)
3064 /* tmp = memset() */
3065 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3066 gimple_call_set_lhs (gs, t);
3067 gimplify_seq_add_stmt (seq_p, gs);
3069 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3070 return GS_ALL_DONE;
3073 gimplify_seq_add_stmt (seq_p, gs);
3074 *expr_p = NULL;
3075 return GS_ALL_DONE;
3078 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3079 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3080 assignment. Return non-null if we detect a potential overlap. */
3082 struct gimplify_init_ctor_preeval_data
3084 /* The base decl of the lhs object. May be NULL, in which case we
3085 have to assume the lhs is indirect. */
3086 tree lhs_base_decl;
3088 /* The alias set of the lhs object. */
3089 alias_set_type lhs_alias_set;
3092 static tree
3093 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3095 struct gimplify_init_ctor_preeval_data *data
3096 = (struct gimplify_init_ctor_preeval_data *) xdata;
3097 tree t = *tp;
3099 /* If we find the base object, obviously we have overlap. */
3100 if (data->lhs_base_decl == t)
3101 return t;
3103 /* If the constructor component is indirect, determine if we have a
3104 potential overlap with the lhs. The only bits of information we
3105 have to go on at this point are addressability and alias sets. */
3106 if ((INDIRECT_REF_P (t)
3107 || TREE_CODE (t) == MEM_REF)
3108 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3109 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3110 return t;
3112 /* If the constructor component is a call, determine if it can hide a
3113 potential overlap with the lhs through an INDIRECT_REF like above.
3114 ??? Ugh - this is completely broken. In fact this whole analysis
3115 doesn't look conservative. */
3116 if (TREE_CODE (t) == CALL_EXPR)
3118 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3120 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3121 if (POINTER_TYPE_P (TREE_VALUE (type))
3122 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3123 && alias_sets_conflict_p (data->lhs_alias_set,
3124 get_alias_set
3125 (TREE_TYPE (TREE_VALUE (type)))))
3126 return t;
3129 if (IS_TYPE_OR_DECL_P (t))
3130 *walk_subtrees = 0;
3131 return NULL;
3134 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3135 force values that overlap with the lhs (as described by *DATA)
3136 into temporaries. */
3138 static void
3139 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3140 struct gimplify_init_ctor_preeval_data *data)
3142 enum gimplify_status one;
3144 /* If the value is constant, then there's nothing to pre-evaluate. */
3145 if (TREE_CONSTANT (*expr_p))
3147 /* Ensure it does not have side effects, it might contain a reference to
3148 the object we're initializing. */
3149 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3150 return;
3153 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3154 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3155 return;
3157 /* Recurse for nested constructors. */
3158 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3160 unsigned HOST_WIDE_INT ix;
3161 constructor_elt *ce;
3162 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3164 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3165 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3167 return;
3170 /* If this is a variable sized type, we must remember the size. */
3171 maybe_with_size_expr (expr_p);
3173 /* Gimplify the constructor element to something appropriate for the rhs
3174 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3175 the gimplifier will consider this a store to memory. Doing this
3176 gimplification now means that we won't have to deal with complicated
3177 language-specific trees, nor trees like SAVE_EXPR that can induce
3178 exponential search behavior. */
3179 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3180 if (one == GS_ERROR)
3182 *expr_p = NULL;
3183 return;
3186 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3187 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3188 always be true for all scalars, since is_gimple_mem_rhs insists on a
3189 temporary variable for them. */
3190 if (DECL_P (*expr_p))
3191 return;
3193 /* If this is of variable size, we have no choice but to assume it doesn't
3194 overlap since we can't make a temporary for it. */
3195 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3196 return;
3198 /* Otherwise, we must search for overlap ... */
3199 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3200 return;
3202 /* ... and if found, force the value into a temporary. */
3203 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3206 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3207 a RANGE_EXPR in a CONSTRUCTOR for an array.
3209 var = lower;
3210 loop_entry:
3211 object[var] = value;
3212 if (var == upper)
3213 goto loop_exit;
3214 var = var + 1;
3215 goto loop_entry;
3216 loop_exit:
3218 We increment var _after_ the loop exit check because we might otherwise
3219 fail if upper == TYPE_MAX_VALUE (type for upper).
3221 Note that we never have to deal with SAVE_EXPRs here, because this has
3222 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3224 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3225 gimple_seq *, bool);
3227 static void
3228 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3229 tree value, tree array_elt_type,
3230 gimple_seq *pre_p, bool cleared)
3232 tree loop_entry_label, loop_exit_label, fall_thru_label;
3233 tree var, var_type, cref, tmp;
3235 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3236 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3237 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3239 /* Create and initialize the index variable. */
3240 var_type = TREE_TYPE (upper);
3241 var = create_tmp_var (var_type, NULL);
3242 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3244 /* Add the loop entry label. */
3245 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3247 /* Build the reference. */
3248 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3249 var, NULL_TREE, NULL_TREE);
3251 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3252 the store. Otherwise just assign value to the reference. */
3254 if (TREE_CODE (value) == CONSTRUCTOR)
3255 /* NB we might have to call ourself recursively through
3256 gimplify_init_ctor_eval if the value is a constructor. */
3257 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3258 pre_p, cleared);
3259 else
3260 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3262 /* We exit the loop when the index var is equal to the upper bound. */
3263 gimplify_seq_add_stmt (pre_p,
3264 gimple_build_cond (EQ_EXPR, var, upper,
3265 loop_exit_label, fall_thru_label));
3267 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3269 /* Otherwise, increment the index var... */
3270 tmp = build2 (PLUS_EXPR, var_type, var,
3271 fold_convert (var_type, integer_one_node));
3272 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3274 /* ...and jump back to the loop entry. */
3275 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3277 /* Add the loop exit label. */
3278 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3281 /* Return true if FDECL is accessing a field that is zero sized. */
3283 static bool
3284 zero_sized_field_decl (const_tree fdecl)
3286 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3287 && integer_zerop (DECL_SIZE (fdecl)))
3288 return true;
3289 return false;
3292 /* Return true if TYPE is zero sized. */
3294 static bool
3295 zero_sized_type (const_tree type)
3297 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3298 && integer_zerop (TYPE_SIZE (type)))
3299 return true;
3300 return false;
3303 /* A subroutine of gimplify_init_constructor. Generate individual
3304 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3305 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3306 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3307 zeroed first. */
3309 static void
3310 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3311 gimple_seq *pre_p, bool cleared)
3313 tree array_elt_type = NULL;
3314 unsigned HOST_WIDE_INT ix;
3315 tree purpose, value;
3317 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3318 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3320 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3322 tree cref;
3324 /* NULL values are created above for gimplification errors. */
3325 if (value == NULL)
3326 continue;
3328 if (cleared && initializer_zerop (value))
3329 continue;
3331 /* ??? Here's to hoping the front end fills in all of the indices,
3332 so we don't have to figure out what's missing ourselves. */
3333 gcc_assert (purpose);
3335 /* Skip zero-sized fields, unless value has side-effects. This can
3336 happen with calls to functions returning a zero-sized type, which
3337 we shouldn't discard. As a number of downstream passes don't
3338 expect sets of zero-sized fields, we rely on the gimplification of
3339 the MODIFY_EXPR we make below to drop the assignment statement. */
3340 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3341 continue;
3343 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3344 whole range. */
3345 if (TREE_CODE (purpose) == RANGE_EXPR)
3347 tree lower = TREE_OPERAND (purpose, 0);
3348 tree upper = TREE_OPERAND (purpose, 1);
3350 /* If the lower bound is equal to upper, just treat it as if
3351 upper was the index. */
3352 if (simple_cst_equal (lower, upper))
3353 purpose = upper;
3354 else
3356 gimplify_init_ctor_eval_range (object, lower, upper, value,
3357 array_elt_type, pre_p, cleared);
3358 continue;
3362 if (array_elt_type)
3364 /* Do not use bitsizetype for ARRAY_REF indices. */
3365 if (TYPE_DOMAIN (TREE_TYPE (object)))
3366 purpose
3367 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3368 purpose);
3369 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3370 purpose, NULL_TREE, NULL_TREE);
3372 else
3374 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3375 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3376 unshare_expr (object), purpose, NULL_TREE);
3379 if (TREE_CODE (value) == CONSTRUCTOR
3380 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3381 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3382 pre_p, cleared);
3383 else
3385 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3386 gimplify_and_add (init, pre_p);
3387 ggc_free (init);
3392 /* Return the appropriate RHS predicate for this LHS. */
3394 gimple_predicate
3395 rhs_predicate_for (tree lhs)
3397 if (is_gimple_reg (lhs))
3398 return is_gimple_reg_rhs_or_call;
3399 else
3400 return is_gimple_mem_rhs_or_call;
3403 /* Gimplify a C99 compound literal expression. This just means adding
3404 the DECL_EXPR before the current statement and using its anonymous
3405 decl instead. */
3407 static enum gimplify_status
3408 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3409 bool (*gimple_test_f) (tree),
3410 fallback_t fallback)
3412 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3413 tree decl = DECL_EXPR_DECL (decl_s);
3414 tree init = DECL_INITIAL (decl);
3415 /* Mark the decl as addressable if the compound literal
3416 expression is addressable now, otherwise it is marked too late
3417 after we gimplify the initialization expression. */
3418 if (TREE_ADDRESSABLE (*expr_p))
3419 TREE_ADDRESSABLE (decl) = 1;
3420 /* Otherwise, if we don't need an lvalue and have a literal directly
3421 substitute it. Check if it matches the gimple predicate, as
3422 otherwise we'd generate a new temporary, and we can as well just
3423 use the decl we already have. */
3424 else if (!TREE_ADDRESSABLE (decl)
3425 && init
3426 && (fallback & fb_lvalue) == 0
3427 && gimple_test_f (init))
3429 *expr_p = init;
3430 return GS_OK;
3433 /* Preliminarily mark non-addressed complex variables as eligible
3434 for promotion to gimple registers. We'll transform their uses
3435 as we find them. */
3436 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3437 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3438 && !TREE_THIS_VOLATILE (decl)
3439 && !needs_to_live_in_memory (decl))
3440 DECL_GIMPLE_REG_P (decl) = 1;
3442 /* If the decl is not addressable, then it is being used in some
3443 expression or on the right hand side of a statement, and it can
3444 be put into a readonly data section. */
3445 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3446 TREE_READONLY (decl) = 1;
3448 /* This decl isn't mentioned in the enclosing block, so add it to the
3449 list of temps. FIXME it seems a bit of a kludge to say that
3450 anonymous artificial vars aren't pushed, but everything else is. */
3451 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3452 gimple_add_tmp_var (decl);
3454 gimplify_and_add (decl_s, pre_p);
3455 *expr_p = decl;
3456 return GS_OK;
3459 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3460 return a new CONSTRUCTOR if something changed. */
3462 static tree
3463 optimize_compound_literals_in_ctor (tree orig_ctor)
3465 tree ctor = orig_ctor;
3466 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3467 unsigned int idx, num = vec_safe_length (elts);
3469 for (idx = 0; idx < num; idx++)
3471 tree value = (*elts)[idx].value;
3472 tree newval = value;
3473 if (TREE_CODE (value) == CONSTRUCTOR)
3474 newval = optimize_compound_literals_in_ctor (value);
3475 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3477 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3478 tree decl = DECL_EXPR_DECL (decl_s);
3479 tree init = DECL_INITIAL (decl);
3481 if (!TREE_ADDRESSABLE (value)
3482 && !TREE_ADDRESSABLE (decl)
3483 && init
3484 && TREE_CODE (init) == CONSTRUCTOR)
3485 newval = optimize_compound_literals_in_ctor (init);
3487 if (newval == value)
3488 continue;
3490 if (ctor == orig_ctor)
3492 ctor = copy_node (orig_ctor);
3493 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3494 elts = CONSTRUCTOR_ELTS (ctor);
3496 (*elts)[idx].value = newval;
3498 return ctor;
3501 /* A subroutine of gimplify_modify_expr. Break out elements of a
3502 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3504 Note that we still need to clear any elements that don't have explicit
3505 initializers, so if not all elements are initialized we keep the
3506 original MODIFY_EXPR, we just remove all of the constructor elements.
3508 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3509 GS_ERROR if we would have to create a temporary when gimplifying
3510 this constructor. Otherwise, return GS_OK.
3512 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3514 static enum gimplify_status
3515 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3516 bool want_value, bool notify_temp_creation)
3518 tree object, ctor, type;
3519 enum gimplify_status ret;
3520 vec<constructor_elt, va_gc> *elts;
3522 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3524 if (!notify_temp_creation)
3526 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3527 is_gimple_lvalue, fb_lvalue);
3528 if (ret == GS_ERROR)
3529 return ret;
3532 object = TREE_OPERAND (*expr_p, 0);
3533 ctor = TREE_OPERAND (*expr_p, 1) =
3534 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3535 type = TREE_TYPE (ctor);
3536 elts = CONSTRUCTOR_ELTS (ctor);
3537 ret = GS_ALL_DONE;
3539 switch (TREE_CODE (type))
3541 case RECORD_TYPE:
3542 case UNION_TYPE:
3543 case QUAL_UNION_TYPE:
3544 case ARRAY_TYPE:
3546 struct gimplify_init_ctor_preeval_data preeval_data;
3547 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3548 bool cleared, complete_p, valid_const_initializer;
3550 /* Aggregate types must lower constructors to initialization of
3551 individual elements. The exception is that a CONSTRUCTOR node
3552 with no elements indicates zero-initialization of the whole. */
3553 if (vec_safe_is_empty (elts))
3555 if (notify_temp_creation)
3556 return GS_OK;
3557 break;
3560 /* Fetch information about the constructor to direct later processing.
3561 We might want to make static versions of it in various cases, and
3562 can only do so if it known to be a valid constant initializer. */
3563 valid_const_initializer
3564 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3565 &num_ctor_elements, &complete_p);
3567 /* If a const aggregate variable is being initialized, then it
3568 should never be a lose to promote the variable to be static. */
3569 if (valid_const_initializer
3570 && num_nonzero_elements > 1
3571 && TREE_READONLY (object)
3572 && TREE_CODE (object) == VAR_DECL
3573 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3575 if (notify_temp_creation)
3576 return GS_ERROR;
3577 DECL_INITIAL (object) = ctor;
3578 TREE_STATIC (object) = 1;
3579 if (!DECL_NAME (object))
3580 DECL_NAME (object) = create_tmp_var_name ("C");
3581 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3583 /* ??? C++ doesn't automatically append a .<number> to the
3584 assembler name, and even when it does, it looks at FE private
3585 data structures to figure out what that number should be,
3586 which are not set for this variable. I suppose this is
3587 important for local statics for inline functions, which aren't
3588 "local" in the object file sense. So in order to get a unique
3589 TU-local symbol, we must invoke the lhd version now. */
3590 lhd_set_decl_assembler_name (object);
3592 *expr_p = NULL_TREE;
3593 break;
3596 /* If there are "lots" of initialized elements, even discounting
3597 those that are not address constants (and thus *must* be
3598 computed at runtime), then partition the constructor into
3599 constant and non-constant parts. Block copy the constant
3600 parts in, then generate code for the non-constant parts. */
3601 /* TODO. There's code in cp/typeck.c to do this. */
3603 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3604 /* store_constructor will ignore the clearing of variable-sized
3605 objects. Initializers for such objects must explicitly set
3606 every field that needs to be set. */
3607 cleared = false;
3608 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3609 /* If the constructor isn't complete, clear the whole object
3610 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3612 ??? This ought not to be needed. For any element not present
3613 in the initializer, we should simply set them to zero. Except
3614 we'd need to *find* the elements that are not present, and that
3615 requires trickery to avoid quadratic compile-time behavior in
3616 large cases or excessive memory use in small cases. */
3617 cleared = true;
3618 else if (num_ctor_elements - num_nonzero_elements
3619 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3620 && num_nonzero_elements < num_ctor_elements / 4)
3621 /* If there are "lots" of zeros, it's more efficient to clear
3622 the memory and then set the nonzero elements. */
3623 cleared = true;
3624 else
3625 cleared = false;
3627 /* If there are "lots" of initialized elements, and all of them
3628 are valid address constants, then the entire initializer can
3629 be dropped to memory, and then memcpy'd out. Don't do this
3630 for sparse arrays, though, as it's more efficient to follow
3631 the standard CONSTRUCTOR behavior of memset followed by
3632 individual element initialization. Also don't do this for small
3633 all-zero initializers (which aren't big enough to merit
3634 clearing), and don't try to make bitwise copies of
3635 TREE_ADDRESSABLE types.
3637 We cannot apply such transformation when compiling chkp static
3638 initializer because creation of initializer image in the memory
3639 will require static initialization of bounds for it. It should
3640 result in another gimplification of similar initializer and we
3641 may fall into infinite loop. */
3642 if (valid_const_initializer
3643 && !(cleared || num_nonzero_elements == 0)
3644 && !TREE_ADDRESSABLE (type)
3645 && (!current_function_decl
3646 || !lookup_attribute ("chkp ctor",
3647 DECL_ATTRIBUTES (current_function_decl))))
3649 HOST_WIDE_INT size = int_size_in_bytes (type);
3650 unsigned int align;
3652 /* ??? We can still get unbounded array types, at least
3653 from the C++ front end. This seems wrong, but attempt
3654 to work around it for now. */
3655 if (size < 0)
3657 size = int_size_in_bytes (TREE_TYPE (object));
3658 if (size >= 0)
3659 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3662 /* Find the maximum alignment we can assume for the object. */
3663 /* ??? Make use of DECL_OFFSET_ALIGN. */
3664 if (DECL_P (object))
3665 align = DECL_ALIGN (object);
3666 else
3667 align = TYPE_ALIGN (type);
3669 /* Do a block move either if the size is so small as to make
3670 each individual move a sub-unit move on average, or if it
3671 is so large as to make individual moves inefficient. */
3672 if (size > 0
3673 && num_nonzero_elements > 1
3674 && (size < num_nonzero_elements
3675 || !can_move_by_pieces (size, align)))
3677 if (notify_temp_creation)
3678 return GS_ERROR;
3680 walk_tree (&ctor, force_labels_r, NULL, NULL);
3681 ctor = tree_output_constant_def (ctor);
3682 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3683 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3684 TREE_OPERAND (*expr_p, 1) = ctor;
3686 /* This is no longer an assignment of a CONSTRUCTOR, but
3687 we still may have processing to do on the LHS. So
3688 pretend we didn't do anything here to let that happen. */
3689 return GS_UNHANDLED;
3693 /* If the target is volatile, we have non-zero elements and more than
3694 one field to assign, initialize the target from a temporary. */
3695 if (TREE_THIS_VOLATILE (object)
3696 && !TREE_ADDRESSABLE (type)
3697 && num_nonzero_elements > 0
3698 && vec_safe_length (elts) > 1)
3700 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3701 TREE_OPERAND (*expr_p, 0) = temp;
3702 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3703 *expr_p,
3704 build2 (MODIFY_EXPR, void_type_node,
3705 object, temp));
3706 return GS_OK;
3709 if (notify_temp_creation)
3710 return GS_OK;
3712 /* If there are nonzero elements and if needed, pre-evaluate to capture
3713 elements overlapping with the lhs into temporaries. We must do this
3714 before clearing to fetch the values before they are zeroed-out. */
3715 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3717 preeval_data.lhs_base_decl = get_base_address (object);
3718 if (!DECL_P (preeval_data.lhs_base_decl))
3719 preeval_data.lhs_base_decl = NULL;
3720 preeval_data.lhs_alias_set = get_alias_set (object);
3722 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3723 pre_p, post_p, &preeval_data);
3726 if (cleared)
3728 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3729 Note that we still have to gimplify, in order to handle the
3730 case of variable sized types. Avoid shared tree structures. */
3731 CONSTRUCTOR_ELTS (ctor) = NULL;
3732 TREE_SIDE_EFFECTS (ctor) = 0;
3733 object = unshare_expr (object);
3734 gimplify_stmt (expr_p, pre_p);
3737 /* If we have not block cleared the object, or if there are nonzero
3738 elements in the constructor, add assignments to the individual
3739 scalar fields of the object. */
3740 if (!cleared || num_nonzero_elements > 0)
3741 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3743 *expr_p = NULL_TREE;
3745 break;
3747 case COMPLEX_TYPE:
3749 tree r, i;
3751 if (notify_temp_creation)
3752 return GS_OK;
3754 /* Extract the real and imaginary parts out of the ctor. */
3755 gcc_assert (elts->length () == 2);
3756 r = (*elts)[0].value;
3757 i = (*elts)[1].value;
3758 if (r == NULL || i == NULL)
3760 tree zero = build_zero_cst (TREE_TYPE (type));
3761 if (r == NULL)
3762 r = zero;
3763 if (i == NULL)
3764 i = zero;
3767 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3768 represent creation of a complex value. */
3769 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3771 ctor = build_complex (type, r, i);
3772 TREE_OPERAND (*expr_p, 1) = ctor;
3774 else
3776 ctor = build2 (COMPLEX_EXPR, type, r, i);
3777 TREE_OPERAND (*expr_p, 1) = ctor;
3778 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3779 pre_p,
3780 post_p,
3781 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3782 fb_rvalue);
3785 break;
3787 case VECTOR_TYPE:
3789 unsigned HOST_WIDE_INT ix;
3790 constructor_elt *ce;
3792 if (notify_temp_creation)
3793 return GS_OK;
3795 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3796 if (TREE_CONSTANT (ctor))
3798 bool constant_p = true;
3799 tree value;
3801 /* Even when ctor is constant, it might contain non-*_CST
3802 elements, such as addresses or trapping values like
3803 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3804 in VECTOR_CST nodes. */
3805 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3806 if (!CONSTANT_CLASS_P (value))
3808 constant_p = false;
3809 break;
3812 if (constant_p)
3814 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3815 break;
3818 /* Don't reduce an initializer constant even if we can't
3819 make a VECTOR_CST. It won't do anything for us, and it'll
3820 prevent us from representing it as a single constant. */
3821 if (initializer_constant_valid_p (ctor, type))
3822 break;
3824 TREE_CONSTANT (ctor) = 0;
3827 /* Vector types use CONSTRUCTOR all the way through gimple
3828 compilation as a general initializer. */
3829 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
3831 enum gimplify_status tret;
3832 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3833 fb_rvalue);
3834 if (tret == GS_ERROR)
3835 ret = GS_ERROR;
3837 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3838 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3840 break;
3842 default:
3843 /* So how did we get a CONSTRUCTOR for a scalar type? */
3844 gcc_unreachable ();
3847 if (ret == GS_ERROR)
3848 return GS_ERROR;
3849 else if (want_value)
3851 *expr_p = object;
3852 return GS_OK;
3854 else
3856 /* If we have gimplified both sides of the initializer but have
3857 not emitted an assignment, do so now. */
3858 if (*expr_p)
3860 tree lhs = TREE_OPERAND (*expr_p, 0);
3861 tree rhs = TREE_OPERAND (*expr_p, 1);
3862 gimple init = gimple_build_assign (lhs, rhs);
3863 gimplify_seq_add_stmt (pre_p, init);
3864 *expr_p = NULL;
3867 return GS_ALL_DONE;
3871 /* Given a pointer value OP0, return a simplified version of an
3872 indirection through OP0, or NULL_TREE if no simplification is
3873 possible. This may only be applied to a rhs of an expression.
3874 Note that the resulting type may be different from the type pointed
3875 to in the sense that it is still compatible from the langhooks
3876 point of view. */
3878 static tree
3879 gimple_fold_indirect_ref_rhs (tree t)
3881 return gimple_fold_indirect_ref (t);
3884 /* Subroutine of gimplify_modify_expr to do simplifications of
3885 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
3886 something changes. */
3888 static enum gimplify_status
3889 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
3890 gimple_seq *pre_p, gimple_seq *post_p,
3891 bool want_value)
3893 enum gimplify_status ret = GS_UNHANDLED;
3894 bool changed;
3898 changed = false;
3899 switch (TREE_CODE (*from_p))
3901 case VAR_DECL:
3902 /* If we're assigning from a read-only variable initialized with
3903 a constructor, do the direct assignment from the constructor,
3904 but only if neither source nor target are volatile since this
3905 latter assignment might end up being done on a per-field basis. */
3906 if (DECL_INITIAL (*from_p)
3907 && TREE_READONLY (*from_p)
3908 && !TREE_THIS_VOLATILE (*from_p)
3909 && !TREE_THIS_VOLATILE (*to_p)
3910 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
3912 tree old_from = *from_p;
3913 enum gimplify_status subret;
3915 /* Move the constructor into the RHS. */
3916 *from_p = unshare_expr (DECL_INITIAL (*from_p));
3918 /* Let's see if gimplify_init_constructor will need to put
3919 it in memory. */
3920 subret = gimplify_init_constructor (expr_p, NULL, NULL,
3921 false, true);
3922 if (subret == GS_ERROR)
3924 /* If so, revert the change. */
3925 *from_p = old_from;
3927 else
3929 ret = GS_OK;
3930 changed = true;
3933 break;
3934 case INDIRECT_REF:
3936 /* If we have code like
3938 *(const A*)(A*)&x
3940 where the type of "x" is a (possibly cv-qualified variant
3941 of "A"), treat the entire expression as identical to "x".
3942 This kind of code arises in C++ when an object is bound
3943 to a const reference, and if "x" is a TARGET_EXPR we want
3944 to take advantage of the optimization below. */
3945 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
3946 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3947 if (t)
3949 if (TREE_THIS_VOLATILE (t) != volatile_p)
3951 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3952 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
3953 build_fold_addr_expr (t));
3954 if (REFERENCE_CLASS_P (t))
3955 TREE_THIS_VOLATILE (t) = volatile_p;
3957 *from_p = t;
3958 ret = GS_OK;
3959 changed = true;
3961 break;
3964 case TARGET_EXPR:
3966 /* If we are initializing something from a TARGET_EXPR, strip the
3967 TARGET_EXPR and initialize it directly, if possible. This can't
3968 be done if the initializer is void, since that implies that the
3969 temporary is set in some non-trivial way.
3971 ??? What about code that pulls out the temp and uses it
3972 elsewhere? I think that such code never uses the TARGET_EXPR as
3973 an initializer. If I'm wrong, we'll die because the temp won't
3974 have any RTL. In that case, I guess we'll need to replace
3975 references somehow. */
3976 tree init = TARGET_EXPR_INITIAL (*from_p);
3978 if (init
3979 && !VOID_TYPE_P (TREE_TYPE (init)))
3981 *from_p = init;
3982 ret = GS_OK;
3983 changed = true;
3986 break;
3988 case COMPOUND_EXPR:
3989 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3990 caught. */
3991 gimplify_compound_expr (from_p, pre_p, true);
3992 ret = GS_OK;
3993 changed = true;
3994 break;
3996 case CONSTRUCTOR:
3997 /* If we already made some changes, let the front end have a
3998 crack at this before we break it down. */
3999 if (ret != GS_UNHANDLED)
4000 break;
4001 /* If we're initializing from a CONSTRUCTOR, break this into
4002 individual MODIFY_EXPRs. */
4003 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4004 false);
4006 case COND_EXPR:
4007 /* If we're assigning to a non-register type, push the assignment
4008 down into the branches. This is mandatory for ADDRESSABLE types,
4009 since we cannot generate temporaries for such, but it saves a
4010 copy in other cases as well. */
4011 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4013 /* This code should mirror the code in gimplify_cond_expr. */
4014 enum tree_code code = TREE_CODE (*expr_p);
4015 tree cond = *from_p;
4016 tree result = *to_p;
4018 ret = gimplify_expr (&result, pre_p, post_p,
4019 is_gimple_lvalue, fb_lvalue);
4020 if (ret != GS_ERROR)
4021 ret = GS_OK;
4023 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4024 TREE_OPERAND (cond, 1)
4025 = build2 (code, void_type_node, result,
4026 TREE_OPERAND (cond, 1));
4027 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4028 TREE_OPERAND (cond, 2)
4029 = build2 (code, void_type_node, unshare_expr (result),
4030 TREE_OPERAND (cond, 2));
4032 TREE_TYPE (cond) = void_type_node;
4033 recalculate_side_effects (cond);
4035 if (want_value)
4037 gimplify_and_add (cond, pre_p);
4038 *expr_p = unshare_expr (result);
4040 else
4041 *expr_p = cond;
4042 return ret;
4044 break;
4046 case CALL_EXPR:
4047 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4048 return slot so that we don't generate a temporary. */
4049 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4050 && aggregate_value_p (*from_p, *from_p))
4052 bool use_target;
4054 if (!(rhs_predicate_for (*to_p))(*from_p))
4055 /* If we need a temporary, *to_p isn't accurate. */
4056 use_target = false;
4057 /* It's OK to use the return slot directly unless it's an NRV. */
4058 else if (TREE_CODE (*to_p) == RESULT_DECL
4059 && DECL_NAME (*to_p) == NULL_TREE
4060 && needs_to_live_in_memory (*to_p))
4061 use_target = true;
4062 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4063 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4064 /* Don't force regs into memory. */
4065 use_target = false;
4066 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4067 /* It's OK to use the target directly if it's being
4068 initialized. */
4069 use_target = true;
4070 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4071 /* Always use the target and thus RSO for variable-sized types.
4072 GIMPLE cannot deal with a variable-sized assignment
4073 embedded in a call statement. */
4074 use_target = true;
4075 else if (TREE_CODE (*to_p) != SSA_NAME
4076 && (!is_gimple_variable (*to_p)
4077 || needs_to_live_in_memory (*to_p)))
4078 /* Don't use the original target if it's already addressable;
4079 if its address escapes, and the called function uses the
4080 NRV optimization, a conforming program could see *to_p
4081 change before the called function returns; see c++/19317.
4082 When optimizing, the return_slot pass marks more functions
4083 as safe after we have escape info. */
4084 use_target = false;
4085 else
4086 use_target = true;
4088 if (use_target)
4090 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4091 mark_addressable (*to_p);
4094 break;
4096 case WITH_SIZE_EXPR:
4097 /* Likewise for calls that return an aggregate of non-constant size,
4098 since we would not be able to generate a temporary at all. */
4099 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4101 *from_p = TREE_OPERAND (*from_p, 0);
4102 /* We don't change ret in this case because the
4103 WITH_SIZE_EXPR might have been added in
4104 gimplify_modify_expr, so returning GS_OK would lead to an
4105 infinite loop. */
4106 changed = true;
4108 break;
4110 /* If we're initializing from a container, push the initialization
4111 inside it. */
4112 case CLEANUP_POINT_EXPR:
4113 case BIND_EXPR:
4114 case STATEMENT_LIST:
4116 tree wrap = *from_p;
4117 tree t;
4119 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4120 fb_lvalue);
4121 if (ret != GS_ERROR)
4122 ret = GS_OK;
4124 t = voidify_wrapper_expr (wrap, *expr_p);
4125 gcc_assert (t == *expr_p);
4127 if (want_value)
4129 gimplify_and_add (wrap, pre_p);
4130 *expr_p = unshare_expr (*to_p);
4132 else
4133 *expr_p = wrap;
4134 return GS_OK;
4137 case COMPOUND_LITERAL_EXPR:
4139 tree complit = TREE_OPERAND (*expr_p, 1);
4140 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4141 tree decl = DECL_EXPR_DECL (decl_s);
4142 tree init = DECL_INITIAL (decl);
4144 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4145 into struct T x = { 0, 1, 2 } if the address of the
4146 compound literal has never been taken. */
4147 if (!TREE_ADDRESSABLE (complit)
4148 && !TREE_ADDRESSABLE (decl)
4149 && init)
4151 *expr_p = copy_node (*expr_p);
4152 TREE_OPERAND (*expr_p, 1) = init;
4153 return GS_OK;
4157 default:
4158 break;
4161 while (changed);
4163 return ret;
 4167 /* Return true if T looks like a valid GIMPLE statement. */
/* NOTE(review): only the outermost TREE_CODE of T is inspected here;
   operands and nested expressions are not validated.  */
 4169 static bool
 4170 is_gimple_stmt (tree t)
 4172 const enum tree_code code = TREE_CODE (t);
 4174 switch (code)
 4176 case NOP_EXPR:
 4177 /* The only valid NOP_EXPR is the empty statement. */
 4178 return IS_EMPTY_STMT (t);
 4180 case BIND_EXPR:
 4181 case COND_EXPR:
 4182 /* These are only valid if they're void. */
/* A NULL type is treated like void here, so a typeless BIND_EXPR or
   COND_EXPR also counts as a statement.  */
 4183 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
 4185 case SWITCH_EXPR:
 4186 case GOTO_EXPR:
 4187 case RETURN_EXPR:
 4188 case LABEL_EXPR:
 4189 case CASE_LABEL_EXPR:
 4190 case TRY_CATCH_EXPR:
 4191 case TRY_FINALLY_EXPR:
 4192 case EH_FILTER_EXPR:
 4193 case CATCH_EXPR:
 4194 case ASM_EXPR:
 4195 case STATEMENT_LIST:
 4196 case OMP_PARALLEL:
 4197 case OMP_FOR:
 4198 case OMP_SIMD:
 4199 case CILK_SIMD:
 4200 case OMP_DISTRIBUTE:
 4201 case OMP_SECTIONS:
 4202 case OMP_SECTION:
 4203 case OMP_SINGLE:
 4204 case OMP_MASTER:
 4205 case OMP_TASKGROUP:
 4206 case OMP_ORDERED:
 4207 case OMP_CRITICAL:
 4208 case OMP_TASK:
 4209 /* These are always void. */
 4210 return true;
 4212 case CALL_EXPR:
 4213 case MODIFY_EXPR:
 4214 case PREDICT_EXPR:
 4215 /* These are valid regardless of their type. */
 4216 return true;
 4218 default:
/* Anything else is an expression, not a statement.  */
 4219 return false;
 4224 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
 4225 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
 4226 DECL_GIMPLE_REG_P set.
 4228 IMPORTANT NOTE: This promotion is performed by introducing a load of the
 4229 other, unmodified part of the complex object just before the total store.
 4230 As a consequence, if the object is still uninitialized, an undefined value
 4231 will be loaded into a register, which may result in a spurious exception
 4232 if the register is floating-point and the value happens to be a signaling
 4233 NaN for example. Then the fully-fledged complex operations lowering pass
 4234 followed by a DCE pass are necessary in order to fix things up. */
 4236 static enum gimplify_status
 4237 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
 4238 bool want_value)
 4240 enum tree_code code, ocode;
 4241 tree lhs, rhs, new_rhs, other, realpart, imagpart;
/* LHS is REALPART_EXPR or IMAGPART_EXPR of the complex variable; peel
   it off so LHS becomes the whole complex object.  */
 4243 lhs = TREE_OPERAND (*expr_p, 0);
 4244 rhs = TREE_OPERAND (*expr_p, 1);
 4245 code = TREE_CODE (lhs);
 4246 lhs = TREE_OPERAND (lhs, 0);
/* Build a load of the part NOT being stored (the "other" part) and
   force it into a formal temporary emitted on PRE_P.  */
 4248 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
 4249 other = build1 (ocode, TREE_TYPE (rhs), lhs);
/* Suppress uninitialized-use warnings for this deliberate read of a
   possibly-uninitialized part (see the IMPORTANT NOTE above).  */
 4250 TREE_NO_WARNING (other) = 1;
 4251 other = get_formal_tmp_var (other, pre_p);
 4253 realpart = code == REALPART_EXPR ? rhs : other;
 4254 imagpart = code == REALPART_EXPR ? other : rhs;
/* Fold to a COMPLEX_CST when both parts are constant, otherwise build
   a COMPLEX_EXPR combining the new part with the preserved one.  */
 4256 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
 4257 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
 4258 else
 4259 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
 4261 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
/* The value of the original partial store is the stored part itself.  */
 4262 *expr_p = (want_value) ? rhs : NULL_TREE;
 4264 return GS_ALL_DONE;
 4267 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
 4269 modify_expr
 4270 : varname '=' rhs
 4271 | '*' ID '=' rhs
 4273 PRE_P points to the list where side effects that must happen before
 4274 *EXPR_P should be stored.
 4276 POST_P points to the list where side effects that must happen after
 4277 *EXPR_P should be stored.
 4279 WANT_VALUE is nonzero iff we want to use the value of this expression
 4280 in another expression. */
 4282 static enum gimplify_status
 4283 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
 4284 bool want_value)
 4286 tree *from_p = &TREE_OPERAND (*expr_p, 1);
 4287 tree *to_p = &TREE_OPERAND (*expr_p, 0);
 4288 enum gimplify_status ret = GS_UNHANDLED;
 4289 gimple assign;
 4290 location_t loc = EXPR_LOCATION (*expr_p);
 4291 gimple_stmt_iterator gsi;
 4293 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
 4294 || TREE_CODE (*expr_p) == INIT_EXPR);
/* Cilk Plus: if the RHS hides a _Cilk_spawn, let the language hooks
   rewrite the whole assignment.  */
 4296 if (fn_contains_cilk_spawn_p (cfun)
 4297 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p)
 4298 && !seen_error ())
 4299 return (enum gimplify_status)
 4300 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p, post_p)
 4302 /* Trying to simplify a clobber using normal logic doesn't work,
 4303 so handle it here. */
 4304 if (TREE_CLOBBER_P (*from_p))
 4306 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
 4307 if (ret == GS_ERROR)
 4308 return ret;
 4309 gcc_assert (!want_value
 4310 && (TREE_CODE (*to_p) == VAR_DECL
 4311 || TREE_CODE (*to_p) == MEM_REF));
 4312 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
 4313 *expr_p = NULL;
 4314 return GS_ALL_DONE;
 4317 /* Insert pointer conversions required by the middle-end that are not
 4318 required by the frontend. This fixes middle-end type checking for
 4319 for example gcc.dg/redecl-6.c. */
 4320 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
 4322 STRIP_USELESS_TYPE_CONVERSION (*from_p);
 4323 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
 4324 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
 4327 /* See if any simplifications can be done based on what the RHS is. */
 4328 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
 4329 want_value);
 4330 if (ret != GS_UNHANDLED)
 4331 return ret;
 4333 /* For zero sized types only gimplify the left hand side and right hand
 4334 side as statements and throw away the assignment. Do this after
 4335 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
 4336 types properly. */
 4337 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
 4339 gimplify_stmt (from_p, pre_p);
 4340 gimplify_stmt (to_p, pre_p);
 4341 *expr_p = NULL_TREE;
 4342 return GS_ALL_DONE;
 4345 /* If the value being copied is of variable width, compute the length
 4346 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
 4347 before gimplifying any of the operands so that we can resolve any
 4348 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
 4349 the size of the expression to be copied, not of the destination, so
 4350 that is what we must do here. */
 4351 maybe_with_size_expr (from_p);
 4353 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
 4354 if (ret == GS_ERROR)
 4355 return ret;
 4357 /* As a special case, we have to temporarily allow for assignments
 4358 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
 4359 a toplevel statement, when gimplifying the GENERIC expression
 4360 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
 4361 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
 4363 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
 4364 prevent gimplify_expr from trying to create a new temporary for
 4365 foo's LHS, we tell it that it should only gimplify until it
 4366 reaches the CALL_EXPR. On return from gimplify_expr, the newly
 4367 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
 4368 and all we need to do here is set 'a' to be its LHS. */
 4369 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
 4370 fb_rvalue);
 4371 if (ret == GS_ERROR)
 4372 return ret;
 4374 /* Now see if the above changed *from_p to something we handle specially. */
 4375 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
 4376 want_value);
 4377 if (ret != GS_UNHANDLED)
 4378 return ret;
 4380 /* If we've got a variable sized assignment between two lvalues (i.e. does
 4381 not involve a call), then we can make things a bit more straightforward
 4382 by converting the assignment to memcpy or memset. */
 4383 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
 4385 tree from = TREE_OPERAND (*from_p, 0);
 4386 tree size = TREE_OPERAND (*from_p, 1);
 4388 if (TREE_CODE (from) == CONSTRUCTOR)
 4389 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
 4391 if (is_gimple_addressable (from))
 4393 *from_p = from;
 4394 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
 4395 pre_p);
 4399 /* Transform partial stores to non-addressable complex variables into
 4400 total stores. This allows us to use real instead of virtual operands
 4401 for these variables, which improves optimization. */
 4402 if ((TREE_CODE (*to_p) == REALPART_EXPR
 4403 || TREE_CODE (*to_p) == IMAGPART_EXPR)
 4404 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
 4405 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
 4407 /* Try to alleviate the effects of the gimplification creating artificial
 4408 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
/* Give the anonymous temporary the destination's name and record the
   destination as its DEBUG_EXPR so debug info still mentions it.  */
 4409 if (!gimplify_ctxp->into_ssa
 4410 && TREE_CODE (*from_p) == VAR_DECL
 4411 && DECL_IGNORED_P (*from_p)
 4412 && DECL_P (*to_p)
 4413 && !DECL_IGNORED_P (*to_p))
 4415 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
 4416 DECL_NAME (*from_p)
 4417 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
 4418 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
 4419 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
/* If the caller wants the value and the LHS is volatile, evaluate the
   RHS into a temporary now; the want_value return below then uses that
   temporary instead of re-reading the volatile LHS.  */
 4422 if (want_value && TREE_THIS_VOLATILE (*to_p))
 4423 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
 4425 if (TREE_CODE (*from_p) == CALL_EXPR)
 4427 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
 4428 instead of a GIMPLE_ASSIGN. */
 4429 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
 4430 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
 4431 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
 4432 assign = gimple_build_call_from_tree (*from_p);
 4433 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
 4434 notice_special_calls (assign);
/* A call that cannot return never produces a value; leave its LHS
   unset in that case.  */
 4435 if (!gimple_call_noreturn_p (assign))
 4436 gimple_call_set_lhs (assign, *to_p);
 4438 else
 4440 assign = gimple_build_assign (*to_p, *from_p);
 4441 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
 4444 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
 4446 /* We should have got an SSA name from the start. */
 4447 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
 4450 gimplify_seq_add_stmt (pre_p, assign);
 4451 gsi = gsi_last (*pre_p);
 4452 /* Don't fold stmts inside of target construct. We'll do it
 4453 during omplower pass instead. */
 4454 struct gimplify_omp_ctx *ctx;
 4455 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
 4456 if (ctx->region_type == ORT_TARGET)
 4457 break;
 4458 if (ctx == NULL)
 4459 fold_stmt (&gsi);
 4461 if (want_value)
 4463 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
 4464 return GS_OK;
 4466 else
 4467 *expr_p = NULL;
 4469 return GS_ALL_DONE;
 4472 /* Gimplify a comparison between two variable-sized objects. Do this
 4473 with a call to BUILT_IN_MEMCMP. */
/* The original comparison code (e.g. EQ_EXPR/NE_EXPR) is preserved and
   applied to "memcmp (...) <code> 0".  */
 4475 static enum gimplify_status
 4476 gimplify_variable_sized_compare (tree *expr_p)
 4478 location_t loc = EXPR_LOCATION (*expr_p);
 4479 tree op0 = TREE_OPERAND (*expr_p, 0);
 4480 tree op1 = TREE_OPERAND (*expr_p, 1);
 4481 tree t, arg, dest, src, expr;
/* The byte count comes from the (variable) size of OP0's type; resolve
   any PLACEHOLDER_EXPRs in it against OP0 itself.  */
 4483 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
 4484 arg = unshare_expr (arg);
 4485 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
 4486 src = build_fold_addr_expr_loc (loc, op1);
 4487 dest = build_fold_addr_expr_loc (loc, op0);
 4488 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
 4489 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
 4491 expr
 4492 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
 4493 SET_EXPR_LOCATION (expr, loc);
 4494 *expr_p = expr;
 4496 return GS_OK;
 4499 /* Gimplify a comparison between two aggregate objects of integral scalar
 4500 mode as a comparison between the bitwise equivalent scalar values. */
/* Both operands are reinterpreted (VIEW_CONVERT_EXPR) as the unsigned
   integer type of the same machine mode, then compared directly.  */
 4502 static enum gimplify_status
 4503 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
 4505 location_t loc = EXPR_LOCATION (*expr_p);
 4506 tree op0 = TREE_OPERAND (*expr_p, 0);
 4507 tree op1 = TREE_OPERAND (*expr_p, 1);
 4509 tree type = TREE_TYPE (op0);
 4510 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
 4512 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
 4513 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
 4515 *expr_p
 4516 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
 4518 return GS_OK;
 4521 /* Gimplify an expression sequence. This function gimplifies each
 4522 expression and rewrites the original expression with the last
 4523 expression of the sequence in GIMPLE form.
 4525 PRE_P points to the list where the side effects for all the
 4526 expressions in the sequence will be emitted.
 4528 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
 4530 static enum gimplify_status
 4531 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
 4533 tree t = *expr_p;
/* do-while walk down the right spine of COMPOUND_EXPRs: each LHS is
   gimplified as a statement (recursing for nested COMPOUND_EXPRs on
   the left), leaving T as the final value-producing expression.  */
 4537 tree *sub_p = &TREE_OPERAND (t, 0);
 4539 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
 4540 gimplify_compound_expr (sub_p, pre_p, false);
 4541 else
 4542 gimplify_stmt (sub_p, pre_p);
 4544 t = TREE_OPERAND (t, 1);
 4546 while (TREE_CODE (t) == COMPOUND_EXPR);
 4548 *expr_p = t;
/* If the value is needed, let the caller keep gimplifying *EXPR_P;
   otherwise gimplify the tail as a statement too and finish.  */
 4549 if (want_value)
 4550 return GS_OK;
 4551 else
 4553 gimplify_stmt (expr_p, pre_p);
 4554 return GS_ALL_DONE;
 4558 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
 4559 gimplify. After gimplification, EXPR_P will point to a new temporary
 4560 that holds the original value of the SAVE_EXPR node.
 4562 PRE_P points to the list where side effects that must happen before
 4563 *EXPR_P should be stored. */
 4565 static enum gimplify_status
 4566 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 4568 enum gimplify_status ret = GS_ALL_DONE;
 4569 tree val;
 4571 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
 4572 val = TREE_OPERAND (*expr_p, 0);
 4574 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
 4575 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
 4577 /* The operand may be a void-valued expression such as SAVE_EXPRs
 4578 generated by the Java frontend for class initialization. It is
 4579 being executed only for its side-effects. */
 4580 if (TREE_TYPE (val) == void_type_node)
 4582 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
 4583 is_gimple_stmt, fb_none);
/* Void SAVE_EXPR: no value to save; *EXPR_P ends up NULL below.  */
 4584 val = NULL;
 4586 else
 4587 val = get_initialized_tmp_var (val, pre_p, post_p);
/* Mark the SAVE_EXPR resolved so later occurrences reuse the same
   temporary instead of re-evaluating the operand.  */
 4589 TREE_OPERAND (*expr_p, 0) = val;
 4590 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
 4593 *expr_p = val;
 4595 return ret;
 4598 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
 4600 unary_expr
 4601 : ...
 4602 | '&' varname
 4605 PRE_P points to the list where side effects that must happen before
 4606 *EXPR_P should be stored.
 4608 POST_P points to the list where side effects that must happen after
 4609 *EXPR_P should be stored. */
 4611 static enum gimplify_status
 4612 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 4614 tree expr = *expr_p;
 4615 tree op0 = TREE_OPERAND (expr, 0);
 4616 enum gimplify_status ret;
 4617 location_t loc = EXPR_LOCATION (*expr_p);
 4619 switch (TREE_CODE (op0))
 4621 case INDIRECT_REF:
 4622 do_indirect_ref:
 4623 /* Check if we are dealing with an expression of the form '&*ptr'.
 4624 While the front end folds away '&*ptr' into 'ptr', these
 4625 expressions may be generated internally by the compiler (e.g.,
 4626 builtins like __builtin_va_end). */
 4627 /* Caution: the silent array decomposition semantics we allow for
 4628 ADDR_EXPR means we can't always discard the pair. */
 4629 /* Gimplification of the ADDR_EXPR operand may drop
 4630 cv-qualification conversions, so make sure we add them if
 4631 needed. */
 4633 tree op00 = TREE_OPERAND (op0, 0);
 4634 tree t_expr = TREE_TYPE (expr);
 4635 tree t_op00 = TREE_TYPE (op00);
 4637 if (!useless_type_conversion_p (t_expr, t_op00))
 4638 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
 4639 *expr_p = op00;
 4640 ret = GS_OK;
 4642 break;
 4644 case VIEW_CONVERT_EXPR:
 4645 /* Take the address of our operand and then convert it to the type of
 4646 this ADDR_EXPR.
 4648 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
 4649 all clear. The impact of this transformation is even less clear. */
 4651 /* If the operand is a useless conversion, look through it. Doing so
 4652 guarantees that the ADDR_EXPR and its operand will remain of the
 4653 same type. */
 4654 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
 4655 op0 = TREE_OPERAND (op0, 0);
/* Rewrite '&VIEW_CONVERT_EXPR<T>(X)' as '(T *) &X'.  */
 4657 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
 4658 build_fold_addr_expr_loc (loc,
 4659 TREE_OPERAND (op0, 0)));
 4660 ret = GS_OK;
 4661 break;
 4663 default:
 4664 /* We use fb_either here because the C frontend sometimes takes
 4665 the address of a call that returns a struct; see
 4666 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
 4667 the implied temporary explicit. */
 4669 /* Make the operand addressable. */
 4670 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
 4671 is_gimple_addressable, fb_either);
 4672 if (ret == GS_ERROR)
 4673 break;
 4675 /* Then mark it. Beware that it may not be possible to do so directly
 4676 if a temporary has been created by the gimplification. */
 4677 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
 4679 op0 = TREE_OPERAND (expr, 0);
 4681 /* For various reasons, the gimplification of the expression
 4682 may have made a new INDIRECT_REF. */
 4683 if (TREE_CODE (op0) == INDIRECT_REF)
 4684 goto do_indirect_ref;
 4686 mark_addressable (TREE_OPERAND (expr, 0));
 4688 /* The FEs may end up building ADDR_EXPRs early on a decl with
 4689 an incomplete type. Re-build ADDR_EXPRs in canonical form
 4690 here. */
 4691 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
 4692 *expr_p = build_fold_addr_expr (op0);
 4694 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
 4695 recompute_tree_invariant_for_addr_expr (*expr_p);
 4697 /* If we re-built the ADDR_EXPR add a conversion to the original type
 4698 if required. */
 4699 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
 4700 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
 4702 break;
 4705 return ret;
 4708 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
 4709 value; output operands should be a gimple lvalue. */
 4711 static enum gimplify_status
 4712 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 4714 tree expr;
 4715 int noutputs;
 4716 const char **oconstraints;
 4717 int i;
 4718 tree link;
 4719 const char *constraint;
 4720 bool allows_mem, allows_reg, is_inout;
 4721 enum gimplify_status ret, tret;
 4722 gimple stmt;
 4723 vec<tree, va_gc> *inputs;
 4724 vec<tree, va_gc> *outputs;
 4725 vec<tree, va_gc> *clobbers;
 4726 vec<tree, va_gc> *labels;
 4727 tree link_next;
 4729 expr = *expr_p;
 4730 noutputs = list_length (ASM_OUTPUTS (expr));
/* Stack-allocated array of output constraint strings, consulted later
   when parsing input constraints that refer to outputs by number.  */
 4731 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
 4733 inputs = NULL;
 4734 outputs = NULL;
 4735 clobbers = NULL;
 4736 labels = NULL;
 4738 ret = GS_ALL_DONE;
 4739 link_next = NULL_TREE;
 4740 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
 4742 bool ok;
 4743 size_t constraint_len;
 4745 link_next = TREE_CHAIN (link);
 4747 oconstraints[i]
 4748 = constraint
 4749 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
 4750 constraint_len = strlen (constraint);
 4751 if (constraint_len == 0)
 4752 continue;
 4754 ok = parse_output_constraint (&constraint, i, 0, 0,
 4755 &allows_mem, &allows_reg, &is_inout);
 4756 if (!ok)
 4758 ret = GS_ERROR;
 4759 is_inout = false;
 4762 if (!allows_reg && allows_mem)
 4763 mark_addressable (TREE_VALUE (link));
 4765 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
 4766 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
 4767 fb_lvalue | fb_mayfail);
 4768 if (tret == GS_ERROR)
 4770 error ("invalid lvalue in asm output %d", i);
 4771 ret = tret;
/* Detach the TREE_LIST node from the chain and push it into the
   outputs vector for gimple_build_asm_vec below.  */
 4774 vec_safe_push (outputs, link);
 4775 TREE_CHAIN (link) = NULL_TREE;
 4777 if (is_inout)
 4779 /* An input/output operand. To give the optimizers more
 4780 flexibility, split it into separate input and output
 4781 operands. */
 4782 tree input;
 4783 char buf[10];
 4785 /* Turn the in/out constraint into an output constraint. */
 4786 char *p = xstrdup (constraint);
 4787 p[0] = '=';
 4788 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
 4790 /* And add a matching input constraint. */
 4791 if (allows_reg)
 4793 sprintf (buf, "%d", i);
 4795 /* If there are multiple alternatives in the constraint,
 4796 handle each of them individually. Those that allow register
 4797 will be replaced with operand number, the others will stay
 4798 unchanged. */
 4799 if (strchr (p, ',') != NULL)
/* First pass: compute an upper bound LEN on the rewritten
   constraint's length.  */
 4801 size_t len = 0, buflen = strlen (buf);
 4802 char *beg, *end, *str, *dst;
 4804 for (beg = p + 1;;)
 4806 end = strchr (beg, ',');
 4807 if (end == NULL)
 4808 end = strchr (beg, '\0');
 4809 if ((size_t) (end - beg) < buflen)
 4810 len += buflen + 1;
 4811 else
 4812 len += end - beg + 1;
 4813 if (*end)
 4814 beg = end + 1;
 4815 else
 4816 break;
/* Second pass: rewrite each alternative, substituting the
   operand number for register-capable alternatives.  */
 4819 str = (char *) alloca (len);
 4820 for (beg = p + 1, dst = str;;)
 4822 const char *tem;
 4823 bool mem_p, reg_p, inout_p;
 4825 end = strchr (beg, ',');
 4826 if (end)
 4827 *end = '\0';
 4828 beg[-1] = '=';
 4829 tem = beg - 1;
 4830 parse_output_constraint (&tem, i, 0, 0,
 4831 &mem_p, &reg_p, &inout_p);
 4832 if (dst != str)
 4833 *dst++ = ',';
 4834 if (reg_p)
 4836 memcpy (dst, buf, buflen);
 4837 dst += buflen;
 4839 else
 4841 if (end)
 4842 len = end - beg;
 4843 else
 4844 len = strlen (beg);
 4845 memcpy (dst, beg, len);
 4846 dst += len;
 4848 if (end)
 4849 beg = end + 1;
 4850 else
 4851 break;
 4853 *dst = '\0';
 4854 input = build_string (dst - str, str);
 4856 else
 4857 input = build_string (strlen (buf), buf);
 4859 else
 4860 input = build_string (constraint_len - 1, constraint + 1);
 4862 free (p);
 4864 input = build_tree_list (build_tree_list (NULL_TREE, input),
 4865 unshare_expr (TREE_VALUE (link)));
 4866 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
/* NOTE(review): I keeps counting across outputs and inputs, so the
   %d in the diagnostics below numbers operands overall — presumably
   intentional; matches the user-visible operand numbering.  */
 4870 link_next = NULL_TREE;
 4871 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
 4873 link_next = TREE_CHAIN (link);
 4874 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
 4875 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
 4876 oconstraints, &allows_mem, &allows_reg);
 4878 /* If we can't make copies, we can only accept memory. */
 4879 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
 4881 if (allows_mem)
 4882 allows_reg = 0;
 4883 else
 4885 error ("impossible constraint in %<asm%>");
 4886 error ("non-memory input %d must stay in memory", i);
 4887 return GS_ERROR;
 4891 /* If the operand is a memory input, it should be an lvalue. */
 4892 if (!allows_reg && allows_mem)
 4894 tree inputv = TREE_VALUE (link);
 4895 STRIP_NOPS (inputv);
 4896 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
 4897 || TREE_CODE (inputv) == PREINCREMENT_EXPR
 4898 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
 4899 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
 4900 TREE_VALUE (link) = error_mark_node;
 4901 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
 4902 is_gimple_lvalue, fb_lvalue | fb_mayfail);
 4903 mark_addressable (TREE_VALUE (link));
 4904 if (tret == GS_ERROR)
 4906 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
 4907 input_location = EXPR_LOCATION (TREE_VALUE (link));
 4908 error ("memory input %d is not directly addressable", i);
 4909 ret = tret;
 4912 else
 4914 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
 4915 is_gimple_asm_val, fb_rvalue);
 4916 if (tret == GS_ERROR)
 4917 ret = tret;
 4920 TREE_CHAIN (link) = NULL_TREE;
 4921 vec_safe_push (inputs, link);
/* Clobbers and labels need no gimplification; just detach and
   collect them.  */
 4924 link_next = NULL_TREE;
 4925 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
 4927 link_next = TREE_CHAIN (link);
 4928 TREE_CHAIN (link) = NULL_TREE;
 4929 vec_safe_push (clobbers, link);
 4932 link_next = NULL_TREE;
 4933 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
 4935 link_next = TREE_CHAIN (link);
 4936 TREE_CHAIN (link) = NULL_TREE;
 4937 vec_safe_push (labels, link);
 4940 /* Do not add ASMs with errors to the gimple IL stream. */
 4941 if (ret != GS_ERROR)
 4943 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
 4944 inputs, outputs, clobbers, labels);
 4946 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
 4947 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
 4949 gimplify_seq_add_stmt (pre_p, stmt);
 4952 return ret;
 4955 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
 4956 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
 4957 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
 4958 return to this function.
 4960 FIXME should we complexify the prequeue handling instead? Or use flags
 4961 for all the cleanups and let the optimizer tighten them up? The current
 4962 code seems pretty fragile; it will break on a cleanup within any
 4963 non-conditional nesting. But any such nesting would be broken, anyway;
 4964 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
 4965 and continues out of it. We can do that at the RTL level, though, so
 4966 having an optimizer to tighten up try/finally regions would be a Good
 4967 Thing. */
 4969 static enum gimplify_status
 4970 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
 4972 gimple_stmt_iterator iter;
 4973 gimple_seq body_sequence = NULL;
 4975 tree temp = voidify_wrapper_expr (*expr_p, NULL);
 4977 /* We only care about the number of conditions between the innermost
 4978 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
 4979 any cleanups collected outside the CLEANUP_POINT_EXPR. */
 4980 int old_conds = gimplify_ctxp->conditions;
 4981 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
 4982 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
 4983 gimplify_ctxp->conditions = 0;
 4984 gimplify_ctxp->conditional_cleanups = NULL;
 4985 gimplify_ctxp->in_cleanup_point_expr = true;
 4987 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
/* Restore the saved gimplify-context state now that the body has
   been gimplified into BODY_SEQUENCE.  */
 4989 gimplify_ctxp->conditions = old_conds;
 4990 gimplify_ctxp->conditional_cleanups = old_cleanups;
 4991 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
/* Walk the gimplified body and expand each GIMPLE_WITH_CLEANUP_EXPR
   into a GIMPLE_TRY protecting the statements that follow it.  */
 4993 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
 4995 gimple wce = gsi_stmt (iter);
 4997 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
 4999 if (gsi_one_before_end_p (iter))
/* Nothing follows the cleanup marker: there is nothing to
   protect, so emit the cleanup inline (unless it is EH-only,
   in which case it is simply dropped).  */
 5001 /* Note that gsi_insert_seq_before and gsi_remove do not
 5002 scan operands, unlike some other sequence mutators. */
 5003 if (!gimple_wce_cleanup_eh_only (wce))
 5004 gsi_insert_seq_before_without_update (&iter,
 5005 gimple_wce_cleanup (wce),
 5006 GSI_SAME_STMT);
 5007 gsi_remove (&iter, true);
 5008 break;
 5010 else
 5012 gimple_statement_try *gtry;
 5013 gimple_seq seq;
 5014 enum gimple_try_flags kind;
 5016 if (gimple_wce_cleanup_eh_only (wce))
 5017 kind = GIMPLE_TRY_CATCH;
 5018 else
 5019 kind = GIMPLE_TRY_FINALLY;
 5020 seq = gsi_split_seq_after (iter);
 5022 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
 5023 /* Do not use gsi_replace here, as it may scan operands.
 5024 We want to do a simple structural modification only. */
 5025 gsi_set_stmt (&iter, gtry);
/* Continue scanning inside the new try body for further
   cleanup markers.  */
 5026 iter = gsi_start (gtry->eval);
 5029 else
 5030 gsi_next (&iter);
 5033 gimplify_seq_add_seq (pre_p, body_sequence);
 5034 if (temp)
 5036 *expr_p = temp;
 5037 return GS_OK;
 5039 else
 5041 *expr_p = NULL;
 5042 return GS_ALL_DONE;
 5046 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
 5047 is the cleanup action required. EH_ONLY is true if the cleanup should
 5048 only be executed if an exception is thrown, not on normal exit. */
 5050 static void
 5051 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
 5053 gimple wce;
 5054 gimple_seq cleanup_stmts = NULL;
 5056 /* Errors can result in improperly nested cleanups. Which results in
 5057 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
 5058 if (seen_error ())
 5059 return;
 5061 if (gimple_conditional_context ())
 5063 /* If we're in a conditional context, this is more complex. We only
 5064 want to run the cleanup if we actually ran the initialization that
 5065 necessitates it, but we want to run it after the end of the
 5066 conditional context. So we wrap the try/finally around the
 5067 condition and use a flag to determine whether or not to actually
 5068 run the destructor. Thus
 5070 test ? f(A()) : 0
 5072 becomes (approximately)
 5074 flag = 0;
 5075 try {
 5076 if (test) { A::A(temp); flag = 1; val = f(temp); }
 5077 else { val = 0; }
 5078 } finally {
 5079 if (flag) A::~A(temp);
 5083 tree flag = create_tmp_var (boolean_type_node, "cleanup");
 5084 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
 5085 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
/* Guard the cleanup on the flag and gimplify it into a sequence
   hung off a cleanup marker.  */
 5087 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
 5088 gimplify_stmt (&cleanup, &cleanup_stmts);
 5089 wce = gimple_build_wce (cleanup_stmts);
/* flag = false and the marker go on the enclosing conditional's
   cleanup list; flag = true is emitted here, where the object is
   actually constructed.  */
 5091 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
 5092 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
 5093 gimplify_seq_add_stmt (pre_p, ftrue);
 5095 /* Because of this manipulation, and the EH edges that jump
 5096 threading cannot redirect, the temporary (VAR) will appear
 5097 to be used uninitialized. Don't warn. */
 5098 TREE_NO_WARNING (var) = 1;
 5100 else
 5102 gimplify_stmt (&cleanup, &cleanup_stmts);
 5103 wce = gimple_build_wce (cleanup_stmts);
 5104 gimple_wce_set_cleanup_eh_only (wce, eh_only);
 5105 gimplify_seq_add_stmt (pre_p, wce);
5109 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5111 static enum gimplify_status
5112 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5114 tree targ = *expr_p;
5115 tree temp = TARGET_EXPR_SLOT (targ);
5116 tree init = TARGET_EXPR_INITIAL (targ);
5117 enum gimplify_status ret;
5119 if (init)
5121 tree cleanup = NULL_TREE;
5123 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5124 to the temps list. Handle also variable length TARGET_EXPRs. */
5125 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5127 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5128 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5129 gimplify_vla_decl (temp, pre_p);
5131 else
5132 gimple_add_tmp_var (temp);
5134 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5135 expression is supposed to initialize the slot. */
5136 if (VOID_TYPE_P (TREE_TYPE (init)))
5137 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5138 else
5140 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5141 init = init_expr;
5142 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5143 init = NULL;
5144 ggc_free (init_expr);
5146 if (ret == GS_ERROR)
5148 /* PR c++/28266 Make sure this is expanded only once. */
5149 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5150 return GS_ERROR;
5152 if (init)
5153 gimplify_and_add (init, pre_p);
5155 /* If needed, push the cleanup for the temp. */
5156 if (TARGET_EXPR_CLEANUP (targ))
5158 if (CLEANUP_EH_ONLY (targ))
5159 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5160 CLEANUP_EH_ONLY (targ), pre_p);
5161 else
5162 cleanup = TARGET_EXPR_CLEANUP (targ);
5165 /* Add a clobber for the temporary going out of scope, like
5166 gimplify_bind_expr. */
5167 if (gimplify_ctxp->in_cleanup_point_expr
5168 && needs_to_live_in_memory (temp)
5169 && flag_stack_reuse == SR_ALL)
5171 tree clobber = build_constructor (TREE_TYPE (temp),
5172 NULL);
5173 TREE_THIS_VOLATILE (clobber) = true;
5174 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5175 if (cleanup)
5176 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5177 clobber);
5178 else
5179 cleanup = clobber;
5182 if (cleanup)
5183 gimple_push_cleanup (temp, cleanup, false, pre_p);
5185 /* Only expand this once. */
5186 TREE_OPERAND (targ, 3) = init;
5187 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5189 else
5190 /* We should have expanded this before. */
5191 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5193 *expr_p = temp;
5194 return GS_OK;
5197 /* Gimplification of expression trees. */
5199 /* Gimplify an expression which appears at statement context. The
5200 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5201 NULL, a new sequence is allocated.
5203 Return true if we actually added a statement to the queue. */
5205 bool
5206 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5208 gimple_seq_node last;
5210 last = gimple_seq_last (*seq_p);
5211 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5212 return last != gimple_seq_last (*seq_p);
5215 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5216 to CTX. If entries already exist, force them to be some flavor of private.
5217 If there is no enclosing parallel, do nothing. */
5219 void
5220 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5222 splay_tree_node n;
5224 if (decl == NULL || !DECL_P (decl))
5225 return;
5229 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5230 if (n != NULL)
5232 if (n->value & GOVD_SHARED)
5233 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5234 else if (n->value & GOVD_MAP)
5235 n->value |= GOVD_MAP_TO_ONLY;
5236 else
5237 return;
5239 else if (ctx->region_type == ORT_TARGET)
5240 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5241 else if (ctx->region_type != ORT_WORKSHARE
5242 && ctx->region_type != ORT_SIMD
5243 && ctx->region_type != ORT_TARGET_DATA)
5244 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5246 ctx = ctx->outer_context;
5248 while (ctx);
5251 /* Similarly for each of the type sizes of TYPE. */
5253 static void
5254 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5256 if (type == NULL || type == error_mark_node)
5257 return;
5258 type = TYPE_MAIN_VARIANT (type);
5260 if (pointer_set_insert (ctx->privatized_types, type))
5261 return;
5263 switch (TREE_CODE (type))
5265 case INTEGER_TYPE:
5266 case ENUMERAL_TYPE:
5267 case BOOLEAN_TYPE:
5268 case REAL_TYPE:
5269 case FIXED_POINT_TYPE:
5270 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5271 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5272 break;
5274 case ARRAY_TYPE:
5275 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5276 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5277 break;
5279 case RECORD_TYPE:
5280 case UNION_TYPE:
5281 case QUAL_UNION_TYPE:
5283 tree field;
5284 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5285 if (TREE_CODE (field) == FIELD_DECL)
5287 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5288 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5291 break;
5293 case POINTER_TYPE:
5294 case REFERENCE_TYPE:
5295 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5296 break;
5298 default:
5299 break;
5302 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5303 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5304 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5307 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5309 static void
5310 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5312 splay_tree_node n;
5313 unsigned int nflags;
5314 tree t;
5316 if (error_operand_p (decl))
5317 return;
5319 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5320 there are constructors involved somewhere. */
5321 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5322 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5323 flags |= GOVD_SEEN;
5325 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5326 if (n != NULL && n->value != GOVD_ALIGNED)
5328 /* We shouldn't be re-adding the decl with the same data
5329 sharing class. */
5330 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5331 /* The only combination of data sharing classes we should see is
5332 FIRSTPRIVATE and LASTPRIVATE. */
5333 nflags = n->value | flags;
5334 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5335 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5336 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5337 n->value = nflags;
5338 return;
5341 /* When adding a variable-sized variable, we have to handle all sorts
5342 of additional bits of data: the pointer replacement variable, and
5343 the parameters of the type. */
5344 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5346 /* Add the pointer replacement variable as PRIVATE if the variable
5347 replacement is private, else FIRSTPRIVATE since we'll need the
5348 address of the original variable either for SHARED, or for the
5349 copy into or out of the context. */
5350 if (!(flags & GOVD_LOCAL))
5352 nflags = flags & GOVD_MAP
5353 ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
5354 : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5355 nflags |= flags & GOVD_SEEN;
5356 t = DECL_VALUE_EXPR (decl);
5357 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5358 t = TREE_OPERAND (t, 0);
5359 gcc_assert (DECL_P (t));
5360 omp_add_variable (ctx, t, nflags);
5363 /* Add all of the variable and type parameters (which should have
5364 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5365 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5366 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5367 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5369 /* The variable-sized variable itself is never SHARED, only some form
5370 of PRIVATE. The sharing would take place via the pointer variable
5371 which we remapped above. */
5372 if (flags & GOVD_SHARED)
5373 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5374 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5376 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5377 alloca statement we generate for the variable, so make sure it
5378 is available. This isn't automatically needed for the SHARED
5379 case, since we won't be allocating local storage then.
5380 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5381 in this case omp_notice_variable will be called later
5382 on when it is gimplified. */
5383 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5384 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5385 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5387 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5388 && lang_hooks.decls.omp_privatize_by_reference (decl))
5390 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5392 /* Similar to the direct variable sized case above, we'll need the
5393 size of references being privatized. */
5394 if ((flags & GOVD_SHARED) == 0)
5396 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5397 if (TREE_CODE (t) != INTEGER_CST)
5398 omp_notice_variable (ctx, t, true);
5402 if (n != NULL)
5403 n->value |= flags;
5404 else
5405 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5408 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5409 This just prints out diagnostics about threadprivate variable uses
5410 in untied tasks. If DECL2 is non-NULL, prevent this warning
5411 on that variable. */
5413 static bool
5414 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5415 tree decl2)
5417 splay_tree_node n;
5418 struct gimplify_omp_ctx *octx;
5420 for (octx = ctx; octx; octx = octx->outer_context)
5421 if (octx->region_type == ORT_TARGET)
5423 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5424 if (n == NULL)
5426 error ("threadprivate variable %qE used in target region",
5427 DECL_NAME (decl));
5428 error_at (octx->location, "enclosing target region");
5429 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5431 if (decl2)
5432 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5435 if (ctx->region_type != ORT_UNTIED_TASK)
5436 return false;
5437 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5438 if (n == NULL)
5440 error ("threadprivate variable %qE used in untied task",
5441 DECL_NAME (decl));
5442 error_at (ctx->location, "enclosing task");
5443 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5445 if (decl2)
5446 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5447 return false;
5450 /* Record the fact that DECL was used within the OpenMP context CTX.
5451 IN_CODE is true when real code uses DECL, and false when we should
5452 merely emit default(none) errors. Return true if DECL is going to
5453 be remapped and thus DECL shouldn't be gimplified into its
5454 DECL_VALUE_EXPR (if any). */
5456 static bool
5457 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5459 splay_tree_node n;
5460 unsigned flags = in_code ? GOVD_SEEN : 0;
5461 bool ret = false, shared;
5463 if (error_operand_p (decl))
5464 return false;
5466 /* Threadprivate variables are predetermined. */
5467 if (is_global_var (decl))
5469 if (DECL_THREAD_LOCAL_P (decl))
5470 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5472 if (DECL_HAS_VALUE_EXPR_P (decl))
5474 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5476 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5477 return omp_notice_threadprivate_variable (ctx, decl, value);
5481 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5482 if (ctx->region_type == ORT_TARGET)
5484 if (n == NULL)
5486 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5488 error ("%qD referenced in target region does not have "
5489 "a mappable type", decl);
5490 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5492 else
5493 omp_add_variable (ctx, decl, GOVD_MAP | flags);
5495 else
5496 n->value |= flags;
5497 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5498 goto do_outer;
5501 if (n == NULL)
5503 enum omp_clause_default_kind default_kind, kind;
5504 struct gimplify_omp_ctx *octx;
5506 if (ctx->region_type == ORT_WORKSHARE
5507 || ctx->region_type == ORT_SIMD
5508 || ctx->region_type == ORT_TARGET_DATA)
5509 goto do_outer;
5511 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5512 remapped firstprivate instead of shared. To some extent this is
5513 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5514 default_kind = ctx->default_kind;
5515 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5516 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5517 default_kind = kind;
5519 switch (default_kind)
5521 case OMP_CLAUSE_DEFAULT_NONE:
5522 if ((ctx->region_type & ORT_TASK) != 0)
5524 error ("%qE not specified in enclosing task",
5525 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5526 error_at (ctx->location, "enclosing task");
5528 else if (ctx->region_type == ORT_TEAMS)
5530 error ("%qE not specified in enclosing teams construct",
5531 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5532 error_at (ctx->location, "enclosing teams construct");
5534 else
5536 error ("%qE not specified in enclosing parallel",
5537 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5538 error_at (ctx->location, "enclosing parallel");
5540 /* FALLTHRU */
5541 case OMP_CLAUSE_DEFAULT_SHARED:
5542 flags |= GOVD_SHARED;
5543 break;
5544 case OMP_CLAUSE_DEFAULT_PRIVATE:
5545 flags |= GOVD_PRIVATE;
5546 break;
5547 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5548 flags |= GOVD_FIRSTPRIVATE;
5549 break;
5550 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5551 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5552 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5553 if (ctx->outer_context)
5554 omp_notice_variable (ctx->outer_context, decl, in_code);
5555 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5557 splay_tree_node n2;
5559 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5560 continue;
5561 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5562 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5564 flags |= GOVD_FIRSTPRIVATE;
5565 break;
5567 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5568 break;
5570 if (flags & GOVD_FIRSTPRIVATE)
5571 break;
5572 if (octx == NULL
5573 && (TREE_CODE (decl) == PARM_DECL
5574 || (!is_global_var (decl)
5575 && DECL_CONTEXT (decl) == current_function_decl)))
5577 flags |= GOVD_FIRSTPRIVATE;
5578 break;
5580 flags |= GOVD_SHARED;
5581 break;
5582 default:
5583 gcc_unreachable ();
5586 if ((flags & GOVD_PRIVATE)
5587 && lang_hooks.decls.omp_private_outer_ref (decl))
5588 flags |= GOVD_PRIVATE_OUTER_REF;
5590 omp_add_variable (ctx, decl, flags);
5592 shared = (flags & GOVD_SHARED) != 0;
5593 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5594 goto do_outer;
5597 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5598 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5599 && DECL_SIZE (decl)
5600 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5602 splay_tree_node n2;
5603 tree t = DECL_VALUE_EXPR (decl);
5604 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5605 t = TREE_OPERAND (t, 0);
5606 gcc_assert (DECL_P (t));
5607 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5608 n2->value |= GOVD_SEEN;
5611 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5612 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5614 /* If nothing changed, there's nothing left to do. */
5615 if ((n->value & flags) == flags)
5616 return ret;
5617 flags |= n->value;
5618 n->value = flags;
5620 do_outer:
5621 /* If the variable is private in the current context, then we don't
5622 need to propagate anything to an outer context. */
5623 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5624 return ret;
5625 if (ctx->outer_context
5626 && omp_notice_variable (ctx->outer_context, decl, in_code))
5627 return true;
5628 return ret;
5631 /* Verify that DECL is private within CTX. If there's specific information
5632 to the contrary in the innermost scope, generate an error. */
5634 static bool
5635 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, bool simd)
5637 splay_tree_node n;
5639 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5640 if (n != NULL)
5642 if (n->value & GOVD_SHARED)
5644 if (ctx == gimplify_omp_ctxp)
5646 if (simd)
5647 error ("iteration variable %qE is predetermined linear",
5648 DECL_NAME (decl));
5649 else
5650 error ("iteration variable %qE should be private",
5651 DECL_NAME (decl));
5652 n->value = GOVD_PRIVATE;
5653 return true;
5655 else
5656 return false;
5658 else if ((n->value & GOVD_EXPLICIT) != 0
5659 && (ctx == gimplify_omp_ctxp
5660 || (ctx->region_type == ORT_COMBINED_PARALLEL
5661 && gimplify_omp_ctxp->outer_context == ctx)))
5663 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5664 error ("iteration variable %qE should not be firstprivate",
5665 DECL_NAME (decl));
5666 else if ((n->value & GOVD_REDUCTION) != 0)
5667 error ("iteration variable %qE should not be reduction",
5668 DECL_NAME (decl));
5669 else if (simd && (n->value & GOVD_LASTPRIVATE) != 0)
5670 error ("iteration variable %qE should not be lastprivate",
5671 DECL_NAME (decl));
5672 else if (simd && (n->value & GOVD_PRIVATE) != 0)
5673 error ("iteration variable %qE should not be private",
5674 DECL_NAME (decl));
5675 else if (simd && (n->value & GOVD_LINEAR) != 0)
5676 error ("iteration variable %qE is predetermined linear",
5677 DECL_NAME (decl));
5679 return (ctx == gimplify_omp_ctxp
5680 || (ctx->region_type == ORT_COMBINED_PARALLEL
5681 && gimplify_omp_ctxp->outer_context == ctx));
5684 if (ctx->region_type != ORT_WORKSHARE
5685 && ctx->region_type != ORT_SIMD)
5686 return false;
5687 else if (ctx->outer_context)
5688 return omp_is_private (ctx->outer_context, decl, simd);
5689 return false;
5692 /* Return true if DECL is private within a parallel region
5693 that binds to the current construct's context or in parallel
5694 region's REDUCTION clause. */
5696 static bool
5697 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5699 splay_tree_node n;
5703 ctx = ctx->outer_context;
5704 if (ctx == NULL)
5705 return !(is_global_var (decl)
5706 /* References might be private, but might be shared too. */
5707 || lang_hooks.decls.omp_privatize_by_reference (decl));
5709 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
5710 continue;
5712 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5713 if (n != NULL)
5714 return (n->value & GOVD_SHARED) == 0;
5716 while (ctx->region_type == ORT_WORKSHARE
5717 || ctx->region_type == ORT_SIMD);
5718 return false;
5721 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5722 and previous omp contexts. */
5724 static void
5725 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5726 enum omp_region_type region_type)
5728 struct gimplify_omp_ctx *ctx, *outer_ctx;
5729 struct gimplify_ctx gctx;
5730 tree c;
5732 ctx = new_omp_context (region_type);
5733 outer_ctx = ctx->outer_context;
5735 while ((c = *list_p) != NULL)
5737 bool remove = false;
5738 bool notice_outer = true;
5739 const char *check_non_private = NULL;
5740 unsigned int flags;
5741 tree decl;
5743 switch (OMP_CLAUSE_CODE (c))
5745 case OMP_CLAUSE_PRIVATE:
5746 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5747 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5749 flags |= GOVD_PRIVATE_OUTER_REF;
5750 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5752 else
5753 notice_outer = false;
5754 goto do_add;
5755 case OMP_CLAUSE_SHARED:
5756 flags = GOVD_SHARED | GOVD_EXPLICIT;
5757 goto do_add;
5758 case OMP_CLAUSE_FIRSTPRIVATE:
5759 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5760 check_non_private = "firstprivate";
5761 goto do_add;
5762 case OMP_CLAUSE_LASTPRIVATE:
5763 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5764 check_non_private = "lastprivate";
5765 goto do_add;
5766 case OMP_CLAUSE_REDUCTION:
5767 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5768 check_non_private = "reduction";
5769 goto do_add;
5770 case OMP_CLAUSE_LINEAR:
5771 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
5772 is_gimple_val, fb_rvalue) == GS_ERROR)
5774 remove = true;
5775 break;
5777 flags = GOVD_LINEAR | GOVD_EXPLICIT;
5778 goto do_add;
5780 case OMP_CLAUSE_MAP:
5781 if (OMP_CLAUSE_SIZE (c)
5782 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
5783 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
5785 remove = true;
5786 break;
5788 decl = OMP_CLAUSE_DECL (c);
5789 if (!DECL_P (decl))
5791 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
5792 NULL, is_gimple_lvalue, fb_lvalue)
5793 == GS_ERROR)
5795 remove = true;
5796 break;
5798 break;
5800 flags = GOVD_MAP | GOVD_EXPLICIT;
5801 goto do_add;
5803 case OMP_CLAUSE_DEPEND:
5804 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
5806 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
5807 NULL, is_gimple_val, fb_rvalue);
5808 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5810 if (error_operand_p (OMP_CLAUSE_DECL (c)))
5812 remove = true;
5813 break;
5815 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
5816 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
5817 is_gimple_val, fb_rvalue) == GS_ERROR)
5819 remove = true;
5820 break;
5822 break;
5824 case OMP_CLAUSE_TO:
5825 case OMP_CLAUSE_FROM:
5826 if (OMP_CLAUSE_SIZE (c)
5827 && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
5828 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
5830 remove = true;
5831 break;
5833 decl = OMP_CLAUSE_DECL (c);
5834 if (error_operand_p (decl))
5836 remove = true;
5837 break;
5839 if (!DECL_P (decl))
5841 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
5842 NULL, is_gimple_lvalue, fb_lvalue)
5843 == GS_ERROR)
5845 remove = true;
5846 break;
5848 break;
5850 goto do_notice;
5852 do_add:
5853 decl = OMP_CLAUSE_DECL (c);
5854 if (error_operand_p (decl))
5856 remove = true;
5857 break;
5859 omp_add_variable (ctx, decl, flags);
5860 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5861 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5863 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5864 GOVD_LOCAL | GOVD_SEEN);
5865 gimplify_omp_ctxp = ctx;
5866 push_gimplify_context (&gctx);
5868 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5869 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5871 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5872 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5873 pop_gimplify_context
5874 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5875 push_gimplify_context (&gctx);
5876 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5877 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5878 pop_gimplify_context
5879 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5880 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5881 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5883 gimplify_omp_ctxp = outer_ctx;
5885 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5886 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5888 gimplify_omp_ctxp = ctx;
5889 push_gimplify_context (&gctx);
5890 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5892 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5893 NULL, NULL);
5894 TREE_SIDE_EFFECTS (bind) = 1;
5895 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5896 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5898 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5899 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5900 pop_gimplify_context
5901 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5902 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5904 gimplify_omp_ctxp = outer_ctx;
5906 if (notice_outer)
5907 goto do_notice;
5908 break;
5910 case OMP_CLAUSE_COPYIN:
5911 case OMP_CLAUSE_COPYPRIVATE:
5912 decl = OMP_CLAUSE_DECL (c);
5913 if (error_operand_p (decl))
5915 remove = true;
5916 break;
5918 do_notice:
5919 if (outer_ctx)
5920 omp_notice_variable (outer_ctx, decl, true);
5921 if (check_non_private
5922 && region_type == ORT_WORKSHARE
5923 && omp_check_private (ctx, decl))
5925 error ("%s variable %qE is private in outer context",
5926 check_non_private, DECL_NAME (decl));
5927 remove = true;
5929 break;
5931 case OMP_CLAUSE_FINAL:
5932 case OMP_CLAUSE_IF:
5933 OMP_CLAUSE_OPERAND (c, 0)
5934 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5935 /* Fall through. */
5937 case OMP_CLAUSE_SCHEDULE:
5938 case OMP_CLAUSE_NUM_THREADS:
5939 case OMP_CLAUSE_NUM_TEAMS:
5940 case OMP_CLAUSE_THREAD_LIMIT:
5941 case OMP_CLAUSE_DIST_SCHEDULE:
5942 case OMP_CLAUSE_DEVICE:
5943 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5944 is_gimple_val, fb_rvalue) == GS_ERROR)
5945 remove = true;
5946 break;
5948 case OMP_CLAUSE_NOWAIT:
5949 case OMP_CLAUSE_ORDERED:
5950 case OMP_CLAUSE_UNTIED:
5951 case OMP_CLAUSE_COLLAPSE:
5952 case OMP_CLAUSE_MERGEABLE:
5953 case OMP_CLAUSE_PROC_BIND:
5954 case OMP_CLAUSE_SAFELEN:
5955 break;
5957 case OMP_CLAUSE_ALIGNED:
5958 decl = OMP_CLAUSE_DECL (c);
5959 if (error_operand_p (decl))
5961 remove = true;
5962 break;
5964 if (!is_global_var (decl)
5965 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
5966 omp_add_variable (ctx, decl, GOVD_ALIGNED);
5967 break;
5969 case OMP_CLAUSE_DEFAULT:
5970 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5971 break;
5973 default:
5974 gcc_unreachable ();
5977 if (remove)
5978 *list_p = OMP_CLAUSE_CHAIN (c);
5979 else
5980 list_p = &OMP_CLAUSE_CHAIN (c);
5983 gimplify_omp_ctxp = ctx;
5986 /* For all variables that were not actually used within the context,
5987 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
5989 static int
5990 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5992 tree *list_p = (tree *) data;
5993 tree decl = (tree) n->key;
5994 unsigned flags = n->value;
5995 enum omp_clause_code code;
5996 tree clause;
5997 bool private_debug;
5999 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6000 return 0;
6001 if ((flags & GOVD_SEEN) == 0)
6002 return 0;
6003 if (flags & GOVD_DEBUG_PRIVATE)
6005 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6006 private_debug = true;
6008 else if (flags & GOVD_MAP)
6009 private_debug = false;
6010 else
6011 private_debug
6012 = lang_hooks.decls.omp_private_debug_clause (decl,
6013 !!(flags & GOVD_SHARED));
6014 if (private_debug)
6015 code = OMP_CLAUSE_PRIVATE;
6016 else if (flags & GOVD_MAP)
6017 code = OMP_CLAUSE_MAP;
6018 else if (flags & GOVD_SHARED)
6020 if (is_global_var (decl))
6022 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6023 while (ctx != NULL)
6025 splay_tree_node on
6026 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6027 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6028 | GOVD_PRIVATE | GOVD_REDUCTION
6029 | GOVD_LINEAR)) != 0)
6030 break;
6031 ctx = ctx->outer_context;
6033 if (ctx == NULL)
6034 return 0;
6036 code = OMP_CLAUSE_SHARED;
6038 else if (flags & GOVD_PRIVATE)
6039 code = OMP_CLAUSE_PRIVATE;
6040 else if (flags & GOVD_FIRSTPRIVATE)
6041 code = OMP_CLAUSE_FIRSTPRIVATE;
6042 else if (flags & GOVD_LASTPRIVATE)
6043 code = OMP_CLAUSE_LASTPRIVATE;
6044 else if (flags & GOVD_ALIGNED)
6045 return 0;
6046 else
6047 gcc_unreachable ();
6049 clause = build_omp_clause (input_location, code);
6050 OMP_CLAUSE_DECL (clause) = decl;
6051 OMP_CLAUSE_CHAIN (clause) = *list_p;
6052 if (private_debug)
6053 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6054 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6055 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6056 else if (code == OMP_CLAUSE_MAP)
6058 OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
6059 ? OMP_CLAUSE_MAP_TO
6060 : OMP_CLAUSE_MAP_TOFROM;
6061 if (DECL_SIZE (decl)
6062 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6064 tree decl2 = DECL_VALUE_EXPR (decl);
6065 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6066 decl2 = TREE_OPERAND (decl2, 0);
6067 gcc_assert (DECL_P (decl2));
6068 tree mem = build_simple_mem_ref (decl2);
6069 OMP_CLAUSE_DECL (clause) = mem;
6070 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6071 if (gimplify_omp_ctxp->outer_context)
6073 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6074 omp_notice_variable (ctx, decl2, true);
6075 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6077 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6078 OMP_CLAUSE_MAP);
6079 OMP_CLAUSE_DECL (nc) = decl;
6080 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6081 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6082 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6083 OMP_CLAUSE_CHAIN (clause) = nc;
6086 *list_p = clause;
6087 lang_hooks.decls.omp_finish_clause (clause);
6089 return 0;
6092 static void
6093 gimplify_adjust_omp_clauses (tree *list_p)
6095 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6096 tree c, decl;
6098 while ((c = *list_p) != NULL)
6100 splay_tree_node n;
6101 bool remove = false;
6103 switch (OMP_CLAUSE_CODE (c))
6105 case OMP_CLAUSE_PRIVATE:
6106 case OMP_CLAUSE_SHARED:
6107 case OMP_CLAUSE_FIRSTPRIVATE:
6108 case OMP_CLAUSE_LINEAR:
6109 decl = OMP_CLAUSE_DECL (c);
6110 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6111 remove = !(n->value & GOVD_SEEN);
6112 if (! remove)
6114 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6115 if ((n->value & GOVD_DEBUG_PRIVATE)
6116 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6118 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6119 || ((n->value & GOVD_DATA_SHARE_CLASS)
6120 == GOVD_PRIVATE));
6121 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6122 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6124 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6125 && ctx->outer_context
6126 && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6127 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6128 && !is_global_var (decl))
6130 if (ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
6132 n = splay_tree_lookup (ctx->outer_context->variables,
6133 (splay_tree_key) decl);
6134 if (n == NULL
6135 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
6137 int flags = OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6138 ? GOVD_LASTPRIVATE : GOVD_SHARED;
6139 if (n == NULL)
6140 omp_add_variable (ctx->outer_context, decl,
6141 flags | GOVD_SEEN);
6142 else
6143 n->value |= flags | GOVD_SEEN;
6146 else
6147 omp_notice_variable (ctx->outer_context, decl, true);
6150 break;
6152 case OMP_CLAUSE_LASTPRIVATE:
6153 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6154 accurately reflect the presence of a FIRSTPRIVATE clause. */
6155 decl = OMP_CLAUSE_DECL (c);
6156 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6157 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6158 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6159 break;
6161 case OMP_CLAUSE_ALIGNED:
6162 decl = OMP_CLAUSE_DECL (c);
6163 if (!is_global_var (decl))
6165 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6166 remove = n == NULL || !(n->value & GOVD_SEEN);
6167 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6169 struct gimplify_omp_ctx *octx;
6170 if (n != NULL
6171 && (n->value & (GOVD_DATA_SHARE_CLASS
6172 & ~GOVD_FIRSTPRIVATE)))
6173 remove = true;
6174 else
6175 for (octx = ctx->outer_context; octx;
6176 octx = octx->outer_context)
6178 n = splay_tree_lookup (octx->variables,
6179 (splay_tree_key) decl);
6180 if (n == NULL)
6181 continue;
6182 if (n->value & GOVD_LOCAL)
6183 break;
6184 /* We have to avoid assigning a shared variable
6185 to itself when trying to add
6186 __builtin_assume_aligned. */
6187 if (n->value & GOVD_SHARED)
6189 remove = true;
6190 break;
6195 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6197 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6198 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6199 remove = true;
6201 break;
6203 case OMP_CLAUSE_MAP:
6204 decl = OMP_CLAUSE_DECL (c);
6205 if (!DECL_P (decl))
6206 break;
6207 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6208 if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6209 remove = true;
6210 else if (DECL_SIZE (decl)
6211 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6212 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
6214 tree decl2 = DECL_VALUE_EXPR (decl);
6215 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6216 decl2 = TREE_OPERAND (decl2, 0);
6217 gcc_assert (DECL_P (decl2));
6218 tree mem = build_simple_mem_ref (decl2);
6219 OMP_CLAUSE_DECL (c) = mem;
6220 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6221 if (ctx->outer_context)
6223 omp_notice_variable (ctx->outer_context, decl2, true);
6224 omp_notice_variable (ctx->outer_context,
6225 OMP_CLAUSE_SIZE (c), true);
6227 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6228 OMP_CLAUSE_MAP);
6229 OMP_CLAUSE_DECL (nc) = decl;
6230 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6231 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6232 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6233 OMP_CLAUSE_CHAIN (c) = nc;
6234 c = nc;
6236 break;
6238 case OMP_CLAUSE_TO:
6239 case OMP_CLAUSE_FROM:
6240 decl = OMP_CLAUSE_DECL (c);
6241 if (!DECL_P (decl))
6242 break;
6243 if (DECL_SIZE (decl)
6244 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6246 tree decl2 = DECL_VALUE_EXPR (decl);
6247 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6248 decl2 = TREE_OPERAND (decl2, 0);
6249 gcc_assert (DECL_P (decl2));
6250 tree mem = build_simple_mem_ref (decl2);
6251 OMP_CLAUSE_DECL (c) = mem;
6252 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6253 if (ctx->outer_context)
6255 omp_notice_variable (ctx->outer_context, decl2, true);
6256 omp_notice_variable (ctx->outer_context,
6257 OMP_CLAUSE_SIZE (c), true);
6260 break;
6262 case OMP_CLAUSE_REDUCTION:
6263 case OMP_CLAUSE_COPYIN:
6264 case OMP_CLAUSE_COPYPRIVATE:
6265 case OMP_CLAUSE_IF:
6266 case OMP_CLAUSE_NUM_THREADS:
6267 case OMP_CLAUSE_NUM_TEAMS:
6268 case OMP_CLAUSE_THREAD_LIMIT:
6269 case OMP_CLAUSE_DIST_SCHEDULE:
6270 case OMP_CLAUSE_DEVICE:
6271 case OMP_CLAUSE_SCHEDULE:
6272 case OMP_CLAUSE_NOWAIT:
6273 case OMP_CLAUSE_ORDERED:
6274 case OMP_CLAUSE_DEFAULT:
6275 case OMP_CLAUSE_UNTIED:
6276 case OMP_CLAUSE_COLLAPSE:
6277 case OMP_CLAUSE_FINAL:
6278 case OMP_CLAUSE_MERGEABLE:
6279 case OMP_CLAUSE_PROC_BIND:
6280 case OMP_CLAUSE_SAFELEN:
6281 case OMP_CLAUSE_DEPEND:
6282 break;
6284 default:
6285 gcc_unreachable ();
6288 if (remove)
6289 *list_p = OMP_CLAUSE_CHAIN (c);
6290 else
6291 list_p = &OMP_CLAUSE_CHAIN (c);
6294 /* Add in any implicit data sharing. */
6295 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
6297 gimplify_omp_ctxp = ctx->outer_context;
6298 delete_omp_context (ctx);
6301 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6302 gimplification of the body, as well as scanning the body for used
6303 variables. We need to do this scan now, because variable-sized
6304 decls will be decomposed during gimplification. */
6306 static void
6307 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
/* NOTE(review): gaps in the embedded line numbers (6308, 6313, ...)
   indicate brace-only lines dropped when this text was extracted;
   code lines are kept byte-identical, only comments are added.  */
6309 tree expr = *expr_p;
6310 gimple g;
6311 gimple_seq body = NULL;
6312 struct gimplify_ctx gctx;
/* Scan the clauses first.  A combined construct ("#pragma omp parallel
   for" etc.) gets a distinct region type so nested constructs can tell
   they are part of a combination.  */
6314 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6315 OMP_PARALLEL_COMBINED (expr)
6316 ? ORT_COMBINED_PARALLEL
6317 : ORT_PARALLEL);
6319 push_gimplify_context (&gctx);
6321 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
/* If the body gimplified into a GIMPLE_BIND, let the popped context
   park its temporaries there; otherwise discard the context.  */
6322 if (gimple_code (g) == GIMPLE_BIND)
6323 pop_gimplify_context (g);
6324 else
6325 pop_gimplify_context (NULL);
/* Prune/augment the clause list now that the body has been scanned.  */
6327 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6329 g = gimple_build_omp_parallel (body,
6330 OMP_PARALLEL_CLAUSES (expr),
6331 NULL_TREE, NULL_TREE);
6332 if (OMP_PARALLEL_COMBINED (expr))
6333 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6334 gimplify_seq_add_stmt (pre_p, g);
/* The statement has been fully emitted into PRE_P.  */
6335 *expr_p = NULL_TREE;
6338 /* Gimplify the contents of an OMP_TASK statement. This involves
6339 gimplification of the body, as well as scanning the body for used
6340 variables. We need to do this scan now, because variable-sized
6341 decls will be decomposed during gimplification. */
6343 static void
6344 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
/* NOTE(review): gaps in the embedded line numbers indicate brace-only
   lines dropped by extraction; code kept byte-identical.  */
6346 tree expr = *expr_p;
6347 gimple g;
6348 gimple_seq body = NULL;
6349 struct gimplify_ctx gctx;
/* An explicit "untied" clause selects a distinct region type so the
   data-sharing machinery can treat untied tasks more strictly.  */
6351 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6352 find_omp_clause (OMP_TASK_CLAUSES (expr),
6353 OMP_CLAUSE_UNTIED)
6354 ? ORT_UNTIED_TASK : ORT_TASK);
6356 push_gimplify_context (&gctx);
6358 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
/* Park temporaries in the body's GIMPLE_BIND when there is one.  */
6359 if (gimple_code (g) == GIMPLE_BIND)
6360 pop_gimplify_context (g);
6361 else
6362 pop_gimplify_context (NULL);
6364 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
/* The trailing NULL_TREE operands (child fn, data arg, copy fns and
   sizes) are filled in by later omp-lowering passes.  */
6366 g = gimple_build_omp_task (body,
6367 OMP_TASK_CLAUSES (expr),
6368 NULL_TREE, NULL_TREE,
6369 NULL_TREE, NULL_TREE, NULL_TREE);
6370 gimplify_seq_add_stmt (pre_p, g);
6371 *expr_p = NULL_TREE;
6374 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6375 with non-NULL OMP_FOR_INIT.  walk_tree callback: returns the found
6376 loop, or NULL_TREE to continue/prune the walk. */
6377 static tree
6378 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
/* Default to pruning: only the node kinds listed below may contain the
   inner loop of a combined construct.  */
6380 *walk_subtrees = 0;
6381 switch (TREE_CODE (*tp))
6383 case OMP_FOR:
/* An OMP_FOR with NULL init is itself part of a combined construct;
   keep walking into it to find the innermost loop.  */
6384 *walk_subtrees = 1;
6385 /* FALLTHRU */
6386 case OMP_SIMD:
6387 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6388 return *tp;
6389 break;
6390 case BIND_EXPR:
6391 case STATEMENT_LIST:
6392 case OMP_PARALLEL:
/* Transparent wrappers: descend into them.  */
6393 *walk_subtrees = 1;
6394 break;
6395 default:
6396 break;
6398 return NULL_TREE;
6401 /* Gimplify the gross structure of an OMP_FOR statement.  Handles
   OMP_FOR, OMP_SIMD, CILK_SIMD and OMP_DISTRIBUTE; lowers the loop
   header (init/cond/incr vectors) and body into a GIMPLE_OMP_FOR
   appended to PRE_P.  Returns GS_ALL_DONE or GS_ERROR. */
/* NOTE(review): gaps in the embedded line numbers indicate brace-only
   lines dropped by extraction; code lines are kept byte-identical.  */
6403 static enum gimplify_status
6404 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6406 tree for_stmt, orig_for_stmt, decl, var, t;
6407 enum gimplify_status ret = GS_ALL_DONE;
6408 enum gimplify_status tret;
6409 gimple gfor;
6410 gimple_seq for_body, for_pre_body;
6411 int i;
6412 bool simd;
6413 bitmap has_decl_expr = NULL;
6415 orig_for_stmt = for_stmt = *expr_p;
6417 simd = TREE_CODE (for_stmt) == OMP_SIMD
6418 || TREE_CODE (for_stmt) == CILK_SIMD;
6419 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6420 simd ? ORT_SIMD : ORT_WORKSHARE);
6422 /* Handle OMP_FOR_INIT. */
6423 for_pre_body = NULL;
/* For simd loops, record which iteration variables were declared in the
   pre-body (i.e. declared in the for-init), so we can decide between
   lastprivate and private below.  */
6424 if (simd && OMP_FOR_PRE_BODY (for_stmt))
6426 has_decl_expr = BITMAP_ALLOC (NULL);
6427 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6428 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
6429 == VAR_DECL)
6431 t = OMP_FOR_PRE_BODY (for_stmt);
6432 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6434 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6436 tree_stmt_iterator si;
6437 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6438 tsi_next (&si))
6440 t = tsi_stmt (si);
6441 if (TREE_CODE (t) == DECL_EXPR
6442 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6443 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6447 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6448 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
/* A NULL OMP_FOR_INIT marks the outer statement of a combined construct;
   the real loop header lives on the innermost OMP_FOR/OMP_SIMD inside
   the body.  */
6450 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6452 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6453 NULL, NULL);
6454 gcc_assert (for_stmt != NULL_TREE);
6455 gimplify_omp_ctxp->combined_loop = true;
6458 for_body = NULL;
/* The init/cond/incr TREE_VECs must all describe the same collapse
   depth.  */
6459 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6460 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6461 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6462 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6463 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6465 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6466 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6467 decl = TREE_OPERAND (t, 0);
6468 gcc_assert (DECL_P (decl));
6469 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6470 || POINTER_TYPE_P (TREE_TYPE (decl)));
6472 /* Make sure the iteration variable is private. */
6473 tree c = NULL_TREE;
6474 if (orig_for_stmt != for_stmt)
6475 /* Do this only on innermost construct for combined ones. */;
6476 else if (simd)
6478 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6479 (splay_tree_key)decl);
6480 omp_is_private (gimplify_omp_ctxp, decl, simd);
6481 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6482 omp_notice_variable (gimplify_omp_ctxp, decl, true);
/* Non-collapsed simd: the iteration variable becomes linear with the
   loop step; copy-out only if it survives the loop.  */
6483 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6485 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6486 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6487 if (has_decl_expr
6488 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6489 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6490 OMP_CLAUSE_DECL (c) = decl;
6491 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6492 OMP_FOR_CLAUSES (for_stmt) = c;
6493 omp_add_variable (gimplify_omp_ctxp, decl,
6494 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6496 else
/* Collapsed simd: variables declared inside the loop nest are plain
   private; others are lastprivate so their final value escapes.  */
6498 bool lastprivate
6499 = (!has_decl_expr
6500 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6501 c = build_omp_clause (input_location,
6502 lastprivate ? OMP_CLAUSE_LASTPRIVATE
6503 : OMP_CLAUSE_PRIVATE);
6504 OMP_CLAUSE_DECL (c) = decl;
6505 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6506 omp_add_variable (gimplify_omp_ctxp, decl,
6507 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6508 | GOVD_SEEN);
6509 c = NULL_TREE;
6512 else if (omp_is_private (gimplify_omp_ctxp, decl, simd))
6513 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6514 else
6515 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6517 /* If DECL is not a gimple register, create a temporary variable to act
6518 as an iteration counter. This is valid, since DECL cannot be
6519 modified in the body of the loop. */
6520 if (orig_for_stmt != for_stmt)
6521 var = decl;
6522 else if (!is_gimple_reg (decl))
6524 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6525 TREE_OPERAND (t, 0) = var;
/* Keep DECL observable inside the body by copying from VAR on entry.  */
6527 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6529 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6531 else
6532 var = decl;
/* Gimplify the initial value.  */
6534 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6535 is_gimple_val, fb_rvalue);
6536 ret = MIN (ret, tret);
6537 if (ret == GS_ERROR)
6538 return ret;
6540 /* Handle OMP_FOR_COND. */
6541 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6542 gcc_assert (COMPARISON_CLASS_P (t));
6543 gcc_assert (TREE_OPERAND (t, 0) == decl);
6545 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6546 is_gimple_val, fb_rvalue);
6547 ret = MIN (ret, tret);
6549 /* Handle OMP_FOR_INCR.  Canonicalize every increment form into
   VAR = VAR +/- STEP. */
6550 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6551 switch (TREE_CODE (t))
6553 case PREINCREMENT_EXPR:
6554 case POSTINCREMENT_EXPR:
6556 tree decl = TREE_OPERAND (t, 0);
6557 // c_omp_for_incr_canonicalize_ptr() should have been
6558 // called to massage things appropriately.
6559 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
6561 if (orig_for_stmt != for_stmt)
6562 break;
6563 t = build_int_cst (TREE_TYPE (decl), 1);
6564 if (c)
6565 OMP_CLAUSE_LINEAR_STEP (c) = t;
6566 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6567 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6568 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6569 break;
6572 case PREDECREMENT_EXPR:
6573 case POSTDECREMENT_EXPR:
6574 if (orig_for_stmt != for_stmt)
6575 break;
6576 t = build_int_cst (TREE_TYPE (decl), -1);
6577 if (c)
6578 OMP_CLAUSE_LINEAR_STEP (c) = t;
6579 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6580 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6581 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6582 break;
6584 case MODIFY_EXPR:
6585 gcc_assert (TREE_OPERAND (t, 0) == decl);
6586 TREE_OPERAND (t, 0) = var;
6588 t = TREE_OPERAND (t, 1);
6589 switch (TREE_CODE (t))
6591 case PLUS_EXPR:
/* Canonicalize STEP + VAR into VAR + STEP.  */
6592 if (TREE_OPERAND (t, 1) == decl)
6594 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6595 TREE_OPERAND (t, 0) = var;
6596 break;
6599 /* Fallthru. */
6600 case MINUS_EXPR:
6601 case POINTER_PLUS_EXPR:
6602 gcc_assert (TREE_OPERAND (t, 0) == decl);
6603 TREE_OPERAND (t, 0) = var;
6604 break;
6605 default:
6606 gcc_unreachable ();
6609 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6610 is_gimple_val, fb_rvalue);
6611 ret = MIN (ret, tret);
6612 if (c)
/* Record the step on the linear clause, negated for MINUS.  */
6614 OMP_CLAUSE_LINEAR_STEP (c) = TREE_OPERAND (t, 1);
6615 if (TREE_CODE (t) == MINUS_EXPR)
6617 t = TREE_OPERAND (t, 1);
6618 OMP_CLAUSE_LINEAR_STEP (c)
6619 = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
6620 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
6621 &for_pre_body, NULL,
6622 is_gimple_val, fb_rvalue);
6623 ret = MIN (ret, tret);
6626 break;
6628 default:
6629 gcc_unreachable ();
/* If we substituted a temporary for DECL, a lastprivate clause on DECL
   must compute the final value in terms of DECL itself.  */
6632 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6633 && orig_for_stmt == for_stmt)
6635 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6636 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6637 && OMP_CLAUSE_DECL (c) == decl
6638 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6640 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6641 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6642 gcc_assert (TREE_OPERAND (t, 0) == var);
6643 t = TREE_OPERAND (t, 1);
6644 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6645 || TREE_CODE (t) == MINUS_EXPR
6646 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6647 gcc_assert (TREE_OPERAND (t, 0) == var);
6648 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6649 TREE_OPERAND (t, 1));
6650 gimplify_assign (decl, t,
6651 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6656 BITMAP_FREE (has_decl_expr);
6658 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
/* For combined constructs, give the inner loop fresh private iteration
   variables distinct from the outer statement's.  */
6660 if (orig_for_stmt != for_stmt)
6661 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6663 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6664 decl = TREE_OPERAND (t, 0);
6665 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6666 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6667 TREE_OPERAND (t, 0) = var;
6668 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6669 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
6670 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
6673 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt));
/* Build the GIMPLE_OMP_FOR with the kind matching the source construct.  */
6675 int kind;
6676 switch (TREE_CODE (orig_for_stmt))
6678 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
6679 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
6680 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
6681 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
6682 default:
6683 gcc_unreachable ();
6685 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
6686 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6687 for_pre_body);
6688 if (orig_for_stmt != for_stmt)
6689 gimple_omp_for_set_combined_p (gfor, true);
6690 if (gimplify_omp_ctxp
6691 && (gimplify_omp_ctxp->combined_loop
6692 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
6693 && gimplify_omp_ctxp->outer_context
6694 && gimplify_omp_ctxp->outer_context->combined_loop)))
6696 gimple_omp_for_set_combined_into_p (gfor, true);
6697 if (gimplify_omp_ctxp->combined_loop)
6698 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
6699 else
6700 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
/* Copy the canonicalized loop header into the GIMPLE statement.  */
6703 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6705 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6706 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6707 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6708 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6709 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6710 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6711 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6712 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6715 gimplify_seq_add_stmt (pre_p, gfor);
6716 if (ret != GS_ALL_DONE)
6717 return GS_ERROR;
6718 *expr_p = NULL_TREE;
6719 return GS_ALL_DONE;
6722 /* Gimplify the gross structure of other OpenMP constructs.
6723 In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
6724 and OMP_TEAMS. */
6726 static void
6727 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
/* NOTE(review): gaps in the embedded line numbers indicate brace-only
   lines dropped by extraction; code kept byte-identical.  */
6729 tree expr = *expr_p;
6730 gimple stmt;
6731 gimple_seq body = NULL;
6732 enum omp_region_type ort = ORT_WORKSHARE;
/* Map the tree code to the region type used for clause scanning.  */
6734 switch (TREE_CODE (expr))
6736 case OMP_SECTIONS:
6737 case OMP_SINGLE:
6738 break;
6739 case OMP_TARGET:
6740 ort = ORT_TARGET;
6741 break;
6742 case OMP_TARGET_DATA:
6743 ort = ORT_TARGET_DATA;
6744 break;
6745 case OMP_TEAMS:
6746 ort = ORT_TEAMS;
6747 break;
6748 default:
6749 gcc_unreachable ();
6751 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
/* Target regions need their own gimplify context so temporaries created
   while lowering the body stay inside the offloaded region.  */
6752 if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
6754 struct gimplify_ctx gctx;
6755 push_gimplify_context (&gctx);
6756 gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
6757 if (gimple_code (g) == GIMPLE_BIND)
6758 pop_gimplify_context (g);
6759 else
6760 pop_gimplify_context (NULL);
/* "target data" is bracketed with GOMP_target_end_data in a
   try/finally so the mapping is undone on any exit path.  */
6761 if (ort == ORT_TARGET_DATA)
6763 gimple_seq cleanup = NULL;
6764 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
6765 g = gimple_build_call (fn, 0);
6766 gimple_seq_add_stmt (&cleanup, g);
6767 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
6768 body = NULL;
6769 gimple_seq_add_stmt (&body, g);
6772 else
6773 gimplify_and_add (OMP_BODY (expr), &body);
6774 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
/* Build the matching GIMPLE statement for the construct.  */
6776 switch (TREE_CODE (expr))
6778 case OMP_SECTIONS:
6779 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6780 break;
6781 case OMP_SINGLE:
6782 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6783 break;
6784 case OMP_TARGET:
6785 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
6786 OMP_CLAUSES (expr));
6787 break;
6788 case OMP_TARGET_DATA:
6789 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
6790 OMP_CLAUSES (expr));
6791 break;
6792 case OMP_TEAMS:
6793 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
6794 break;
6795 default:
6796 gcc_unreachable ();
6799 gimplify_seq_add_stmt (pre_p, stmt);
6800 *expr_p = NULL_TREE;
6803 /* Gimplify the gross structure of OpenMP target update construct.
   "#pragma omp target update" has no body; only its motion clauses
   need gimplifying before building the GIMPLE statement. */
6805 static void
6806 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
6808 tree expr = *expr_p;
6809 gimple stmt;
6811 gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
6812 ORT_WORKSHARE);
6813 gimplify_adjust_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr));
/* NULL body: an update is a pure data-motion statement.  */
6814 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
6815 OMP_TARGET_UPDATE_CLAUSES (expr));
6817 gimplify_seq_add_stmt (pre_p, stmt);
6818 *expr_p = NULL_TREE;
6821 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6822 stabilized the lhs of the atomic operation as *ADDR. Return true if
6823 EXPR is this stabilized form. */
6825 static bool
6826 goa_lhs_expr_p (tree expr, tree addr)
6828 /* Also include casts to other type variants. The C front end is fond
6829 of adding these for e.g. volatile variables. This is like
6830 STRIP_TYPE_NOPS but includes the main variant lookup. */
6831 STRIP_USELESS_TYPE_CONVERSION (expr);
/* Case 1: EXPR is *ADDR, possibly through matched conversion layers on
   both the dereferenced pointer and ADDR.  */
6833 if (TREE_CODE (expr) == INDIRECT_REF)
6835 expr = TREE_OPERAND (expr, 0);
6836 while (expr != addr
6837 && (CONVERT_EXPR_P (expr)
6838 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6839 && TREE_CODE (expr) == TREE_CODE (addr)
6840 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6842 expr = TREE_OPERAND (expr, 0);
6843 addr = TREE_OPERAND (addr, 0);
6845 if (expr == addr)
6846 return true;
/* Both reduced to ADDR_EXPRs of the same object also counts.  */
6847 return (TREE_CODE (addr) == ADDR_EXPR
6848 && TREE_CODE (expr) == ADDR_EXPR
6849 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
/* Case 2: ADDR is &EXPR directly.  */
6851 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6852 return true;
6853 return false;
6856 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
6857 expression does not involve the lhs, evaluate it into a temporary.
6858 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
6859 or -1 if an error was encountered. */
6861 static int
6862 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6863 tree lhs_var)
6865 tree expr = *expr_p;
6866 int saw_lhs;
/* Direct hit: the whole expression is the atomic lhs.  */
6868 if (goa_lhs_expr_p (expr, lhs_addr))
6870 *expr_p = lhs_var;
6871 return 1;
/* Already a gimple value; nothing to stabilize.  */
6873 if (is_gimple_val (expr))
6874 return 0;
6876 saw_lhs = 0;
6877 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6879 case tcc_binary:
6880 case tcc_comparison:
6881 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6882 lhs_var);
/* FALLTHRU: binary cases continue on to stabilize operand 0.  */
6883 case tcc_unary:
6884 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6885 lhs_var);
6886 break;
6887 case tcc_expression:
6888 switch (TREE_CODE (expr))
6890 case TRUTH_ANDIF_EXPR:
6891 case TRUTH_ORIF_EXPR:
6892 case TRUTH_AND_EXPR:
6893 case TRUTH_OR_EXPR:
6894 case TRUTH_XOR_EXPR:
6895 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6896 lhs_addr, lhs_var);
/* FALLTHRU: the binary truth ops continue on to operand 0.  */
6897 case TRUTH_NOT_EXPR:
6898 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6899 lhs_addr, lhs_var);
6900 break;
6901 case COMPOUND_EXPR:
6902 /* Break out any preevaluations from cp_build_modify_expr. */
6903 for (; TREE_CODE (expr) == COMPOUND_EXPR;
6904 expr = TREE_OPERAND (expr, 1))
6905 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
6906 *expr_p = expr;
6907 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
6908 default:
6909 break;
6911 break;
6912 default:
6913 break;
/* Expressions not involving the lhs are evaluated up front into a
   temporary so the atomic operation itself stays minimal.  */
6916 if (saw_lhs == 0)
6918 enum gimplify_status gs;
6919 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6920 if (gs != GS_ALL_DONE)
6921 saw_lhs = -1;
6924 return saw_lhs;
6927 /* Gimplify an OMP_ATOMIC statement.  Lowers the OMP_ATOMIC* family
   into a GIMPLE_OMP_ATOMIC_LOAD / GIMPLE_OMP_ATOMIC_STORE pair.
   Returns GS_ALL_DONE on success, GS_ERROR otherwise. */
6929 static enum gimplify_status
6930 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6932 tree addr = TREE_OPERAND (*expr_p, 0);
/* OMP_ATOMIC_READ has no rhs operand.  */
6933 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
6934 ? NULL : TREE_OPERAND (*expr_p, 1);
6935 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6936 tree tmp_load;
6937 gimple loadstmt, storestmt;
6939 tmp_load = create_tmp_reg (type, NULL);
/* Replace uses of the lhs inside RHS with the loaded temporary and
   hoist any lhs-independent subexpressions.  */
6940 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6941 return GS_ERROR;
6943 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6944 != GS_ALL_DONE)
6945 return GS_ERROR;
6947 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
6948 gimplify_seq_add_stmt (pre_p, loadstmt);
6949 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6950 != GS_ALL_DONE)
6951 return GS_ERROR;
/* A pure read stores back the loaded value unchanged.  */
6953 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
6954 rhs = tmp_load;
6955 storestmt = gimple_build_omp_atomic_store (rhs);
6956 gimplify_seq_add_stmt (pre_p, storestmt);
/* Propagate "#pragma omp atomic seq_cst" onto both halves.  */
6957 if (OMP_ATOMIC_SEQ_CST (*expr_p))
6959 gimple_omp_atomic_set_seq_cst (loadstmt);
6960 gimple_omp_atomic_set_seq_cst (storestmt);
/* Capture forms yield a value: the old value (from the load) or the
   new value (from the store).  Plain atomics yield nothing.  */
6962 switch (TREE_CODE (*expr_p))
6964 case OMP_ATOMIC_READ:
6965 case OMP_ATOMIC_CAPTURE_OLD:
6966 *expr_p = tmp_load;
6967 gimple_omp_atomic_set_need_value (loadstmt);
6968 break;
6969 case OMP_ATOMIC_CAPTURE_NEW:
6970 *expr_p = rhs;
6971 gimple_omp_atomic_set_need_value (storestmt);
6972 break;
6973 default:
6974 *expr_p = NULL;
6975 break;
6978 return GS_ALL_DONE;
6981 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
6982 body, and adding some EH bits. */
6984 static enum gimplify_status
6985 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
6987 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
6988 gimple g;
6989 gimple_seq body = NULL;
6990 struct gimplify_ctx gctx;
6991 int subcode = 0;
6993 /* Wrap the transaction body in a BIND_EXPR so we have a context
6994 where to put decls for OpenMP. */
6995 if (TREE_CODE (tbody) != BIND_EXPR)
6997 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
6998 TREE_SIDE_EFFECTS (bind) = 1;
6999 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7000 TRANSACTION_EXPR_BODY (expr) = bind;
7003 push_gimplify_context (&gctx);
/* If the transaction produces a value, voidify returns the temporary
   that receives it; NULL otherwise.  */
7004 temp = voidify_wrapper_expr (*expr_p, NULL);
7006 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7007 pop_gimplify_context (g);
7009 g = gimple_build_transaction (body, NULL);
/* OUTER and RELAXED are mutually exclusive; OUTER wins if both set.  */
7010 if (TRANSACTION_EXPR_OUTER (expr))
7011 subcode = GTMA_IS_OUTER;
7012 else if (TRANSACTION_EXPR_RELAXED (expr))
7013 subcode = GTMA_IS_RELAXED;
7014 gimple_transaction_set_subcode (g, subcode);
7016 gimplify_seq_add_stmt (pre_p, g);
/* A value-producing transaction leaves TEMP for the caller to
   re-gimplify; otherwise we are done.  */
7018 if (temp)
7020 *expr_p = temp;
7021 return GS_OK;
7024 *expr_p = NULL_TREE;
7025 return GS_ALL_DONE;
7028 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
7029 expression produces a value to be used as an operand inside a GIMPLE
7030 statement, the value will be stored back in *EXPR_P. This value will
7031 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7032 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7033 emitted in PRE_P and POST_P.
7035 Additionally, this process may overwrite parts of the input
7036 expression during gimplification. Ideally, it should be
7037 possible to do non-destructive gimplification.
7039 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7040 the expression needs to evaluate to a value to be used as
7041 an operand in a GIMPLE statement, this value will be stored in
7042 *EXPR_P on exit. This happens when the caller specifies one
7043 of fb_lvalue or fb_rvalue fallback flags.
7045 PRE_P will contain the sequence of GIMPLE statements corresponding
7046 to the evaluation of EXPR and all the side-effects that must
7047 be executed before the main expression. On exit, the last
7048 statement of PRE_P is the core statement being gimplified. For
7049 instance, when gimplifying 'if (++a)' the last statement in
7050 PRE_P will be 'if (t.1)' where t.1 is the result of
7051 pre-incrementing 'a'.
7053 POST_P will contain the sequence of GIMPLE statements corresponding
7054 to the evaluation of all the side-effects that must be executed
7055 after the main expression. If this is NULL, the post
7056 side-effects are stored at the end of PRE_P.
7058 The reason why the output is split in two is to handle post
7059 side-effects explicitly. In some cases, an expression may have
7060 inner and outer post side-effects which need to be emitted in
7061 an order different from the one given by the recursive
7062 traversal. For instance, for the expression (*p--)++ the post
7063 side-effects of '--' must actually occur *after* the post
7064 side-effects of '++'. However, gimplification will first visit
7065 the inner expression, so if a separate POST sequence was not
7066 used, the resulting sequence would be:
7068 1 t.1 = *p
7069 2 p = p - 1
7070 3 t.2 = t.1 + 1
7071 4 *p = t.2
7073 However, the post-decrement operation in line #2 must not be
7074 evaluated until after the store to *p at line #4, so the
7075 correct sequence should be:
7077 1 t.1 = *p
7078 2 t.2 = t.1 + 1
7079 3 *p = t.2
7080 4 p = p - 1
7082 So, by specifying a separate post queue, it is possible
7083 to emit the post side-effects in the correct order.
7084 If POST_P is NULL, an internal queue will be used. Before
7085 returning to the caller, the sequence POST_P is appended to
7086 the main output sequence PRE_P.
7088 GIMPLE_TEST_F points to a function that takes a tree T and
7089 returns nonzero if T is in the GIMPLE form requested by the
7090 caller. The GIMPLE predicates are in gimple.c.
7092 FALLBACK tells the function what sort of a temporary we want if
7093 gimplification cannot produce an expression that complies with
7094 GIMPLE_TEST_F.
7096 fb_none means that no temporary should be generated
7097 fb_rvalue means that an rvalue is OK to generate
7098 fb_lvalue means that an lvalue is OK to generate
7099 fb_either means that either is OK, but an lvalue is preferable.
7100 fb_mayfail means that gimplification may fail (in which case
7101 GS_ERROR will be returned)
7103 The return value is either GS_ERROR or GS_ALL_DONE, since this
7104 function iterates until EXPR is completely gimplified or an error
7105 occurs. */
7107 enum gimplify_status
7108 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7109 bool (*gimple_test_f) (tree), fallback_t fallback)
7111 tree tmp;
7112 gimple_seq internal_pre = NULL;
7113 gimple_seq internal_post = NULL;
7114 tree save_expr;
7115 bool is_statement;
7116 location_t saved_location;
7117 enum gimplify_status ret;
7118 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7120 save_expr = *expr_p;
7121 if (save_expr == NULL_TREE)
7122 return GS_ALL_DONE;
7124 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7125 is_statement = gimple_test_f == is_gimple_stmt;
7126 if (is_statement)
7127 gcc_assert (pre_p);
7129 /* Consistency checks. */
7130 if (gimple_test_f == is_gimple_reg)
7131 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7132 else if (gimple_test_f == is_gimple_val
7133 || gimple_test_f == is_gimple_call_addr
7134 || gimple_test_f == is_gimple_condexpr
7135 || gimple_test_f == is_gimple_mem_rhs
7136 || gimple_test_f == is_gimple_mem_rhs_or_call
7137 || gimple_test_f == is_gimple_reg_rhs
7138 || gimple_test_f == is_gimple_reg_rhs_or_call
7139 || gimple_test_f == is_gimple_asm_val
7140 || gimple_test_f == is_gimple_mem_ref_addr)
7141 gcc_assert (fallback & fb_rvalue);
7142 else if (gimple_test_f == is_gimple_min_lval
7143 || gimple_test_f == is_gimple_lvalue)
7144 gcc_assert (fallback & fb_lvalue);
7145 else if (gimple_test_f == is_gimple_addressable)
7146 gcc_assert (fallback & fb_either);
7147 else if (gimple_test_f == is_gimple_stmt)
7148 gcc_assert (fallback == fb_none);
7149 else
7151 /* We should have recognized the GIMPLE_TEST_F predicate to
7152 know what kind of fallback to use in case a temporary is
7153 needed to hold the value or address of *EXPR_P. */
7154 gcc_unreachable ();
7157 /* We used to check the predicate here and return immediately if it
7158 succeeds. This is wrong; the design is for gimplification to be
7159 idempotent, and for the predicates to only test for valid forms, not
7160 whether they are fully simplified. */
7161 if (pre_p == NULL)
7162 pre_p = &internal_pre;
7164 if (post_p == NULL)
7165 post_p = &internal_post;
7167 /* Remember the last statements added to PRE_P and POST_P. Every
7168 new statement added by the gimplification helpers needs to be
7169 annotated with location information. To centralize the
7170 responsibility, we remember the last statement that had been
7171 added to both queues before gimplifying *EXPR_P. If
7172 gimplification produces new statements in PRE_P and POST_P, those
7173 statements will be annotated with the same location information
7174 as *EXPR_P. */
7175 pre_last_gsi = gsi_last (*pre_p);
7176 post_last_gsi = gsi_last (*post_p);
7178 saved_location = input_location;
7179 if (save_expr != error_mark_node
7180 && EXPR_HAS_LOCATION (*expr_p))
7181 input_location = EXPR_LOCATION (*expr_p);
7183 /* Loop over the specific gimplifiers until the toplevel node
7184 remains the same. */
7187 /* Strip away as many useless type conversions as possible
7188 at the toplevel. */
7189 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7191 /* Remember the expr. */
7192 save_expr = *expr_p;
7194 /* Die, die, die, my darling. */
7195 if (save_expr == error_mark_node
7196 || (TREE_TYPE (save_expr)
7197 && TREE_TYPE (save_expr) == error_mark_node))
7199 ret = GS_ERROR;
7200 break;
7203 /* Do any language-specific gimplification. */
7204 ret = ((enum gimplify_status)
7205 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7206 if (ret == GS_OK)
7208 if (*expr_p == NULL_TREE)
7209 break;
7210 if (*expr_p != save_expr)
7211 continue;
7213 else if (ret != GS_UNHANDLED)
7214 break;
7216 /* Make sure that all the cases set 'ret' appropriately. */
7217 ret = GS_UNHANDLED;
7218 switch (TREE_CODE (*expr_p))
7220 /* First deal with the special cases. */
7222 case POSTINCREMENT_EXPR:
7223 case POSTDECREMENT_EXPR:
7224 case PREINCREMENT_EXPR:
7225 case PREDECREMENT_EXPR:
7226 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7227 fallback != fb_none,
7228 TREE_TYPE (*expr_p));
7229 break;
7231 case ARRAY_REF:
7232 case ARRAY_RANGE_REF:
7233 case REALPART_EXPR:
7234 case IMAGPART_EXPR:
7235 case COMPONENT_REF:
7236 case VIEW_CONVERT_EXPR:
7237 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7238 fallback ? fallback : fb_rvalue);
7239 break;
7241 case COND_EXPR:
7242 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7244 /* C99 code may assign to an array in a structure value of a
7245 conditional expression, and this has undefined behavior
7246 only on execution, so create a temporary if an lvalue is
7247 required. */
7248 if (fallback == fb_lvalue)
7250 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7251 mark_addressable (*expr_p);
7252 ret = GS_OK;
7254 break;
7256 case CILK_SPAWN_STMT:
7257 gcc_assert
7258 (fn_contains_cilk_spawn_p (cfun)
7259 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p));
7260 if (!seen_error ())
7262 ret = (enum gimplify_status)
7263 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p,
7264 post_p);
7265 break;
7267 /* If errors are seen, then just process it as a CALL_EXPR. */
7269 case CALL_EXPR:
7270 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7272 /* C99 code may assign to an array in a structure returned
7273 from a function, and this has undefined behavior only on
7274 execution, so create a temporary if an lvalue is
7275 required. */
7276 if (fallback == fb_lvalue)
7278 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7279 mark_addressable (*expr_p);
7280 ret = GS_OK;
7282 break;
7284 case TREE_LIST:
7285 gcc_unreachable ();
7287 case COMPOUND_EXPR:
7288 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7289 break;
7291 case COMPOUND_LITERAL_EXPR:
7292 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7293 gimple_test_f, fallback);
7294 break;
7296 case MODIFY_EXPR:
7297 case INIT_EXPR:
7298 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7299 fallback != fb_none);
7300 break;
7302 case TRUTH_ANDIF_EXPR:
7303 case TRUTH_ORIF_EXPR:
7305 /* Preserve the original type of the expression and the
7306 source location of the outer expression. */
7307 tree org_type = TREE_TYPE (*expr_p);
7308 *expr_p = gimple_boolify (*expr_p);
7309 *expr_p = build3_loc (input_location, COND_EXPR,
7310 org_type, *expr_p,
7311 fold_convert_loc
7312 (input_location,
7313 org_type, boolean_true_node),
7314 fold_convert_loc
7315 (input_location,
7316 org_type, boolean_false_node));
7317 ret = GS_OK;
7318 break;
7321 case TRUTH_NOT_EXPR:
7323 tree type = TREE_TYPE (*expr_p);
7324 /* The parsers are careful to generate TRUTH_NOT_EXPR
7325 only with operands that are always zero or one.
7326 We do not fold here but handle the only interesting case
7327 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7328 *expr_p = gimple_boolify (*expr_p);
7329 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7330 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7331 TREE_TYPE (*expr_p),
7332 TREE_OPERAND (*expr_p, 0));
7333 else
7334 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7335 TREE_TYPE (*expr_p),
7336 TREE_OPERAND (*expr_p, 0),
7337 build_int_cst (TREE_TYPE (*expr_p), 1));
7338 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7339 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7340 ret = GS_OK;
7341 break;
7344 case ADDR_EXPR:
7345 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7346 break;
7348 case ANNOTATE_EXPR:
7350 tree cond = TREE_OPERAND (*expr_p, 0);
7351 tree id = TREE_OPERAND (*expr_p, 1);
7352 tree tmp = create_tmp_var_raw (TREE_TYPE(cond), NULL);
7353 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
7354 gimple call = gimple_build_call_internal (IFN_ANNOTATE, 2,
7355 cond, id);
7356 gimple_call_set_lhs (call, tmp);
7357 gimplify_seq_add_stmt (pre_p, call);
7358 *expr_p = tmp;
7359 ret = GS_ALL_DONE;
7360 break;
7363 case VA_ARG_EXPR:
7364 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7365 break;
7367 CASE_CONVERT:
7368 if (IS_EMPTY_STMT (*expr_p))
7370 ret = GS_ALL_DONE;
7371 break;
7374 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7375 || fallback == fb_none)
7377 /* Just strip a conversion to void (or in void context) and
7378 try again. */
7379 *expr_p = TREE_OPERAND (*expr_p, 0);
7380 ret = GS_OK;
7381 break;
7384 ret = gimplify_conversion (expr_p);
7385 if (ret == GS_ERROR)
7386 break;
7387 if (*expr_p != save_expr)
7388 break;
7389 /* FALLTHRU */
7391 case FIX_TRUNC_EXPR:
7392 /* unary_expr: ... | '(' cast ')' val | ... */
7393 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7394 is_gimple_val, fb_rvalue);
7395 recalculate_side_effects (*expr_p);
7396 break;
7398 case INDIRECT_REF:
7400 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7401 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7402 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7404 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7405 if (*expr_p != save_expr)
7407 ret = GS_OK;
7408 break;
7411 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7412 is_gimple_reg, fb_rvalue);
7413 if (ret == GS_ERROR)
7414 break;
7416 recalculate_side_effects (*expr_p);
7417 *expr_p = fold_build2_loc (input_location, MEM_REF,
7418 TREE_TYPE (*expr_p),
7419 TREE_OPERAND (*expr_p, 0),
7420 build_int_cst (saved_ptr_type, 0));
7421 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7422 TREE_THIS_NOTRAP (*expr_p) = notrap;
7423 ret = GS_OK;
7424 break;
7427 /* We arrive here through the various re-gimplifcation paths. */
7428 case MEM_REF:
7429 /* First try re-folding the whole thing. */
7430 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7431 TREE_OPERAND (*expr_p, 0),
7432 TREE_OPERAND (*expr_p, 1));
7433 if (tmp)
7435 *expr_p = tmp;
7436 recalculate_side_effects (*expr_p);
7437 ret = GS_OK;
7438 break;
7440 /* Avoid re-gimplifying the address operand if it is already
7441 in suitable form. Re-gimplifying would mark the address
7442 operand addressable. Always gimplify when not in SSA form
7443 as we still may have to gimplify decls with value-exprs. */
7444 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7445 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7447 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7448 is_gimple_mem_ref_addr, fb_rvalue);
7449 if (ret == GS_ERROR)
7450 break;
7452 recalculate_side_effects (*expr_p);
7453 ret = GS_ALL_DONE;
7454 break;
7456 /* Constants need not be gimplified. */
7457 case INTEGER_CST:
7458 case REAL_CST:
7459 case FIXED_CST:
7460 case STRING_CST:
7461 case COMPLEX_CST:
7462 case VECTOR_CST:
7463 /* Drop the overflow flag on constants, we do not want
7464 that in the GIMPLE IL. */
7465 if (TREE_OVERFLOW_P (*expr_p))
7466 *expr_p = drop_tree_overflow (*expr_p);
7467 ret = GS_ALL_DONE;
7468 break;
7470 case CONST_DECL:
7471 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7472 CONST_DECL node. Otherwise the decl is replaceable by its
7473 value. */
7474 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7475 if (fallback & fb_lvalue)
7476 ret = GS_ALL_DONE;
7477 else
7479 *expr_p = DECL_INITIAL (*expr_p);
7480 ret = GS_OK;
7482 break;
7484 case DECL_EXPR:
7485 ret = gimplify_decl_expr (expr_p, pre_p);
7486 break;
7488 case BIND_EXPR:
7489 ret = gimplify_bind_expr (expr_p, pre_p);
7490 break;
7492 case LOOP_EXPR:
7493 ret = gimplify_loop_expr (expr_p, pre_p);
7494 break;
7496 case SWITCH_EXPR:
7497 ret = gimplify_switch_expr (expr_p, pre_p);
7498 break;
7500 case EXIT_EXPR:
7501 ret = gimplify_exit_expr (expr_p);
7502 break;
7504 case GOTO_EXPR:
7505 /* If the target is not LABEL, then it is a computed jump
7506 and the target needs to be gimplified. */
7507 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7509 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7510 NULL, is_gimple_val, fb_rvalue);
7511 if (ret == GS_ERROR)
7512 break;
7514 gimplify_seq_add_stmt (pre_p,
7515 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7516 ret = GS_ALL_DONE;
7517 break;
7519 case PREDICT_EXPR:
7520 gimplify_seq_add_stmt (pre_p,
7521 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7522 PREDICT_EXPR_OUTCOME (*expr_p)));
7523 ret = GS_ALL_DONE;
7524 break;
7526 case LABEL_EXPR:
7527 ret = GS_ALL_DONE;
7528 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7529 == current_function_decl);
7530 gimplify_seq_add_stmt (pre_p,
7531 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7532 break;
7534 case CASE_LABEL_EXPR:
7535 ret = gimplify_case_label_expr (expr_p, pre_p);
7536 break;
7538 case RETURN_EXPR:
7539 ret = gimplify_return_expr (*expr_p, pre_p);
7540 break;
7542 case CONSTRUCTOR:
7543 /* Don't reduce this in place; let gimplify_init_constructor work its
7544 magic. Buf if we're just elaborating this for side effects, just
7545 gimplify any element that has side-effects. */
7546 if (fallback == fb_none)
7548 unsigned HOST_WIDE_INT ix;
7549 tree val;
7550 tree temp = NULL_TREE;
7551 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7552 if (TREE_SIDE_EFFECTS (val))
7553 append_to_statement_list (val, &temp);
7555 *expr_p = temp;
7556 ret = temp ? GS_OK : GS_ALL_DONE;
7558 /* C99 code may assign to an array in a constructed
7559 structure or union, and this has undefined behavior only
7560 on execution, so create a temporary if an lvalue is
7561 required. */
7562 else if (fallback == fb_lvalue)
7564 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7565 mark_addressable (*expr_p);
7566 ret = GS_OK;
7568 else
7569 ret = GS_ALL_DONE;
7570 break;
7572 /* The following are special cases that are not handled by the
7573 original GIMPLE grammar. */
7575 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7576 eliminated. */
7577 case SAVE_EXPR:
7578 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7579 break;
7581 case BIT_FIELD_REF:
7582 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7583 post_p, is_gimple_lvalue, fb_either);
7584 recalculate_side_effects (*expr_p);
7585 break;
7587 case TARGET_MEM_REF:
7589 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7591 if (TMR_BASE (*expr_p))
7592 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7593 post_p, is_gimple_mem_ref_addr, fb_either);
7594 if (TMR_INDEX (*expr_p))
7595 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7596 post_p, is_gimple_val, fb_rvalue);
7597 if (TMR_INDEX2 (*expr_p))
7598 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7599 post_p, is_gimple_val, fb_rvalue);
7600 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7601 ret = MIN (r0, r1);
7603 break;
7605 case NON_LVALUE_EXPR:
7606 /* This should have been stripped above. */
7607 gcc_unreachable ();
7609 case ASM_EXPR:
7610 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7611 break;
7613 case TRY_FINALLY_EXPR:
7614 case TRY_CATCH_EXPR:
7616 gimple_seq eval, cleanup;
7617 gimple try_;
7619 /* Calls to destructors are generated automatically in FINALLY/CATCH
7620 block. They should have location as UNKNOWN_LOCATION. However,
7621 gimplify_call_expr will reset these call stmts to input_location
7622 if it finds stmt's location is unknown. To prevent resetting for
7623 destructors, we set the input_location to unknown.
7624 Note that this only affects the destructor calls in FINALLY/CATCH
7625 block, and will automatically reset to its original value by the
7626 end of gimplify_expr. */
7627 input_location = UNKNOWN_LOCATION;
7628 eval = cleanup = NULL;
7629 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7630 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7631 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7632 if (gimple_seq_empty_p (cleanup))
7634 gimple_seq_add_seq (pre_p, eval);
7635 ret = GS_ALL_DONE;
7636 break;
7638 try_ = gimple_build_try (eval, cleanup,
7639 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7640 ? GIMPLE_TRY_FINALLY
7641 : GIMPLE_TRY_CATCH);
7642 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
7643 gimple_set_location (try_, saved_location);
7644 else
7645 gimple_set_location (try_, EXPR_LOCATION (save_expr));
7646 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7647 gimple_try_set_catch_is_cleanup (try_,
7648 TRY_CATCH_IS_CLEANUP (*expr_p));
7649 gimplify_seq_add_stmt (pre_p, try_);
7650 ret = GS_ALL_DONE;
7651 break;
7654 case CLEANUP_POINT_EXPR:
7655 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7656 break;
7658 case TARGET_EXPR:
7659 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7660 break;
7662 case CATCH_EXPR:
7664 gimple c;
7665 gimple_seq handler = NULL;
7666 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7667 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7668 gimplify_seq_add_stmt (pre_p, c);
7669 ret = GS_ALL_DONE;
7670 break;
7673 case EH_FILTER_EXPR:
7675 gimple ehf;
7676 gimple_seq failure = NULL;
7678 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7679 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
7680 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
7681 gimplify_seq_add_stmt (pre_p, ehf);
7682 ret = GS_ALL_DONE;
7683 break;
7686 case OBJ_TYPE_REF:
7688 enum gimplify_status r0, r1;
7689 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7690 post_p, is_gimple_val, fb_rvalue);
7691 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7692 post_p, is_gimple_val, fb_rvalue);
7693 TREE_SIDE_EFFECTS (*expr_p) = 0;
7694 ret = MIN (r0, r1);
7696 break;
7698 case LABEL_DECL:
7699 /* We get here when taking the address of a label. We mark
7700 the label as "forced"; meaning it can never be removed and
7701 it is a potential target for any computed goto. */
7702 FORCED_LABEL (*expr_p) = 1;
7703 ret = GS_ALL_DONE;
7704 break;
7706 case STATEMENT_LIST:
7707 ret = gimplify_statement_list (expr_p, pre_p);
7708 break;
7710 case WITH_SIZE_EXPR:
7712 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7713 post_p == &internal_post ? NULL : post_p,
7714 gimple_test_f, fallback);
7715 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7716 is_gimple_val, fb_rvalue);
7717 ret = GS_ALL_DONE;
7719 break;
7721 case VAR_DECL:
7722 case PARM_DECL:
7723 ret = gimplify_var_or_parm_decl (expr_p);
7724 break;
7726 case RESULT_DECL:
7727 /* When within an OpenMP context, notice uses of variables. */
7728 if (gimplify_omp_ctxp)
7729 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7730 ret = GS_ALL_DONE;
7731 break;
7733 case SSA_NAME:
7734 /* Allow callbacks into the gimplifier during optimization. */
7735 ret = GS_ALL_DONE;
7736 break;
7738 case OMP_PARALLEL:
7739 gimplify_omp_parallel (expr_p, pre_p);
7740 ret = GS_ALL_DONE;
7741 break;
7743 case OMP_TASK:
7744 gimplify_omp_task (expr_p, pre_p);
7745 ret = GS_ALL_DONE;
7746 break;
7748 case OMP_FOR:
7749 case OMP_SIMD:
7750 case CILK_SIMD:
7751 case OMP_DISTRIBUTE:
7752 ret = gimplify_omp_for (expr_p, pre_p);
7753 break;
7755 case OMP_SECTIONS:
7756 case OMP_SINGLE:
7757 case OMP_TARGET:
7758 case OMP_TARGET_DATA:
7759 case OMP_TEAMS:
7760 gimplify_omp_workshare (expr_p, pre_p);
7761 ret = GS_ALL_DONE;
7762 break;
7764 case OMP_TARGET_UPDATE:
7765 gimplify_omp_target_update (expr_p, pre_p);
7766 ret = GS_ALL_DONE;
7767 break;
7769 case OMP_SECTION:
7770 case OMP_MASTER:
7771 case OMP_TASKGROUP:
7772 case OMP_ORDERED:
7773 case OMP_CRITICAL:
7775 gimple_seq body = NULL;
7776 gimple g;
7778 gimplify_and_add (OMP_BODY (*expr_p), &body);
7779 switch (TREE_CODE (*expr_p))
7781 case OMP_SECTION:
7782 g = gimple_build_omp_section (body);
7783 break;
7784 case OMP_MASTER:
7785 g = gimple_build_omp_master (body);
7786 break;
7787 case OMP_TASKGROUP:
7789 gimple_seq cleanup = NULL;
7790 tree fn
7791 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
7792 g = gimple_build_call (fn, 0);
7793 gimple_seq_add_stmt (&cleanup, g);
7794 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7795 body = NULL;
7796 gimple_seq_add_stmt (&body, g);
7797 g = gimple_build_omp_taskgroup (body);
7799 break;
7800 case OMP_ORDERED:
7801 g = gimple_build_omp_ordered (body);
7802 break;
7803 case OMP_CRITICAL:
7804 g = gimple_build_omp_critical (body,
7805 OMP_CRITICAL_NAME (*expr_p));
7806 break;
7807 default:
7808 gcc_unreachable ();
7810 gimplify_seq_add_stmt (pre_p, g);
7811 ret = GS_ALL_DONE;
7812 break;
7815 case OMP_ATOMIC:
7816 case OMP_ATOMIC_READ:
7817 case OMP_ATOMIC_CAPTURE_OLD:
7818 case OMP_ATOMIC_CAPTURE_NEW:
7819 ret = gimplify_omp_atomic (expr_p, pre_p);
7820 break;
7822 case TRANSACTION_EXPR:
7823 ret = gimplify_transaction (expr_p, pre_p);
7824 break;
7826 case TRUTH_AND_EXPR:
7827 case TRUTH_OR_EXPR:
7828 case TRUTH_XOR_EXPR:
7830 tree orig_type = TREE_TYPE (*expr_p);
7831 tree new_type, xop0, xop1;
7832 *expr_p = gimple_boolify (*expr_p);
7833 new_type = TREE_TYPE (*expr_p);
7834 if (!useless_type_conversion_p (orig_type, new_type))
7836 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
7837 ret = GS_OK;
7838 break;
7841 /* Boolified binary truth expressions are semantically equivalent
7842 to bitwise binary expressions. Canonicalize them to the
7843 bitwise variant. */
7844 switch (TREE_CODE (*expr_p))
7846 case TRUTH_AND_EXPR:
7847 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7848 break;
7849 case TRUTH_OR_EXPR:
7850 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7851 break;
7852 case TRUTH_XOR_EXPR:
7853 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7854 break;
7855 default:
7856 break;
7858 /* Now make sure that operands have compatible type to
7859 expression's new_type. */
7860 xop0 = TREE_OPERAND (*expr_p, 0);
7861 xop1 = TREE_OPERAND (*expr_p, 1);
7862 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
7863 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
7864 new_type,
7865 xop0);
7866 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
7867 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
7868 new_type,
7869 xop1);
7870 /* Continue classified as tcc_binary. */
7871 goto expr_2;
7874 case FMA_EXPR:
7875 case VEC_COND_EXPR:
7876 case VEC_PERM_EXPR:
7877 /* Classified as tcc_expression. */
7878 goto expr_3;
7880 case POINTER_PLUS_EXPR:
7882 enum gimplify_status r0, r1;
7883 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7884 post_p, is_gimple_val, fb_rvalue);
7885 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7886 post_p, is_gimple_val, fb_rvalue);
7887 recalculate_side_effects (*expr_p);
7888 ret = MIN (r0, r1);
7889 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
7890 after gimplifying operands - this is similar to how
7891 it would be folding all gimplified stmts on creation
7892 to have them canonicalized, which is what we eventually
7893 should do anyway. */
7894 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7895 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
7897 *expr_p = build_fold_addr_expr_with_type_loc
7898 (input_location,
7899 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
7900 TREE_OPERAND (*expr_p, 0),
7901 fold_convert (ptr_type_node,
7902 TREE_OPERAND (*expr_p, 1))),
7903 TREE_TYPE (*expr_p));
7904 ret = MIN (ret, GS_OK);
7906 break;
7909 case CILK_SYNC_STMT:
7911 if (!fn_contains_cilk_spawn_p (cfun))
7913 error_at (EXPR_LOCATION (*expr_p),
7914 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
7915 ret = GS_ERROR;
7917 else
7919 gimplify_cilk_sync (expr_p, pre_p);
7920 ret = GS_ALL_DONE;
7922 break;
7925 default:
7926 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7928 case tcc_comparison:
7929 /* Handle comparison of objects of non scalar mode aggregates
7930 with a call to memcmp. It would be nice to only have to do
7931 this for variable-sized objects, but then we'd have to allow
7932 the same nest of reference nodes we allow for MODIFY_EXPR and
7933 that's too complex.
7935 Compare scalar mode aggregates as scalar mode values. Using
7936 memcmp for them would be very inefficient at best, and is
7937 plain wrong if bitfields are involved. */
7939 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7941 /* Vector comparisons need no boolification. */
7942 if (TREE_CODE (type) == VECTOR_TYPE)
7943 goto expr_2;
7944 else if (!AGGREGATE_TYPE_P (type))
7946 tree org_type = TREE_TYPE (*expr_p);
7947 *expr_p = gimple_boolify (*expr_p);
7948 if (!useless_type_conversion_p (org_type,
7949 TREE_TYPE (*expr_p)))
7951 *expr_p = fold_convert_loc (input_location,
7952 org_type, *expr_p);
7953 ret = GS_OK;
7955 else
7956 goto expr_2;
7958 else if (TYPE_MODE (type) != BLKmode)
7959 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7960 else
7961 ret = gimplify_variable_sized_compare (expr_p);
7963 break;
7966 /* If *EXPR_P does not need to be special-cased, handle it
7967 according to its class. */
7968 case tcc_unary:
7969 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7970 post_p, is_gimple_val, fb_rvalue);
7971 break;
7973 case tcc_binary:
7974 expr_2:
7976 enum gimplify_status r0, r1;
7978 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7979 post_p, is_gimple_val, fb_rvalue);
7980 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7981 post_p, is_gimple_val, fb_rvalue);
7983 ret = MIN (r0, r1);
7984 break;
7987 expr_3:
7989 enum gimplify_status r0, r1, r2;
7991 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7992 post_p, is_gimple_val, fb_rvalue);
7993 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7994 post_p, is_gimple_val, fb_rvalue);
7995 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7996 post_p, is_gimple_val, fb_rvalue);
7998 ret = MIN (MIN (r0, r1), r2);
7999 break;
8002 case tcc_declaration:
8003 case tcc_constant:
8004 ret = GS_ALL_DONE;
8005 goto dont_recalculate;
8007 default:
8008 gcc_unreachable ();
8011 recalculate_side_effects (*expr_p);
8013 dont_recalculate:
8014 break;
8017 gcc_assert (*expr_p || ret != GS_OK);
8019 while (ret == GS_OK);
8021 /* If we encountered an error_mark somewhere nested inside, either
8022 stub out the statement or propagate the error back out. */
8023 if (ret == GS_ERROR)
8025 if (is_statement)
8026 *expr_p = NULL;
8027 goto out;
8030 /* This was only valid as a return value from the langhook, which
8031 we handled. Make sure it doesn't escape from any other context. */
8032 gcc_assert (ret != GS_UNHANDLED);
8034 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8036 /* We aren't looking for a value, and we don't have a valid
8037 statement. If it doesn't have side-effects, throw it away. */
8038 if (!TREE_SIDE_EFFECTS (*expr_p))
8039 *expr_p = NULL;
8040 else if (!TREE_THIS_VOLATILE (*expr_p))
8042 /* This is probably a _REF that contains something nested that
8043 has side effects. Recurse through the operands to find it. */
8044 enum tree_code code = TREE_CODE (*expr_p);
8046 switch (code)
8048 case COMPONENT_REF:
8049 case REALPART_EXPR:
8050 case IMAGPART_EXPR:
8051 case VIEW_CONVERT_EXPR:
8052 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8053 gimple_test_f, fallback);
8054 break;
8056 case ARRAY_REF:
8057 case ARRAY_RANGE_REF:
8058 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8059 gimple_test_f, fallback);
8060 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8061 gimple_test_f, fallback);
8062 break;
8064 default:
8065 /* Anything else with side-effects must be converted to
8066 a valid statement before we get here. */
8067 gcc_unreachable ();
8070 *expr_p = NULL;
8072 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8073 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8075 /* Historically, the compiler has treated a bare reference
8076 to a non-BLKmode volatile lvalue as forcing a load. */
8077 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8079 /* Normally, we do not want to create a temporary for a
8080 TREE_ADDRESSABLE type because such a type should not be
8081 copied by bitwise-assignment. However, we make an
8082 exception here, as all we are doing here is ensuring that
8083 we read the bytes that make up the type. We use
8084 create_tmp_var_raw because create_tmp_var will abort when
8085 given a TREE_ADDRESSABLE type. */
8086 tree tmp = create_tmp_var_raw (type, "vol");
8087 gimple_add_tmp_var (tmp);
8088 gimplify_assign (tmp, *expr_p, pre_p);
8089 *expr_p = NULL;
8091 else
8092 /* We can't do anything useful with a volatile reference to
8093 an incomplete type, so just throw it away. Likewise for
8094 a BLKmode type, since any implicit inner load should
8095 already have been turned into an explicit one by the
8096 gimplification process. */
8097 *expr_p = NULL;
8100 /* If we are gimplifying at the statement level, we're done. Tack
8101 everything together and return. */
8102 if (fallback == fb_none || is_statement)
8104 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8105 it out for GC to reclaim it. */
8106 *expr_p = NULL_TREE;
8108 if (!gimple_seq_empty_p (internal_pre)
8109 || !gimple_seq_empty_p (internal_post))
8111 gimplify_seq_add_seq (&internal_pre, internal_post);
8112 gimplify_seq_add_seq (pre_p, internal_pre);
8115 /* The result of gimplifying *EXPR_P is going to be the last few
8116 statements in *PRE_P and *POST_P. Add location information
8117 to all the statements that were added by the gimplification
8118 helpers. */
8119 if (!gimple_seq_empty_p (*pre_p))
8120 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8122 if (!gimple_seq_empty_p (*post_p))
8123 annotate_all_with_location_after (*post_p, post_last_gsi,
8124 input_location);
8126 goto out;
8129 #ifdef ENABLE_GIMPLE_CHECKING
8130 if (*expr_p)
8132 enum tree_code code = TREE_CODE (*expr_p);
8133 /* These expressions should already be in gimple IR form. */
8134 gcc_assert (code != MODIFY_EXPR
8135 && code != ASM_EXPR
8136 && code != BIND_EXPR
8137 && code != CATCH_EXPR
8138 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8139 && code != EH_FILTER_EXPR
8140 && code != GOTO_EXPR
8141 && code != LABEL_EXPR
8142 && code != LOOP_EXPR
8143 && code != SWITCH_EXPR
8144 && code != TRY_FINALLY_EXPR
8145 && code != OMP_CRITICAL
8146 && code != OMP_FOR
8147 && code != OMP_MASTER
8148 && code != OMP_TASKGROUP
8149 && code != OMP_ORDERED
8150 && code != OMP_PARALLEL
8151 && code != OMP_SECTIONS
8152 && code != OMP_SECTION
8153 && code != OMP_SINGLE);
8155 #endif
8157 /* Otherwise we're gimplifying a subexpression, so the resulting
8158 value is interesting. If it's a valid operand that matches
8159 GIMPLE_TEST_F, we're done. Unless we are handling some
8160 post-effects internally; if that's the case, we need to copy into
8161 a temporary before adding the post-effects to POST_P. */
8162 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
8163 goto out;
8165 /* Otherwise, we need to create a new temporary for the gimplified
8166 expression. */
8168 /* We can't return an lvalue if we have an internal postqueue. The
8169 object the lvalue refers to would (probably) be modified by the
8170 postqueue; we need to copy the value out first, which means an
8171 rvalue. */
8172 if ((fallback & fb_lvalue)
8173 && gimple_seq_empty_p (internal_post)
8174 && is_gimple_addressable (*expr_p))
8176 /* An lvalue will do. Take the address of the expression, store it
8177 in a temporary, and replace the expression with an INDIRECT_REF of
8178 that temporary. */
8179 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8180 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8181 *expr_p = build_simple_mem_ref (tmp);
8183 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8185 /* An rvalue will do. Assign the gimplified expression into a
8186 new temporary TMP and replace the original expression with
8187 TMP. First, make sure that the expression has a type so that
8188 it can be assigned into a temporary. */
8189 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
8190 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
8192 else
8194 #ifdef ENABLE_GIMPLE_CHECKING
8195 if (!(fallback & fb_mayfail))
8197 fprintf (stderr, "gimplification failed:\n");
8198 print_generic_expr (stderr, *expr_p, 0);
8199 debug_tree (*expr_p);
8200 internal_error ("gimplification failed");
8202 #endif
8203 gcc_assert (fallback & fb_mayfail);
8205 /* If this is an asm statement, and the user asked for the
8206 impossible, don't die. Fail and let gimplify_asm_expr
8207 issue an error. */
8208 ret = GS_ERROR;
8209 goto out;
8212 /* Make sure the temporary matches our predicate. */
8213 gcc_assert ((*gimple_test_f) (*expr_p));
8215 if (!gimple_seq_empty_p (internal_post))
8217 annotate_all_with_location (internal_post, input_location);
8218 gimplify_seq_add_seq (pre_p, internal_post);
8221 out:
8222 input_location = saved_location;
8223 return ret;
8226 /* Look through TYPE for variable-sized objects and gimplify each such
8227 size that we find. Add to LIST_P any statements generated. */
8229 void
8230 gimplify_type_sizes (tree type, gimple_seq *list_p)
8232 tree field, t;
8234 if (type == NULL || type == error_mark_node)
8235 return;
8237 /* We first do the main variant, then copy into any other variants. */
8238 type = TYPE_MAIN_VARIANT (type);
8240 /* Avoid infinite recursion. */
8241 if (TYPE_SIZES_GIMPLIFIED (type))
8242 return;
8244 TYPE_SIZES_GIMPLIFIED (type) = 1;
8246 switch (TREE_CODE (type))
8248 case INTEGER_TYPE:
8249 case ENUMERAL_TYPE:
8250 case BOOLEAN_TYPE:
8251 case REAL_TYPE:
8252 case FIXED_POINT_TYPE:
8253 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8254 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
8256 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8258 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8259 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
8261 break;
8263 case ARRAY_TYPE:
8264 /* These types may not have declarations, so handle them here. */
8265 gimplify_type_sizes (TREE_TYPE (type), list_p);
8266 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
8267 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
8268 with assigned stack slots, for -O1+ -g they should be tracked
8269 by VTA. */
8270 if (!(TYPE_NAME (type)
8271 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8272 && DECL_IGNORED_P (TYPE_NAME (type)))
8273 && TYPE_DOMAIN (type)
8274 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8276 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8277 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8278 DECL_IGNORED_P (t) = 0;
8279 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8280 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8281 DECL_IGNORED_P (t) = 0;
8283 break;
8285 case RECORD_TYPE:
8286 case UNION_TYPE:
8287 case QUAL_UNION_TYPE:
8288 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8289 if (TREE_CODE (field) == FIELD_DECL)
8291 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
8292 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8293 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8294 gimplify_type_sizes (TREE_TYPE (field), list_p);
8296 break;
8298 case POINTER_TYPE:
8299 case REFERENCE_TYPE:
8300 /* We used to recurse on the pointed-to type here, which turned out to
8301 be incorrect because its definition might refer to variables not
8302 yet initialized at this point if a forward declaration is involved.
8304 It was actually useful for anonymous pointed-to types to ensure
8305 that the sizes evaluation dominates every possible later use of the
8306 values. Restricting to such types here would be safe since there
8307 is no possible forward declaration around, but would introduce an
8308 undesirable middle-end semantic to anonymity. We then defer to
8309 front-ends the responsibility of ensuring that the sizes are
8310 evaluated both early and late enough, e.g. by attaching artificial
8311 type declarations to the tree. */
8312 break;
8314 default:
8315 break;
8318 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8319 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
8321 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8323 TYPE_SIZE (t) = TYPE_SIZE (type);
8324 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8325 TYPE_SIZES_GIMPLIFIED (t) = 1;
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (is_gimple_sizepos (expr))
    return;

  /* Unshare before gimplifying: the size expression may be shared with
     other types, and gimplification mutates the tree in place.  */
  *expr_p = unshare_expr (expr);

  /* Reduce the expression to a GIMPLE value, emitting any statements
     needed to compute it into *STMT_P.  */
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
}
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gimple
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_bind;
  struct gimplify_ctx gctx;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* A gimplification context must not already be active; we create the
     outermost one here and verify it is gone again before returning.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (&gctx);

  if (flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* Functions marked "omp declare target" gimplify inside an
	 ORT_TARGET OpenMP context.  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Nested functions need the set in which non-local VLAs are tracked;
     it is released at the end of this function.  */
  cgn = cgraph_get_node (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = pointer_set_create ();

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_bind = gimple_seq_first_stmt (seq);
  if (!outer_bind)
    {
      /* An empty body still needs a statement to anchor the bind below.  */
      outer_bind = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_bind);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_bind) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    ;
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if (nonlocal_vlas)
    {
      pointer_set_destroy (nonlocal_vlas);
      nonlocal_vlas = NULL;
    }

  /* Release the OpenMP context installed above (or by the gimplifier
     itself when -fopenmp-simd is in effect).  */
  if ((flag_openmp || flag_openmp_simd) && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_CHECKING
  if (!seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
8462 typedef char *char_p; /* For DEF_VEC_P. */
8464 /* Return whether we should exclude FNDECL from instrumentation. */
8466 static bool
8467 flag_instrument_functions_exclude_p (tree fndecl)
8469 vec<char_p> *v;
8471 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8472 if (v && v->length () > 0)
8474 const char *name;
8475 int i;
8476 char *s;
8478 name = lang_hooks.decl_printable_name (fndecl, 0);
8479 FOR_EACH_VEC_ELT (*v, i, s)
8480 if (strstr (name, s) != NULL)
8481 return true;
8484 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8485 if (v && v->length () > 0)
8487 const char *name;
8488 int i;
8489 char *s;
8491 name = DECL_SOURCE_FILE (fndecl);
8492 FOR_EACH_VEC_ELT (*v, i, s)
8493 if (strstr (name, s) != NULL)
8494 return true;
8497 return false;
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gimple bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the return value.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gimple new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gimple call;

      /* Cleanup sequence: tmp = __builtin_return_address (0);
	 __cyg_profile_func_exit (this_fn, tmp);  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Entry sequence: tmp = __builtin_return_address (0);
	 __cyg_profile_func_enter (this_fn, tmp); followed by the
	 try/finally wrapping the original body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  pop_cfun ();
}
8602 /* Return a dummy expression of type TYPE in order to keep going after an
8603 error. */
8605 static tree
8606 dummy_object (tree type)
8608 tree t = build_int_cst (build_pointer_type (type), 0);
8609 return build2 (MEM_REF, type, t, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the "pass X not Y" help note only once per compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      /* Let the target expand the va_arg access itself.  */
      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
8705 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
8707 DST/SRC are the destination and source respectively. You can pass
8708 ungimplified trees in DST or SRC, in which case they will be
8709 converted to a gimple operand if necessary.
8711 This function returns the newly created GIMPLE_ASSIGN tuple. */
8713 gimple
8714 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
8716 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8717 gimplify_and_add (t, seq_p);
8718 ggc_free (t);
8719 return gimple_seq_last_stmt (*seq_p);
8722 inline hashval_t
8723 gimplify_hasher::hash (const value_type *p)
8725 tree t = p->val;
8726 return iterative_hash_expr (t, 0);
8729 inline bool
8730 gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
8732 tree t1 = p1->val;
8733 tree t2 = p2->val;
8734 enum tree_code code = TREE_CODE (t1);
8736 if (TREE_CODE (t2) != code
8737 || TREE_TYPE (t1) != TREE_TYPE (t2))
8738 return false;
8740 if (!operand_equal_p (t1, t2, 0))
8741 return false;
8743 #ifdef ENABLE_CHECKING
8744 /* Only allow them to compare equal if they also hash equal; otherwise
8745 results are nondeterminate, and we fail bootstrap comparison. */
8746 gcc_assert (hash (p1) == hash (p2));
8747 #endif
8749 return true;