[official-gcc.git] / gcc / gimplify.c  (blob snapshot; last ChangeLog entry 2009-07-17, Richard Guenther <rguenther@suse.de>)
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h"
42 #include "flags.h"
43 #include "real.h"
44 #include "function.h"
45 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h"
48 #include "toplev.h"
49 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h"
52 #include "splay-tree.h"
53 #include "vec.h"
54 #include "gimple.h"
57 enum gimplify_omp_var_data
59 GOVD_SEEN = 1,
60 GOVD_EXPLICIT = 2,
61 GOVD_SHARED = 4,
62 GOVD_PRIVATE = 8,
63 GOVD_FIRSTPRIVATE = 16,
64 GOVD_LASTPRIVATE = 32,
65 GOVD_REDUCTION = 64,
66 GOVD_LOCAL = 128,
67 GOVD_DEBUG_PRIVATE = 256,
68 GOVD_PRIVATE_OUTER_REF = 512,
69 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
70 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
74 enum omp_region_type
76 ORT_WORKSHARE = 0,
77 ORT_TASK = 1,
78 ORT_PARALLEL = 2,
79 ORT_COMBINED_PARALLEL = 3
82 struct gimplify_omp_ctx
84 struct gimplify_omp_ctx *outer_context;
85 splay_tree variables;
86 struct pointer_set_t *privatized_types;
87 location_t location;
88 enum omp_clause_default_kind default_kind;
89 enum omp_region_type region_type;
92 static struct gimplify_ctx *gimplify_ctxp;
93 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
96 /* Formal (expression) temporary table handling: Multiple occurrences of
97 the same scalar expression are evaluated into the same temporary. */
99 typedef struct gimple_temp_hash_elt
101 tree val; /* Key */
102 tree temp; /* Value */
103 } elt_t;
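/* Illustrative sketch (editorial addition, not part of the original source):
   with formal temporaries, repeated occurrences of the same scalar
   expression are evaluated into one temporary, e.g.

       GENERIC:   c = (a + b) * (a + b);
       GIMPLE:    D.1 = a + b;
                  c = D.1 * D.1;

   The hash table entries below map the expression tree (val) to that
   temporary (temp); the name D.1 only suggests a compiler-generated
   temporary.  */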
105 /* Forward declarations. */
106 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
108 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
109 form and we don't do any syntax checking. */
110 void
111 mark_addressable (tree x)
113 while (handled_component_p (x))
114 x = TREE_OPERAND (x, 0);
115 if (TREE_CODE (x) != VAR_DECL
116 && TREE_CODE (x) != PARM_DECL
117 && TREE_CODE (x) != RESULT_DECL)
118 return;
119 TREE_ADDRESSABLE (x) = 1;
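/* Editorial example (sketch): for a reference such as "s.a[i]" the
   handled-component chain (ARRAY_REF, COMPONENT_REF, ...) is stripped and
   TREE_ADDRESSABLE is set on the underlying "s" declaration itself;
   anything that is not a VAR_DECL, PARM_DECL or RESULT_DECL is ignored.  */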
122 /* Return a hash value for a formal temporary table entry. */
124 static hashval_t
125 gimple_tree_hash (const void *p)
127 tree t = ((const elt_t *) p)->val;
128 return iterative_hash_expr (t, 0);
131 /* Compare two formal temporary table entries. */
133 static int
134 gimple_tree_eq (const void *p1, const void *p2)
136 tree t1 = ((const elt_t *) p1)->val;
137 tree t2 = ((const elt_t *) p2)->val;
138 enum tree_code code = TREE_CODE (t1);
140 if (TREE_CODE (t2) != code
141 || TREE_TYPE (t1) != TREE_TYPE (t2))
142 return 0;
144 if (!operand_equal_p (t1, t2, 0))
145 return 0;
147 /* Only allow them to compare equal if they also hash equal; otherwise
148 results are nondeterministic, and we fail bootstrap comparison. */
149 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
151 return 1;
154 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
155 *SEQ_P is NULL, a new sequence is allocated. This function is
156 similar to gimple_seq_add_stmt, but does not scan the operands.
157 During gimplification, we need to manipulate statement sequences
158 before the def/use vectors have been constructed. */
160 static void
161 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
163 gimple_stmt_iterator si;
165 if (gs == NULL)
166 return;
168 if (*seq_p == NULL)
169 *seq_p = gimple_seq_alloc ();
171 si = gsi_last (*seq_p);
173 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
176 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
177 NULL, a new sequence is allocated. This function is
178 similar to gimple_seq_add_seq, but does not scan the operands.
179 During gimplification, we need to manipulate statement sequences
180 before the def/use vectors have been constructed. */
182 static void
183 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
185 gimple_stmt_iterator si;
187 if (src == NULL)
188 return;
190 if (*dst_p == NULL)
191 *dst_p = gimple_seq_alloc ();
193 si = gsi_last (*dst_p);
194 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
197 /* Set up a context for the gimplifier. */
199 void
200 push_gimplify_context (struct gimplify_ctx *c)
202 memset (c, '\0', sizeof (*c));
203 c->prev_context = gimplify_ctxp;
204 gimplify_ctxp = c;
207 /* Tear down a context for the gimplifier. If BODY is non-null, then
208 put the temporaries into the outer BIND_EXPR. Otherwise, put them
209 in the local_decls.
211 BODY is not a sequence, but the first tuple in a sequence. */
213 void
214 pop_gimplify_context (gimple body)
216 struct gimplify_ctx *c = gimplify_ctxp;
218 gcc_assert (c && (c->bind_expr_stack == NULL
219 || VEC_empty (gimple, c->bind_expr_stack)));
220 VEC_free (gimple, heap, c->bind_expr_stack);
221 gimplify_ctxp = c->prev_context;
223 if (body)
224 declare_vars (c->temps, body, false);
225 else
226 record_vars (c->temps);
228 if (c->temp_htab)
229 htab_delete (c->temp_htab);
232 static void
233 gimple_push_bind_expr (gimple gimple_bind)
235 if (gimplify_ctxp->bind_expr_stack == NULL)
236 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
237 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
240 static void
241 gimple_pop_bind_expr (void)
243 VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
246 gimple
247 gimple_current_bind_expr (void)
249 return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
252 /* Return the stack GIMPLE_BINDs created during gimplification. */
254 VEC(gimple, heap) *
255 gimple_bind_expr_stack (void)
257 return gimplify_ctxp->bind_expr_stack;
260 /* Returns true iff there is a COND_EXPR between us and the innermost
261 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
263 static bool
264 gimple_conditional_context (void)
266 return gimplify_ctxp->conditions > 0;
269 /* Note that we've entered a COND_EXPR. */
271 static void
272 gimple_push_condition (void)
274 #ifdef ENABLE_GIMPLE_CHECKING
275 if (gimplify_ctxp->conditions == 0)
276 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
277 #endif
278 ++(gimplify_ctxp->conditions);
281 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
282 now, add any conditional cleanups we've seen to the prequeue. */
284 static void
285 gimple_pop_condition (gimple_seq *pre_p)
287 int conds = --(gimplify_ctxp->conditions);
289 gcc_assert (conds >= 0);
290 if (conds == 0)
292 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
293 gimplify_ctxp->conditional_cleanups = NULL;
297 /* A stable comparison routine for use with splay trees and DECLs. */
299 static int
300 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
302 tree a = (tree) xa;
303 tree b = (tree) xb;
305 return DECL_UID (a) - DECL_UID (b);
308 /* Create a new omp construct that deals with variable remapping. */
310 static struct gimplify_omp_ctx *
311 new_omp_context (enum omp_region_type region_type)
313 struct gimplify_omp_ctx *c;
315 c = XCNEW (struct gimplify_omp_ctx);
316 c->outer_context = gimplify_omp_ctxp;
317 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
318 c->privatized_types = pointer_set_create ();
319 c->location = input_location;
320 c->region_type = region_type;
321 if (region_type != ORT_TASK)
322 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
323 else
324 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
326 return c;
329 /* Destroy an omp construct that deals with variable remapping. */
331 static void
332 delete_omp_context (struct gimplify_omp_ctx *c)
334 splay_tree_delete (c->variables);
335 pointer_set_destroy (c->privatized_types);
336 XDELETE (c);
339 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
340 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
342 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
344 static void
345 append_to_statement_list_1 (tree t, tree *list_p)
347 tree list = *list_p;
348 tree_stmt_iterator i;
350 if (!list)
352 if (t && TREE_CODE (t) == STATEMENT_LIST)
354 *list_p = t;
355 return;
357 *list_p = list = alloc_stmt_list ();
360 i = tsi_last (list);
361 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
364 /* Add T to the end of the list container pointed to by LIST_P.
365 If T is an expression with no effects, it is ignored. */
367 void
368 append_to_statement_list (tree t, tree *list_p)
370 if (t && TREE_SIDE_EFFECTS (t))
371 append_to_statement_list_1 (t, list_p);
374 /* Similar, but the statement is always added, regardless of side effects. */
376 void
377 append_to_statement_list_force (tree t, tree *list_p)
379 if (t != NULL_TREE)
380 append_to_statement_list_1 (t, list_p);
383 /* Both gimplify the statement T and append it to *SEQ_P. This function
384 behaves exactly as gimplify_stmt, but you don't have to pass T as a
385 reference. */
387 void
388 gimplify_and_add (tree t, gimple_seq *seq_p)
390 gimplify_stmt (&t, seq_p);
393 /* Gimplify statement T into sequence *SEQ_P, and return the first
394 tuple in the sequence of generated tuples for this statement.
395 Return NULL if gimplifying T produced no tuples. */
397 static gimple
398 gimplify_and_return_first (tree t, gimple_seq *seq_p)
400 gimple_stmt_iterator last = gsi_last (*seq_p);
402 gimplify_and_add (t, seq_p);
404 if (!gsi_end_p (last))
406 gsi_next (&last);
407 return gsi_stmt (last);
409 else
410 return gimple_seq_first_stmt (*seq_p);
413 /* Strip off a legitimate source ending from the input string NAME of
414 length LEN. Rather than having to know the names used by all of
415 our front ends, we strip off an ending of a period followed by
416 up to five characters. (Java uses ".class".) */
418 static inline void
419 remove_suffix (char *name, int len)
421 int i;
423 for (i = 2; i < 8 && len > i; i++)
425 if (name[len - i] == '.')
427 name[len - i] = '\0';
428 break;
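/* Editorial example (sketch): remove_suffix truncates a trailing
   period-plus-short-ending in place, e.g.

       "Foo.class"  ->  "Foo"      (Java front end)
       "bar.cc"     ->  "bar"

   before the prefix is used to build a temporary name in
   create_tmp_var_name below.  */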
433 /* Create a new temporary name with PREFIX. Returns an identifier. */
435 static GTY(()) unsigned int tmp_var_id_num;
437 tree
438 create_tmp_var_name (const char *prefix)
440 char *tmp_name;
442 if (prefix)
444 char *preftmp = ASTRDUP (prefix);
446 remove_suffix (preftmp, strlen (preftmp));
447 prefix = preftmp;
450 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
451 return get_identifier (tmp_name);
455 /* Create a new temporary variable declaration of type TYPE.
456 Does NOT push it into the current binding. */
458 tree
459 create_tmp_var_raw (tree type, const char *prefix)
461 tree tmp_var;
462 tree new_type;
464 /* Make the type of the variable writable. */
465 new_type = build_type_variant (type, 0, 0);
466 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
468 tmp_var = build_decl (input_location,
469 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
470 type);
472 /* The variable was declared by the compiler. */
473 DECL_ARTIFICIAL (tmp_var) = 1;
474 /* And we don't want debug info for it. */
475 DECL_IGNORED_P (tmp_var) = 1;
477 /* Make the variable writable. */
478 TREE_READONLY (tmp_var) = 0;
480 DECL_EXTERNAL (tmp_var) = 0;
481 TREE_STATIC (tmp_var) = 0;
482 TREE_USED (tmp_var) = 1;
484 return tmp_var;
487 /* Create a new temporary variable declaration of type TYPE. DOES push the
488 variable into the current binding. Further, assume that this is called
489 only from gimplification or optimization, at which point the creation of
490 certain types are bugs. */
492 tree
493 create_tmp_var (tree type, const char *prefix)
495 tree tmp_var;
497 /* We don't allow types that are addressable (meaning we can't make copies),
498 or incomplete. We also used to reject every variable size objects here,
499 but now support those for which a constant upper bound can be obtained.
500 The processing for variable sizes is performed in gimple_add_tmp_var,
501 which is the point where it really matters and may also be reached via paths not going
502 through this function, e.g. after direct calls to create_tmp_var_raw. */
503 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
505 tmp_var = create_tmp_var_raw (type, prefix);
506 gimple_add_tmp_var (tmp_var);
507 return tmp_var;
510 /* Create a temporary with a name derived from VAL. Subroutine of
511 lookup_tmp_var; nobody else should call this function. */
513 static inline tree
514 create_tmp_from_val (tree val)
516 return create_tmp_var (TREE_TYPE (val), get_name (val));
519 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
520 an existing expression temporary. */
522 static tree
523 lookup_tmp_var (tree val, bool is_formal)
525 tree ret;
527 /* If not optimizing, never really reuse a temporary. local-alloc
528 won't allocate any variable that is used in more than one basic
529 block, which means it will go into memory, causing much extra
530 work in reload and final and poorer code generation, outweighing
531 the extra memory allocation here. */
532 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
533 ret = create_tmp_from_val (val);
534 else
536 elt_t elt, *elt_p;
537 void **slot;
539 elt.val = val;
540 if (gimplify_ctxp->temp_htab == NULL)
541 gimplify_ctxp->temp_htab
542 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
543 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
544 if (*slot == NULL)
546 elt_p = XNEW (elt_t);
547 elt_p->val = val;
548 elt_p->temp = ret = create_tmp_from_val (val);
549 *slot = (void *) elt_p;
551 else
553 elt_p = (elt_t *) *slot;
554 ret = elt_p->temp;
558 return ret;
562 /* Return true if T is a CALL_EXPR or an expression that can be
563 assigned to a temporary. Note that this predicate should only be
564 used during gimplification. See the rationale for this in
565 gimplify_modify_expr. */
567 static bool
568 is_gimple_reg_rhs_or_call (tree t)
570 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
571 || TREE_CODE (t) == CALL_EXPR);
574 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
575 this predicate should only be used during gimplification. See the
576 rationale for this in gimplify_modify_expr. */
578 static bool
579 is_gimple_mem_rhs_or_call (tree t)
581 /* If we're dealing with a renamable type, either source or dest must be
582 a renamed variable. */
583 if (is_gimple_reg_type (TREE_TYPE (t)))
584 return is_gimple_val (t);
585 else
586 return (is_gimple_val (t) || is_gimple_lvalue (t)
587 || TREE_CODE (t) == CALL_EXPR);
590 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
592 static tree
593 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
594 bool is_formal)
596 tree t, mod;
598 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
599 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
600 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
601 fb_rvalue);
603 t = lookup_tmp_var (val, is_formal);
605 if (is_formal
606 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
607 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
608 DECL_GIMPLE_REG_P (t) = 1;
610 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
612 if (EXPR_HAS_LOCATION (val))
613 SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
614 else
615 SET_EXPR_LOCATION (mod, input_location);
617 /* gimplify_modify_expr might want to reduce this further. */
618 gimplify_and_add (mod, pre_p);
619 ggc_free (mod);
621 /* If we're gimplifying into ssa, gimplify_modify_expr will have
622 given our temporary an SSA name. Find and return it. */
623 if (gimplify_ctxp->into_ssa)
625 gimple last = gimple_seq_last_stmt (*pre_p);
626 t = gimple_get_lhs (last);
629 return t;
632 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
633 in gimplify_expr. Only use this function if:
635 1) The value of the unfactored expression represented by VAL will not
636 change between the initialization and use of the temporary, and
637 2) The temporary will not be otherwise modified.
639 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
640 and #2 means it is inappropriate for && temps.
642 For other cases, use get_initialized_tmp_var instead. */
644 tree
645 get_formal_tmp_var (tree val, gimple_seq *pre_p)
647 return internal_get_tmp_var (val, pre_p, NULL, true);
650 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
651 are as in gimplify_expr. */
653 tree
654 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
656 return internal_get_tmp_var (val, pre_p, post_p, false);
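/* Editorial note (sketch): the distinction above matters in practice.
   For a pure rvalue such as "a + b" that is re-read unchanged, a formal
   temporary may be shared:

       t = get_formal_tmp_var (expr, pre_p);

   whereas a value that may be modified afterwards (e.g. the second arm of
   a short-circuit "&&") needs its own temporary:

       t = get_initialized_tmp_var (expr, pre_p, post_p);  */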
659 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
660 true, generate debug info for them; otherwise don't. */
662 void
663 declare_vars (tree vars, gimple scope, bool debug_info)
665 tree last = vars;
666 if (last)
668 tree temps, block;
670 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
672 temps = nreverse (last);
674 block = gimple_bind_block (scope);
675 gcc_assert (!block || TREE_CODE (block) == BLOCK);
676 if (!block || !debug_info)
678 TREE_CHAIN (last) = gimple_bind_vars (scope);
679 gimple_bind_set_vars (scope, temps);
681 else
683 /* We need to attach the nodes both to the BIND_EXPR and to its
684 associated BLOCK for debugging purposes. The key point here
685 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
686 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
687 if (BLOCK_VARS (block))
688 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
689 else
691 gimple_bind_set_vars (scope,
692 chainon (gimple_bind_vars (scope), temps));
693 BLOCK_VARS (block) = temps;
699 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
700 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
701 no such upper bound can be obtained. */
703 static void
704 force_constant_size (tree var)
706 /* The only attempt we make is by querying the maximum size of objects
707 of the variable's type. */
709 HOST_WIDE_INT max_size;
711 gcc_assert (TREE_CODE (var) == VAR_DECL);
713 max_size = max_int_size_in_bytes (TREE_TYPE (var));
715 gcc_assert (max_size >= 0);
717 DECL_SIZE_UNIT (var)
718 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
719 DECL_SIZE (var)
720 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
723 void
724 gimple_add_tmp_var (tree tmp)
726 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
728 /* Later processing assumes that the object size is constant, which might
729 not be true at this point. Force the use of a constant upper bound in
730 this case. */
731 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
732 force_constant_size (tmp);
734 DECL_CONTEXT (tmp) = current_function_decl;
735 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
737 if (gimplify_ctxp)
739 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
740 gimplify_ctxp->temps = tmp;
742 /* Mark temporaries local within the nearest enclosing parallel. */
743 if (gimplify_omp_ctxp)
745 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
746 while (ctx && ctx->region_type == ORT_WORKSHARE)
747 ctx = ctx->outer_context;
748 if (ctx)
749 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
752 else if (cfun)
753 record_vars (tmp);
754 else
756 gimple_seq body_seq;
758 /* This case is for nested functions. We need to expose the locals
759 they create. */
760 body_seq = gimple_body (current_function_decl);
761 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
765 /* Determines whether to assign a location to the statement GS. */
767 static bool
768 should_carry_location_p (gimple gs)
770 /* Don't emit a line note for a label. We particularly don't want to
771 emit one for the break label, since it doesn't actually correspond
772 to the beginning of the loop/switch. */
773 if (gimple_code (gs) == GIMPLE_LABEL)
774 return false;
776 return true;
779 /* Same, but for a tree. */
781 static bool
782 tree_should_carry_location_p (const_tree stmt)
784 /* Don't emit a line note for a label. We particularly don't want to
785 emit one for the break label, since it doesn't actually correspond
786 to the beginning of the loop/switch. */
787 if (TREE_CODE (stmt) == LABEL_EXPR)
788 return false;
790 /* Do not annotate empty statements, since it confuses gcov. */
791 if (!TREE_SIDE_EFFECTS (stmt))
792 return false;
794 return true;
797 /* Return true if a location should not be emitted for this statement
798 by annotate_one_with_location. */
800 static inline bool
801 gimple_do_not_emit_location_p (gimple g)
803 return gimple_plf (g, GF_PLF_1);
806 /* Mark statement G so a location will not be emitted by
807 annotate_one_with_location. */
809 static inline void
810 gimple_set_do_not_emit_location (gimple g)
812 /* The PLF flags are initialized to 0 when a new tuple is created,
813 so no need to initialize it anywhere. */
814 gimple_set_plf (g, GF_PLF_1, true);
817 /* Set the location for gimple statement GS to LOCATION. */
819 static void
820 annotate_one_with_location (gimple gs, location_t location)
822 if (!gimple_has_location (gs)
823 && !gimple_do_not_emit_location_p (gs)
824 && should_carry_location_p (gs))
825 gimple_set_location (gs, location);
828 /* Same, but for tree T. */
830 static void
831 tree_annotate_one_with_location (tree t, location_t location)
833 if (CAN_HAVE_LOCATION_P (t)
834 && ! EXPR_HAS_LOCATION (t) && tree_should_carry_location_p (t))
835 SET_EXPR_LOCATION (t, location);
839 /* Set LOCATION for all the statements after iterator GSI in sequence
840 SEQ. If GSI is pointing to the end of the sequence, start with the
841 first statement in SEQ. */
843 static void
844 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
845 location_t location)
847 if (gsi_end_p (gsi))
848 gsi = gsi_start (seq);
849 else
850 gsi_next (&gsi);
852 for (; !gsi_end_p (gsi); gsi_next (&gsi))
853 annotate_one_with_location (gsi_stmt (gsi), location);
857 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
859 void
860 annotate_all_with_location (gimple_seq stmt_p, location_t location)
862 gimple_stmt_iterator i;
864 if (gimple_seq_empty_p (stmt_p))
865 return;
867 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
869 gimple gs = gsi_stmt (i);
870 annotate_one_with_location (gs, location);
874 /* Same, but for statement or statement list in *STMT_P. */
876 void
877 tree_annotate_all_with_location (tree *stmt_p, location_t location)
879 tree_stmt_iterator i;
881 if (!*stmt_p)
882 return;
884 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
886 tree t = tsi_stmt (i);
888 /* Assuming we've already been gimplified, we shouldn't
889 see nested chaining constructs anymore. */
890 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
891 && TREE_CODE (t) != COMPOUND_EXPR);
893 tree_annotate_one_with_location (t, location);
898 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
899 These nodes model computations that should only be done once. If we
900 were to unshare something like SAVE_EXPR(i++), the gimplification
901 process would create wrong code. */
903 static tree
904 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
906 enum tree_code code = TREE_CODE (*tp);
907 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
908 if (TREE_CODE_CLASS (code) == tcc_type
909 || TREE_CODE_CLASS (code) == tcc_declaration
910 || TREE_CODE_CLASS (code) == tcc_constant
911 || code == SAVE_EXPR || code == TARGET_EXPR
912 /* We can't do anything sensible with a BLOCK used as an expression,
913 but we also can't just die when we see it because of non-expression
914 uses. So just avert our eyes and cross our fingers. Silly Java. */
915 || code == BLOCK)
916 *walk_subtrees = 0;
917 else
919 gcc_assert (code != BIND_EXPR);
920 copy_tree_r (tp, walk_subtrees, data);
923 return NULL_TREE;
926 /* Callback for walk_tree to unshare most of the shared trees rooted at
927 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
928 then *TP is deep copied by calling copy_tree_r.
930 This unshares the same trees as copy_tree_r with the exception of
931 SAVE_EXPR nodes. These nodes model computations that should only be
932 done once. If we were to unshare something like SAVE_EXPR(i++), the
933 gimplification process would create wrong code. */
935 static tree
936 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
937 void *data ATTRIBUTE_UNUSED)
939 tree t = *tp;
940 enum tree_code code = TREE_CODE (t);
942 /* Skip types, decls, and constants. But we do want to look at their
943 types and the bounds of types. Mark them as visited so we properly
944 unmark their subtrees on the unmark pass. If we've already seen them,
945 don't look down further. */
946 if (TREE_CODE_CLASS (code) == tcc_type
947 || TREE_CODE_CLASS (code) == tcc_declaration
948 || TREE_CODE_CLASS (code) == tcc_constant)
950 if (TREE_VISITED (t))
951 *walk_subtrees = 0;
952 else
953 TREE_VISITED (t) = 1;
956 /* If this node has been visited already, unshare it and don't look
957 any deeper. */
958 else if (TREE_VISITED (t))
960 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
961 *walk_subtrees = 0;
964 /* Otherwise, mark the tree as visited and keep looking. */
965 else
966 TREE_VISITED (t) = 1;
968 return NULL_TREE;
971 static tree
972 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
973 void *data ATTRIBUTE_UNUSED)
975 if (TREE_VISITED (*tp))
976 TREE_VISITED (*tp) = 0;
977 else
978 *walk_subtrees = 0;
980 return NULL_TREE;
983 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
984 bodies of any nested functions if we are unsharing the entire body of
985 FNDECL. */
987 static void
988 unshare_body (tree *body_p, tree fndecl)
990 struct cgraph_node *cgn = cgraph_node (fndecl);
992 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
993 if (body_p == &DECL_SAVED_TREE (fndecl))
994 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
995 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
998 /* Likewise, but mark all trees as not visited. */
1000 static void
1001 unvisit_body (tree *body_p, tree fndecl)
1003 struct cgraph_node *cgn = cgraph_node (fndecl);
1005 walk_tree (body_p, unmark_visited_r, NULL, NULL);
1006 if (body_p == &DECL_SAVED_TREE (fndecl))
1007 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1008 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
1011 /* Unconditionally make an unshared copy of EXPR. This is used when using
1012 stored expressions which span multiple functions, such as BINFO_VTABLE,
1013 as the normal unsharing process can't tell that they're shared. */
1015 tree
1016 unshare_expr (tree expr)
1018 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1019 return expr;
1022 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1023 contain statements and have a value. Assign its value to a temporary
1024 and give it void_type_node. Returns the temporary, or NULL_TREE if
1025 WRAPPER was already void. */
1027 tree
1028 voidify_wrapper_expr (tree wrapper, tree temp)
1030 tree type = TREE_TYPE (wrapper);
1031 if (type && !VOID_TYPE_P (type))
1033 tree *p;
1035 /* Set p to point to the body of the wrapper. Loop until we find
1036 something that isn't a wrapper. */
1037 for (p = &wrapper; p && *p; )
1039 switch (TREE_CODE (*p))
1041 case BIND_EXPR:
1042 TREE_SIDE_EFFECTS (*p) = 1;
1043 TREE_TYPE (*p) = void_type_node;
1044 /* For a BIND_EXPR, the body is operand 1. */
1045 p = &BIND_EXPR_BODY (*p);
1046 break;
1048 case CLEANUP_POINT_EXPR:
1049 case TRY_FINALLY_EXPR:
1050 case TRY_CATCH_EXPR:
1051 TREE_SIDE_EFFECTS (*p) = 1;
1052 TREE_TYPE (*p) = void_type_node;
1053 p = &TREE_OPERAND (*p, 0);
1054 break;
1056 case STATEMENT_LIST:
1058 tree_stmt_iterator i = tsi_last (*p);
1059 TREE_SIDE_EFFECTS (*p) = 1;
1060 TREE_TYPE (*p) = void_type_node;
1061 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1063 break;
1065 case COMPOUND_EXPR:
1066 /* Advance to the last statement. Set all container types to void. */
1067 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1069 TREE_SIDE_EFFECTS (*p) = 1;
1070 TREE_TYPE (*p) = void_type_node;
1072 break;
1074 default:
1075 goto out;
1079 out:
1080 if (p == NULL || IS_EMPTY_STMT (*p))
1081 temp = NULL_TREE;
1082 else if (temp)
1084 /* The wrapper is on the RHS of an assignment that we're pushing
1085 down. */
1086 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1087 || TREE_CODE (temp) == MODIFY_EXPR);
1088 TREE_OPERAND (temp, 1) = *p;
1089 *p = temp;
1091 else
1093 temp = create_tmp_var (type, "retval");
1094 *p = build2 (INIT_EXPR, type, temp, *p);
1097 return temp;
1100 return NULL_TREE;
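/* Editorial example (sketch): for a GNU statement expression used as a
   value,

       x = ({ int i = f (); i + 1; });

   the BIND_EXPR wrapper is given void type and its final value is
   captured in a "retval" temporary (or assigned directly into TEMP when
   the caller is pushing an assignment down into the wrapper).  */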
1103 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1104 a temporary through which they communicate. */
1106 static void
1107 build_stack_save_restore (gimple *save, gimple *restore)
1109 tree tmp_var;
1111 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1112 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1113 gimple_call_set_lhs (*save, tmp_var);
1115 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1116 1, tmp_var);
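/* Editorial sketch of the emitted pattern:

       saved_stack.N = __builtin_stack_save ();
       ...                                        <- body using the stack
       __builtin_stack_restore (saved_stack.N);

   gimplify_bind_expr below wraps the body in a GIMPLE_TRY_FINALLY so the
   restore runs on every exit path.  */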
1119 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1121 static enum gimplify_status
1122 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1124 tree bind_expr = *expr_p;
1125 bool old_save_stack = gimplify_ctxp->save_stack;
1126 tree t;
1127 gimple gimple_bind;
1128 gimple_seq body;
1130 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1132 /* Mark variables seen in this bind expr. */
1133 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1135 if (TREE_CODE (t) == VAR_DECL)
1137 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1139 /* Mark variable as local. */
1140 if (ctx && !is_global_var (t)
1141 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1142 || splay_tree_lookup (ctx->variables,
1143 (splay_tree_key) t) == NULL))
1144 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1146 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1148 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1149 cfun->has_local_explicit_reg_vars = true;
1152 /* Preliminarily mark non-addressed complex variables as eligible
1153 for promotion to gimple registers. We'll transform their uses
1154 as we find them.
1155 We exclude complex types if not optimizing because they can be
1156 subject to partial stores in GNU C by means of the __real__ and
1157 __imag__ operators and we cannot promote them to total stores
1158 (see gimplify_modify_expr_complex_part). */
1159 if (optimize
1160 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1161 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1162 && !TREE_THIS_VOLATILE (t)
1163 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1164 && !needs_to_live_in_memory (t))
1165 DECL_GIMPLE_REG_P (t) = 1;
1168 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1169 BIND_EXPR_BLOCK (bind_expr));
1170 gimple_push_bind_expr (gimple_bind);
1172 gimplify_ctxp->save_stack = false;
1174 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1175 body = NULL;
1176 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1177 gimple_bind_set_body (gimple_bind, body);
1179 if (gimplify_ctxp->save_stack)
1181 gimple stack_save, stack_restore, gs;
1182 gimple_seq cleanup, new_body;
1184 /* Save stack on entry and restore it on exit. Add a try_finally
1185 block to achieve this. Note that mudflap depends on the
1186 format of the emitted code: see mx_register_decls(). */
1187 build_stack_save_restore (&stack_save, &stack_restore);
1189 cleanup = new_body = NULL;
1190 gimplify_seq_add_stmt (&cleanup, stack_restore);
1191 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1192 GIMPLE_TRY_FINALLY);
1194 gimplify_seq_add_stmt (&new_body, stack_save);
1195 gimplify_seq_add_stmt (&new_body, gs);
1196 gimple_bind_set_body (gimple_bind, new_body);
1199 gimplify_ctxp->save_stack = old_save_stack;
1200 gimple_pop_bind_expr ();
1202 gimplify_seq_add_stmt (pre_p, gimple_bind);
1204 if (temp)
1206 *expr_p = temp;
1207 return GS_OK;
1210 *expr_p = NULL_TREE;
1211 return GS_ALL_DONE;
1214 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1215 GIMPLE value, it is assigned to a new temporary and the statement is
1216 re-written to return the temporary.
1218 PRE_P points to the sequence where side effects that must happen before
1219 STMT should be stored. */
1221 static enum gimplify_status
1222 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1224 gimple ret;
1225 tree ret_expr = TREE_OPERAND (stmt, 0);
1226 tree result_decl, result;
1228 if (ret_expr == error_mark_node)
1229 return GS_ERROR;
1231 if (!ret_expr
1232 || TREE_CODE (ret_expr) == RESULT_DECL
1233 || ret_expr == error_mark_node)
1235 gimple ret = gimple_build_return (ret_expr);
1236 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1237 gimplify_seq_add_stmt (pre_p, ret);
1238 return GS_ALL_DONE;
1241 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1242 result_decl = NULL_TREE;
1243 else
1245 result_decl = TREE_OPERAND (ret_expr, 0);
1247 /* See through a return by reference. */
1248 if (TREE_CODE (result_decl) == INDIRECT_REF)
1249 result_decl = TREE_OPERAND (result_decl, 0);
1251 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1252 || TREE_CODE (ret_expr) == INIT_EXPR)
1253 && TREE_CODE (result_decl) == RESULT_DECL);
1256 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1257 Recall that aggregate_value_p is FALSE for any aggregate type that is
1258 returned in registers. If we're returning values in registers, then
1259 we don't want to extend the lifetime of the RESULT_DECL, particularly
1260 across another call. In addition, for those aggregates for which
1261 hard_function_value generates a PARALLEL, we'll die during normal
1262 expansion of structure assignments; there's special code in expand_return
1263 to handle this case that does not exist in expand_expr. */
1264 if (!result_decl
1265 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1266 result = result_decl;
1267 else if (gimplify_ctxp->return_temp)
1268 result = gimplify_ctxp->return_temp;
1269 else
1271 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1272 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1273 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1274 DECL_GIMPLE_REG_P (result) = 1;
1276 /* ??? With complex control flow (usually involving abnormal edges),
1277 we can wind up warning about an uninitialized value for this. Due
1278 to how this variable is constructed and initialized, this is never
1279 true. Give up and never warn. */
1280 TREE_NO_WARNING (result) = 1;
1282 gimplify_ctxp->return_temp = result;
1285 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1286 Then gimplify the whole thing. */
1287 if (result != result_decl)
1288 TREE_OPERAND (ret_expr, 0) = result;
1290 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1292 ret = gimple_build_return (result);
1293 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1294 gimplify_seq_add_stmt (pre_p, ret);
1296 return GS_ALL_DONE;
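/* Editorial example (sketch): a return whose operand is not a GIMPLE
   value, e.g.

       return a + b;

   is rewritten so the computation is first assigned to the shared return
   temporary and the GIMPLE_RETURN then returns that temporary.  */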
1299 static void
1300 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1302 /* This is a variable-sized decl. Simplify its size and mark it
1303 for deferred expansion. Note that mudflap depends on the format
1304 of the emitted code: see mx_register_decls(). */
1305 tree t, addr, ptr_type;
1307 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1308 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1310 /* All occurrences of this decl in final gimplified code will be
1311 replaced by indirection. Setting DECL_VALUE_EXPR does two
1312 things: First, it lets the rest of the gimplifier know what
1313 replacement to use. Second, it lets the debug info know
1314 where to find the value. */
1315 ptr_type = build_pointer_type (TREE_TYPE (decl));
1316 addr = create_tmp_var (ptr_type, get_name (decl));
1317 DECL_IGNORED_P (addr) = 0;
1318 t = build_fold_indirect_ref (addr);
1319 SET_DECL_VALUE_EXPR (decl, t);
1320 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1322 t = built_in_decls[BUILT_IN_ALLOCA];
1323 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1324 t = fold_convert (ptr_type, t);
1325 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1327 gimplify_and_add (t, seq_p);
1329 /* Indicate that we need to restore the stack level when the
1330 enclosing BIND_EXPR is exited. */
1331 gimplify_ctxp->save_stack = true;
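/* Editorial example (sketch): for a variable-length array

       int a[n];

   the size expressions are gimplified, storage is obtained roughly as

       a.addr = __builtin_alloca (n * sizeof (int));   (names illustrative only)

   and DECL_VALUE_EXPR makes every later use of "a" read through that
   pointer; save_stack ensures the space is released when the enclosing
   BIND_EXPR exits.  */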
1335 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1336 and initialization explicit. */
1338 static enum gimplify_status
1339 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1341 tree stmt = *stmt_p;
1342 tree decl = DECL_EXPR_DECL (stmt);
1344 *stmt_p = NULL_TREE;
1346 if (TREE_TYPE (decl) == error_mark_node)
1347 return GS_ERROR;
1349 if ((TREE_CODE (decl) == TYPE_DECL
1350 || TREE_CODE (decl) == VAR_DECL)
1351 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1352 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1354 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1356 tree init = DECL_INITIAL (decl);
1358 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1359 || (!TREE_STATIC (decl)
1360 && flag_stack_check == GENERIC_STACK_CHECK
1361 && compare_tree_int (DECL_SIZE_UNIT (decl),
1362 STACK_CHECK_MAX_VAR_SIZE) > 0))
1363 gimplify_vla_decl (decl, seq_p);
1365 if (init && init != error_mark_node)
1367 if (!TREE_STATIC (decl))
1369 DECL_INITIAL (decl) = NULL_TREE;
1370 init = build2 (INIT_EXPR, void_type_node, decl, init);
1371 gimplify_and_add (init, seq_p);
1372 ggc_free (init);
1374 else
1375 /* We must still examine initializers for static variables
1376 as they may contain a label address. */
1377 walk_tree (&init, force_labels_r, NULL, NULL);
1380 /* Some front ends do not explicitly declare all anonymous
1381 artificial variables. We compensate here by declaring the
1382 variables, though it would be better if the front ends would
1383 explicitly declare them. */
1384 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1385 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1386 gimple_add_tmp_var (decl);
1389 return GS_ALL_DONE;
1392 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1393 and replacing the LOOP_EXPR with goto, but if the loop contains an
1394 EXIT_EXPR, we need to append a label for it to jump to. */
1396 static enum gimplify_status
1397 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1399 tree saved_label = gimplify_ctxp->exit_label;
1400 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1402 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1404 gimplify_ctxp->exit_label = NULL_TREE;
1406 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1408 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1410 if (gimplify_ctxp->exit_label)
1411 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1413 gimplify_ctxp->exit_label = saved_label;
1415 *expr_p = NULL;
1416 return GS_ALL_DONE;
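/* Editorial sketch of the lowering: a LOOP_EXPR becomes

       start_label:
         <gimplified body>
         goto start_label;
       exit_label:      <- emitted only if the body contained an EXIT_EXPR

   where exit_label is communicated through gimplify_ctxp->exit_label.  */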
1419 /* Gimplifies a statement list onto a sequence. These may be created either
1420 by an enlightened front-end, or by shortcut_cond_expr. */
1422 static enum gimplify_status
1423 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1425 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1427 tree_stmt_iterator i = tsi_start (*expr_p);
1429 while (!tsi_end_p (i))
1431 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1432 tsi_delink (&i);
1435 if (temp)
1437 *expr_p = temp;
1438 return GS_OK;
1441 return GS_ALL_DONE;
1444 /* Compare two case labels. Because the front end should already have
1445 made sure that case ranges do not overlap, it is enough to only compare
1446 the CASE_LOW values of each case label. */
1448 static int
1449 compare_case_labels (const void *p1, const void *p2)
1451 const_tree const case1 = *(const_tree const*)p1;
1452 const_tree const case2 = *(const_tree const*)p2;
1454 /* The 'default' case label always goes first. */
1455 if (!CASE_LOW (case1))
1456 return -1;
1457 else if (!CASE_LOW (case2))
1458 return 1;
1459 else
1460 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1464 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1466 void
1467 sort_case_labels (VEC(tree,heap)* label_vec)
1469 size_t len = VEC_length (tree, label_vec);
1470 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1471 compare_case_labels);
1475 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1476 branch to. */
1478 static enum gimplify_status
1479 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1481 tree switch_expr = *expr_p;
1482 gimple_seq switch_body_seq = NULL;
1483 enum gimplify_status ret;
1485 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1486 fb_rvalue);
1487 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1488 return ret;
1490 if (SWITCH_BODY (switch_expr))
1492 VEC (tree,heap) *labels;
1493 VEC (tree,heap) *saved_labels;
1494 tree default_case = NULL_TREE;
1495 size_t i, len;
1496 gimple gimple_switch;
1498 /* If someone can be bothered to fill in the labels, they can
1499 be bothered to null out the body too. */
1500 gcc_assert (!SWITCH_LABELS (switch_expr));
1502 /* save old labels, get new ones from body, then restore the old
1503 labels. Save all the things from the switch body to append after. */
1504 saved_labels = gimplify_ctxp->case_labels;
1505 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1507 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1508 labels = gimplify_ctxp->case_labels;
1509 gimplify_ctxp->case_labels = saved_labels;
1511 i = 0;
1512 while (i < VEC_length (tree, labels))
1514 tree elt = VEC_index (tree, labels, i);
1515 tree low = CASE_LOW (elt);
1516 bool remove_element = FALSE;
1518 if (low)
1520 /* Discard empty ranges. */
1521 tree high = CASE_HIGH (elt);
1522 if (high && tree_int_cst_lt (high, low))
1523 remove_element = TRUE;
1525 else
1527 /* The default case must be the last label in the list. */
1528 gcc_assert (!default_case);
1529 default_case = elt;
1530 remove_element = TRUE;
1533 if (remove_element)
1534 VEC_ordered_remove (tree, labels, i);
1535 else
1536 i++;
1538 len = i;
1540 if (!VEC_empty (tree, labels))
1541 sort_case_labels (labels);
1543 if (!default_case)
1545 tree type = TREE_TYPE (switch_expr);
1547 /* If the switch has no default label, add one, so that we jump
1548 around the switch body. If the labels already cover the whole
1549 range of type, add the default label pointing to one of the
1550 existing labels. */
1551 if (type == void_type_node)
1552 type = TREE_TYPE (SWITCH_COND (switch_expr));
1553 if (len
1554 && INTEGRAL_TYPE_P (type)
1555 && TYPE_MIN_VALUE (type)
1556 && TYPE_MAX_VALUE (type)
1557 && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1558 TYPE_MIN_VALUE (type)))
1560 tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1561 if (!high)
1562 high = CASE_LOW (VEC_index (tree, labels, len - 1));
1563 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1565 for (i = 1; i < len; i++)
1567 high = CASE_LOW (VEC_index (tree, labels, i));
1568 low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1569 if (!low)
1570 low = CASE_LOW (VEC_index (tree, labels, i - 1));
1571 if ((TREE_INT_CST_LOW (low) + 1
1572 != TREE_INT_CST_LOW (high))
1573 || (TREE_INT_CST_HIGH (low)
1574 + (TREE_INT_CST_LOW (high) == 0)
1575 != TREE_INT_CST_HIGH (high)))
1576 break;
1578 if (i == len)
1579 default_case = build3 (CASE_LABEL_EXPR, void_type_node,
1580 NULL_TREE, NULL_TREE,
1581 CASE_LABEL (VEC_index (tree,
1582 labels, 0)));
1586 if (!default_case)
1588 gimple new_default;
1590 default_case
1591 = build3 (CASE_LABEL_EXPR, void_type_node,
1592 NULL_TREE, NULL_TREE,
1593 create_artificial_label (UNKNOWN_LOCATION));
1594 new_default = gimple_build_label (CASE_LABEL (default_case));
1595 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1599 gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1600 default_case, labels);
1601 gimplify_seq_add_stmt (pre_p, gimple_switch);
1602 gimplify_seq_add_seq (pre_p, switch_body_seq);
1603 VEC_free(tree, heap, labels);
1605 else
1606 gcc_assert (SWITCH_LABELS (switch_expr));
1608 return GS_ALL_DONE;
1612 static enum gimplify_status
1613 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1615 struct gimplify_ctx *ctxp;
1616 gimple gimple_label;
1618 /* Invalid OpenMP programs can play Duff's Device type games with
1619 #pragma omp parallel. At least in the C front end, we don't
1620 detect such invalid branches until after gimplification. */
1621 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1622 if (ctxp->case_labels)
1623 break;
1625 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1626 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1627 gimplify_seq_add_stmt (pre_p, gimple_label);
1629 return GS_ALL_DONE;
1632 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1633 if necessary. */
1635 tree
1636 build_and_jump (tree *label_p)
1638 if (label_p == NULL)
1639 /* If there's nowhere to jump, just fall through. */
1640 return NULL_TREE;
1642 if (*label_p == NULL_TREE)
1644 tree label = create_artificial_label (UNKNOWN_LOCATION);
1645 *label_p = label;
1648 return build1 (GOTO_EXPR, void_type_node, *label_p);
1651 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1652 This also involves building a label to jump to and communicating it to
1653 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1655 static enum gimplify_status
1656 gimplify_exit_expr (tree *expr_p)
1658 tree cond = TREE_OPERAND (*expr_p, 0);
1659 tree expr;
1661 expr = build_and_jump (&gimplify_ctxp->exit_label);
1662 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1663 *expr_p = expr;
1665 return GS_OK;
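/* Editorial sketch: "EXIT_EXPR <cond>" is rewritten as the GENERIC
   equivalent of

       if (cond) goto <exit_label>;

   and gimplify_loop_expr later places <exit_label> just after the loop.  */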
1668 /* A helper function to be called via walk_tree. Mark all labels under *TP
1669 as being forced. To be called for DECL_INITIAL of static variables. */
1671 tree
1672 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1674 if (TYPE_P (*tp))
1675 *walk_subtrees = 0;
1676 if (TREE_CODE (*tp) == LABEL_DECL)
1677 FORCED_LABEL (*tp) = 1;
1679 return NULL_TREE;
1682 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1683 different from its canonical type, wrap the whole thing inside a
1684 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1685 type.
1687 The canonical type of a COMPONENT_REF is the type of the field being
1688 referenced--unless the field is a bit-field which can be read directly
1689 in a smaller mode, in which case the canonical type is the
1690 sign-appropriate type corresponding to that mode. */
1692 static void
1693 canonicalize_component_ref (tree *expr_p)
1695 tree expr = *expr_p;
1696 tree type;
1698 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1700 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1701 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1702 else
1703 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1705 /* One could argue that all the stuff below is not necessary for
1706 the non-bitfield case and declare it a FE error if type
1707 adjustment would be needed. */
1708 if (TREE_TYPE (expr) != type)
1710 #ifdef ENABLE_TYPES_CHECKING
1711 tree old_type = TREE_TYPE (expr);
1712 #endif
1713 int type_quals;
1715 /* We need to preserve qualifiers and propagate them from
1716 operand 0. */
1717 type_quals = TYPE_QUALS (type)
1718 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1719 if (TYPE_QUALS (type) != type_quals)
1720 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1722 /* Set the type of the COMPONENT_REF to the underlying type. */
1723 TREE_TYPE (expr) = type;
1725 #ifdef ENABLE_TYPES_CHECKING
1726 /* It is now a FE error, if the conversion from the canonical
1727 type to the original expression type is not useless. */
1728 gcc_assert (useless_type_conversion_p (old_type, type));
1729 #endif
1733 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1734 to foo, embed that change in the ADDR_EXPR by converting
1735 T array[U];
1736 (T *)&array
1738 &array[L]
1739 where L is the lower bound. For simplicity, only do this for constant
1740 lower bound.
1741 The constraint is that the type of &array[L] is trivially convertible
1742 to T *. */
1744 static void
1745 canonicalize_addr_expr (tree *expr_p)
1747 tree expr = *expr_p;
1748 tree addr_expr = TREE_OPERAND (expr, 0);
1749 tree datype, ddatype, pddatype;
1751 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1752 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1753 || TREE_CODE (addr_expr) != ADDR_EXPR)
1754 return;
1756 /* The addr_expr type should be a pointer to an array. */
1757 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1758 if (TREE_CODE (datype) != ARRAY_TYPE)
1759 return;
1761 /* The pointer to element type shall be trivially convertible to
1762 the expression pointer type. */
1763 ddatype = TREE_TYPE (datype);
1764 pddatype = build_pointer_type (ddatype);
1765 if (!useless_type_conversion_p (TREE_TYPE (expr), pddatype))
1766 return;
1768 /* The lower bound and element sizes must be constant. */
1769 if (!TYPE_SIZE_UNIT (ddatype)
1770 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1771 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1772 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1773 return;
1775 /* All checks succeeded. Build a new node to merge the cast. */
1776 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1777 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1778 NULL_TREE, NULL_TREE);
1779 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
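/* Editorial example (sketch): given

       int arr[10];
       ... (int *) &arr ...

   the conversion is folded into the address computation as "&arr[0]",
   whose type is already the required pointer-to-element type.  */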
1782 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1783 underneath as appropriate. */
1785 static enum gimplify_status
1786 gimplify_conversion (tree *expr_p)
1788 tree tem;
1789 gcc_assert (CONVERT_EXPR_P (*expr_p));
1791 /* Then strip away all but the outermost conversion. */
1792 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1794 /* And remove the outermost conversion if it's useless. */
1795 if (tree_ssa_useless_type_conversion (*expr_p))
1796 *expr_p = TREE_OPERAND (*expr_p, 0);
1798 /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
1799 For example this folds (subclass *)&A into &A->subclass, avoiding
1800 the need for a separate statement. */
1801 if (CONVERT_EXPR_P (*expr_p)
1802 && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1803 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1804 && (tem = maybe_fold_offset_to_address
1805 (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
1806 integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
1807 *expr_p = tem;
1809 /* If we still have a conversion at the toplevel,
1810 then canonicalize some constructs. */
1811 if (CONVERT_EXPR_P (*expr_p))
1813 tree sub = TREE_OPERAND (*expr_p, 0);
1815 /* If a NOP conversion is changing the type of a COMPONENT_REF
1816 expression, then canonicalize its type now in order to expose more
1817 redundant conversions. */
1818 if (TREE_CODE (sub) == COMPONENT_REF)
1819 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1821 /* If a NOP conversion is changing a pointer to array of foo
1822 to a pointer to foo, embed that change in the ADDR_EXPR. */
1823 else if (TREE_CODE (sub) == ADDR_EXPR)
1824 canonicalize_addr_expr (expr_p);
1827 /* If we have a conversion to a non-register type force the
1828 use of a VIEW_CONVERT_EXPR instead. */
1829 if (!is_gimple_reg_type (TREE_TYPE (*expr_p)))
1830 *expr_p = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1831 TREE_OPERAND (*expr_p, 0));
1833 return GS_OK;
1836 /* Nonlocal VLAs seen in the current function. */
1837 static struct pointer_set_t *nonlocal_vlas;
1839 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1840 DECL_VALUE_EXPR, and it's worth re-examining things. */
1842 static enum gimplify_status
1843 gimplify_var_or_parm_decl (tree *expr_p)
1845 tree decl = *expr_p;
1847 /* ??? If this is a local variable, and it has not been seen in any
1848 outer BIND_EXPR, then it's probably the result of a duplicate
1849 declaration, for which we've already issued an error. It would
1850 be really nice if the front end wouldn't leak these at all.
1851 Currently the only known culprit is C++ destructors, as seen
1852 in g++.old-deja/g++.jason/binding.C. */
1853 if (TREE_CODE (decl) == VAR_DECL
1854 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1855 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1856 && decl_function_context (decl) == current_function_decl)
1858 gcc_assert (errorcount || sorrycount);
1859 return GS_ERROR;
1862 /* When within an OpenMP context, notice uses of variables. */
1863 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1864 return GS_ALL_DONE;
1866 /* If the decl is an alias for another expression, substitute it now. */
1867 if (DECL_HAS_VALUE_EXPR_P (decl))
1869 tree value_expr = DECL_VALUE_EXPR (decl);
1871 /* For referenced nonlocal VLAs add a decl for debugging purposes
1872 to the current function. */
1873 if (TREE_CODE (decl) == VAR_DECL
1874 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1875 && nonlocal_vlas != NULL
1876 && TREE_CODE (value_expr) == INDIRECT_REF
1877 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1878 && decl_function_context (decl) != current_function_decl)
1880 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1881 while (ctx && ctx->region_type == ORT_WORKSHARE)
1882 ctx = ctx->outer_context;
1883 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1885 tree copy = copy_node (decl), block;
1887 lang_hooks.dup_lang_specific_decl (copy);
1888 SET_DECL_RTL (copy, NULL_RTX);
1889 TREE_USED (copy) = 1;
1890 block = DECL_INITIAL (current_function_decl);
1891 TREE_CHAIN (copy) = BLOCK_VARS (block);
1892 BLOCK_VARS (block) = copy;
1893 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1894 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1898 *expr_p = unshare_expr (value_expr);
1899 return GS_OK;
1902 return GS_ALL_DONE;
1906 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1907 node *EXPR_P.
1909 compound_lval
1910 : min_lval '[' val ']'
1911 | min_lval '.' ID
1912 | compound_lval '[' val ']'
1913 | compound_lval '.' ID
1915 This is not part of the original SIMPLE definition, which separates
1916 array and member references, but it seems reasonable to handle them
1917 together. Also, this way we don't run into problems with union
1918 aliasing; gcc requires that for accesses through a union to alias, the
1919 union reference must be explicit, which was not always the case when we
1920 were splitting up array and member refs.
1922 PRE_P points to the sequence where side effects that must happen before
1923 *EXPR_P should be stored.
1925 POST_P points to the sequence where side effects that must happen after
1926 *EXPR_P should be stored. */
1928 static enum gimplify_status
1929 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1930 fallback_t fallback)
1932 tree *p;
1933 VEC(tree,heap) *stack;
1934 enum gimplify_status ret = GS_OK, tret;
1935 int i;
1937 /* Create a stack of the subexpressions so later we can walk them in
1938 order from inner to outer. */
1939 stack = VEC_alloc (tree, heap, 10);
1941 /* We can handle anything that get_inner_reference can deal with. */
1942 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1944 restart:
1945 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1946 if (TREE_CODE (*p) == INDIRECT_REF)
1947 *p = fold_indirect_ref (*p);
1949 if (handled_component_p (*p))
1951 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1952 additional COMPONENT_REFs. */
1953 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1954 && gimplify_var_or_parm_decl (p) == GS_OK)
1955 goto restart;
1956 else
1957 break;
1959 VEC_safe_push (tree, heap, stack, *p);
1962 gcc_assert (VEC_length (tree, stack));
1964 /* Now STACK is a stack of pointers to all the refs we've walked through
1965 and P points to the innermost expression.
1967 Java requires that we elaborate nodes in source order. That
1968 means we must gimplify the inner expression followed by each of
1969 the indices, in order. But we can't gimplify the inner
1970 expression until we deal with any variable bounds, sizes, or
1971 positions in order to deal with PLACEHOLDER_EXPRs.
1973 So we do this in three steps. First we deal with the annotations
1974 for any variables in the components, then we gimplify the base,
1975 then we gimplify any indices, from left to right. */
1976 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1978 tree t = VEC_index (tree, stack, i);
1980 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1982 /* Gimplify the low bound and element type size and put them into
1983 the ARRAY_REF. If these values are set, they have already been
1984 gimplified. */
1985 if (TREE_OPERAND (t, 2) == NULL_TREE)
1987 tree low = unshare_expr (array_ref_low_bound (t));
1988 if (!is_gimple_min_invariant (low))
1990 TREE_OPERAND (t, 2) = low;
1991 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1992 post_p, is_gimple_reg,
1993 fb_rvalue);
1994 ret = MIN (ret, tret);
1998 if (!TREE_OPERAND (t, 3))
2000 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2001 tree elmt_size = unshare_expr (array_ref_element_size (t));
2002 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2004 /* Divide the element size by the alignment of the element
2005 type (above). */
2006 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
2008 if (!is_gimple_min_invariant (elmt_size))
2010 TREE_OPERAND (t, 3) = elmt_size;
2011 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2012 post_p, is_gimple_reg,
2013 fb_rvalue);
2014 ret = MIN (ret, tret);
2018 else if (TREE_CODE (t) == COMPONENT_REF)
2020 /* Set the field offset into T and gimplify it. */
2021 if (!TREE_OPERAND (t, 2))
2023 tree offset = unshare_expr (component_ref_field_offset (t));
2024 tree field = TREE_OPERAND (t, 1);
2025 tree factor
2026 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2028 /* Divide the offset by its alignment. */
2029 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
2031 if (!is_gimple_min_invariant (offset))
2033 TREE_OPERAND (t, 2) = offset;
2034 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2035 post_p, is_gimple_reg,
2036 fb_rvalue);
2037 ret = MIN (ret, tret);
2043 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2044 so as to match the min_lval predicate. Failure to do so may result
2045 in the creation of large aggregate temporaries. */
2046 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2047 fallback | fb_lvalue);
2048 ret = MIN (ret, tret);
2050 /* And finally, the indices and operands to BIT_FIELD_REF. During this
2051 loop we also remove any useless conversions. */
2052 for (; VEC_length (tree, stack) > 0; )
2054 tree t = VEC_pop (tree, stack);
2056 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2058 /* Gimplify the dimension. */
2059 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2061 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2062 is_gimple_val, fb_rvalue);
2063 ret = MIN (ret, tret);
2066 else if (TREE_CODE (t) == BIT_FIELD_REF)
2068 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2069 is_gimple_val, fb_rvalue);
2070 ret = MIN (ret, tret);
2071 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2072 is_gimple_val, fb_rvalue);
2073 ret = MIN (ret, tret);
2076 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2078 /* The innermost expression P may have originally had
2079 TREE_SIDE_EFFECTS set which would have caused all the outer
2080 expressions in *EXPR_P leading to P to also have had
2081 TREE_SIDE_EFFECTS set. */
2082 recalculate_side_effects (t);
2085 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2086 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2088 canonicalize_component_ref (expr_p);
2089 ret = MIN (ret, GS_OK);
2092 VEC_free (tree, heap, stack);
2094 return ret;
2097 /* Gimplify the self modifying expression pointed to by EXPR_P
2098 (++, --, +=, -=).
2100 PRE_P points to the list where side effects that must happen before
2101 *EXPR_P should be stored.
2103 POST_P points to the list where side effects that must happen after
2104 *EXPR_P should be stored.
2106 WANT_VALUE is nonzero iff we want to use the value of this expression
2107 in another expression. */
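/* Roughly, with WANT_VALUE set a postfix use such as
       b = a++;
   is gimplified as
       a.1 = a;
       b = a.1;
       a = a.1 + 1;
   where the last assignment comes from the post queue, while the prefix
   forms simply become a = a + 1 (or a - 1).  The temporary name a.1 is
   only illustrative.  */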
2109 static enum gimplify_status
2110 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2111 bool want_value)
2113 enum tree_code code;
2114 tree lhs, lvalue, rhs, t1;
2115 gimple_seq post = NULL, *orig_post_p = post_p;
2116 bool postfix;
2117 enum tree_code arith_code;
2118 enum gimplify_status ret;
2120 code = TREE_CODE (*expr_p);
2122 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2123 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2125 /* Prefix or postfix? */
2126 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2127 /* Faster to treat as prefix if result is not used. */
2128 postfix = want_value;
2129 else
2130 postfix = false;
2132 /* For postfix, make sure the inner expression's post side effects
2133 are executed after side effects from this expression. */
2134 if (postfix)
2135 post_p = &post;
2137 /* Add or subtract? */
2138 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2139 arith_code = PLUS_EXPR;
2140 else
2141 arith_code = MINUS_EXPR;
2143 /* Gimplify the LHS into a GIMPLE lvalue. */
2144 lvalue = TREE_OPERAND (*expr_p, 0);
2145 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2146 if (ret == GS_ERROR)
2147 return ret;
2149 /* Extract the operands to the arithmetic operation. */
2150 lhs = lvalue;
2151 rhs = TREE_OPERAND (*expr_p, 1);
2153 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2154 that as the result value and in the postqueue operation. We also
2155 make sure to make lvalue a minimal lval, see
2156 gcc.c-torture/execute/20040313-1.c for an example where this matters. */
2157 if (postfix)
2159 if (!is_gimple_min_lval (lvalue))
2161 mark_addressable (lvalue);
2162 lvalue = build_fold_addr_expr (lvalue);
2163 gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2164 lvalue = build_fold_indirect_ref (lvalue);
2166 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2167 if (ret == GS_ERROR)
2168 return ret;
2171 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
2172 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2174 rhs = fold_convert (sizetype, rhs);
2175 if (arith_code == MINUS_EXPR)
2176 rhs = fold_build1 (NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2177 arith_code = POINTER_PLUS_EXPR;
2180 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2182 if (postfix)
2184 gimplify_assign (lvalue, t1, orig_post_p);
2185 gimplify_seq_add_seq (orig_post_p, post);
2186 *expr_p = lhs;
2187 return GS_ALL_DONE;
2189 else
2191 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2192 return GS_OK;
2197 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
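/* This matters mainly for variable-length types: callers such as
   gimplify_arg and gimplify_init_ctor_preeval rely on the recorded size
   when the object's size is not a compile-time constant.  */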
2199 static void
2200 maybe_with_size_expr (tree *expr_p)
2202 tree expr = *expr_p;
2203 tree type = TREE_TYPE (expr);
2204 tree size;
2206 /* If we've already wrapped this or the type is error_mark_node, we can't do
2207 anything. */
2208 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2209 || type == error_mark_node)
2210 return;
2212 /* If the size isn't known or is a constant, we have nothing to do. */
2213 size = TYPE_SIZE_UNIT (type);
2214 if (!size || TREE_CODE (size) == INTEGER_CST)
2215 return;
2217 /* Otherwise, make a WITH_SIZE_EXPR. */
2218 size = unshare_expr (size);
2219 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2220 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2224 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
2225 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2226 the CALL_EXPR. */
2228 static enum gimplify_status
2229 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2231 bool (*test) (tree);
2232 fallback_t fb;
2234 /* In general, we allow lvalues for function arguments to avoid
2235 extra overhead of copying large aggregates out of even larger
2236 aggregates into temporaries only to copy the temporaries to
2237 the argument list. Make optimizers happy by pulling out to
2238 temporaries those types that fit in registers. */
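     E.g. a scalar argument that fits in a register is evaluated into a
     GIMPLE value, whereas a large struct argument may legitimately stay
     an lvalue and be copied directly from its home.  */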
2239 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2240 test = is_gimple_val, fb = fb_rvalue;
2241 else
2242 test = is_gimple_lvalue, fb = fb_either;
2244 /* If this is a variable sized type, we must remember the size. */
2245 maybe_with_size_expr (arg_p);
2247 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2248 /* Make sure arguments have the same location as the function call
2249 itself. */
2250 protected_set_expr_location (*arg_p, call_location);
2252 /* There is a sequence point before a function call. Side effects in
2253 the argument list must occur before the actual call. So, when
2254 gimplifying arguments, force gimplify_expr to use an internal
2255 post queue which is then appended to the end of PRE_P. */
2256 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2260 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2261 WANT_VALUE is true if the result of the call is desired. */
2263 static enum gimplify_status
2264 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2266 tree fndecl, parms, p;
2267 enum gimplify_status ret;
2268 int i, nargs;
2269 gimple call;
2270 bool builtin_va_start_p = FALSE;
2272 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2274 /* For reliable diagnostics during inlining, it is necessary that
2275 every call_expr be annotated with file and line. */
2276 if (! EXPR_HAS_LOCATION (*expr_p))
2277 SET_EXPR_LOCATION (*expr_p, input_location);
2279 /* This may be a call to a builtin function.
2281 Builtin function calls may be transformed into different
2282 (and more efficient) builtin function calls under certain
2283 circumstances. Unfortunately, gimplification can muck things
2284 up enough that the builtin expanders are not aware that certain
2285 transformations are still valid.
2287 So we attempt transformation/gimplification of the call before
2288 we gimplify the CALL_EXPR. At this time we do not manage to
2289 transform all calls in the same manner as the expanders do, but
2290 we do transform most of them. */
2291 fndecl = get_callee_fndecl (*expr_p);
2292 if (fndecl && DECL_BUILT_IN (fndecl))
2294 tree new_tree = fold_call_expr (*expr_p, !want_value);
2296 if (new_tree && new_tree != *expr_p)
2298 /* There was a transformation of this call which computes the
2299 same value, but in a more efficient way. Return and try
2300 again. */
2301 *expr_p = new_tree;
2302 return GS_OK;
2305 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2306 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2308 builtin_va_start_p = TRUE;
2309 if (call_expr_nargs (*expr_p) < 2)
2311 error ("too few arguments to function %<va_start%>");
2312 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2313 return GS_OK;
2316 if (fold_builtin_next_arg (*expr_p, true))
2318 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2319 return GS_OK;
2324 /* There is a sequence point before the call, so any side effects in
2325 the calling expression must occur before the actual call. Force
2326 gimplify_expr to use an internal post queue. */
2327 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2328 is_gimple_call_addr, fb_rvalue);
2330 nargs = call_expr_nargs (*expr_p);
2332 /* Get argument types for verification. */
2333 fndecl = get_callee_fndecl (*expr_p);
2334 parms = NULL_TREE;
2335 if (fndecl)
2336 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2337 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2338 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2340 if (fndecl && DECL_ARGUMENTS (fndecl))
2341 p = DECL_ARGUMENTS (fndecl);
2342 else if (parms)
2343 p = parms;
2344 else
2345 p = NULL_TREE;
2346 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2349 /* If the last argument is __builtin_va_arg_pack () and it is not
2350 passed as a named argument, decrease the number of CALL_EXPR
2351 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2352 if (!p
2353 && i < nargs
2354 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2356 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2357 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2359 if (last_arg_fndecl
2360 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2361 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2362 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2364 tree call = *expr_p;
2366 --nargs;
2367 *expr_p = build_call_array (TREE_TYPE (call), CALL_EXPR_FN (call),
2368 nargs, CALL_EXPR_ARGP (call));
2370 /* Copy all CALL_EXPR flags, location and block, except
2371 the CALL_EXPR_VA_ARG_PACK flag. */
2372 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2373 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2374 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2375 = CALL_EXPR_RETURN_SLOT_OPT (call);
2376 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2377 CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
2378 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2379 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2381 /* Set CALL_EXPR_VA_ARG_PACK. */
2382 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2386 /* Finally, gimplify the function arguments. */
2387 if (nargs > 0)
2389 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2390 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2391 PUSH_ARGS_REVERSED ? i-- : i++)
2393 enum gimplify_status t;
2395 /* Avoid gimplifying the second argument to va_start, which needs to
2396 be the plain PARM_DECL. */
2397 if ((i != 1) || !builtin_va_start_p)
2399 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2400 EXPR_LOCATION (*expr_p));
2402 if (t == GS_ERROR)
2403 ret = GS_ERROR;
2408 /* Try this again in case gimplification exposed something. */
2409 if (ret != GS_ERROR)
2411 tree new_tree = fold_call_expr (*expr_p, !want_value);
2413 if (new_tree && new_tree != *expr_p)
2415 /* There was a transformation of this call which computes the
2416 same value, but in a more efficient way. Return and try
2417 again. */
2418 *expr_p = new_tree;
2419 return GS_OK;
2422 else
2424 *expr_p = error_mark_node;
2425 return GS_ERROR;
2428 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2429 decl. This allows us to eliminate redundant or useless
2430 calls to "const" functions. */
2431 if (TREE_CODE (*expr_p) == CALL_EXPR)
2433 int flags = call_expr_flags (*expr_p);
2434 if (flags & (ECF_CONST | ECF_PURE)
2435 /* An infinite loop is considered a side effect. */
2436 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2437 TREE_SIDE_EFFECTS (*expr_p) = 0;
2440 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2441 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2442 form and delegate the creation of a GIMPLE_CALL to
2443 gimplify_modify_expr. This is always possible because when
2444 WANT_VALUE is true, the caller wants the result of this call into
2445 a temporary, which means that we will emit an INIT_EXPR in
2446 internal_get_tmp_var which will then be handled by
2447 gimplify_modify_expr. */
2448 if (!want_value)
2450 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2451 have to do is replicate it as a GIMPLE_CALL tuple. */
2452 call = gimple_build_call_from_tree (*expr_p);
2453 gimplify_seq_add_stmt (pre_p, call);
2454 *expr_p = NULL_TREE;
2457 return ret;
2460 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2461 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2463 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2464 condition is true or false, respectively. If null, we should generate
2465 our own to skip over the evaluation of this specific expression.
2467 LOCUS is the source location of the COND_EXPR.
2469 This function is the tree equivalent of do_jump.
2471 shortcut_cond_r should only be called by shortcut_cond_expr. */
2473 static tree
2474 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2475 location_t locus)
2477 tree local_label = NULL_TREE;
2478 tree t, expr = NULL;
2480 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2481 retain the shortcut semantics. Just insert the gotos here;
2482 shortcut_cond_expr will append the real blocks later. */
2483 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2485 location_t new_locus;
2487 /* Turn if (a && b) into
2489 if (a); else goto no;
2490 if (b) goto yes; else goto no;
2491 (no:) */
2493 if (false_label_p == NULL)
2494 false_label_p = &local_label;
2496 /* Keep the original source location on the first 'if'. */
2497 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2498 append_to_statement_list (t, &expr);
2500 /* Set the source location of the && on the second 'if'. */
2501 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2502 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2503 new_locus);
2504 append_to_statement_list (t, &expr);
2506 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2508 location_t new_locus;
2510 /* Turn if (a || b) into
2512 if (a) goto yes;
2513 if (b) goto yes; else goto no;
2514 (yes:) */
2516 if (true_label_p == NULL)
2517 true_label_p = &local_label;
2519 /* Keep the original source location on the first 'if'. */
2520 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2521 append_to_statement_list (t, &expr);
2523 /* Set the source location of the || on the second 'if'. */
2524 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2525 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2526 new_locus);
2527 append_to_statement_list (t, &expr);
2529 else if (TREE_CODE (pred) == COND_EXPR)
2531 location_t new_locus;
2533 /* As long as we're messing with gotos, turn if (a ? b : c) into
2534 if (a)
2535 if (b) goto yes; else goto no;
2536 else
2537 if (c) goto yes; else goto no; */
2539 /* Keep the original source location on the first 'if'. Set the source
2540 location of the ? on the second 'if'. */
2541 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2542 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2543 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2544 false_label_p, locus),
2545 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2546 false_label_p, new_locus));
2548 else
2550 expr = build3 (COND_EXPR, void_type_node, pred,
2551 build_and_jump (true_label_p),
2552 build_and_jump (false_label_p));
2553 SET_EXPR_LOCATION (expr, locus);
2556 if (local_label)
2558 t = build1 (LABEL_EXPR, void_type_node, local_label);
2559 append_to_statement_list (t, &expr);
2562 return expr;
2565 /* Given a conditional expression EXPR with short-circuit boolean
2566 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2567 predicate apart into the equivalent sequence of conditionals. */
2569 static tree
2570 shortcut_cond_expr (tree expr)
2572 tree pred = TREE_OPERAND (expr, 0);
2573 tree then_ = TREE_OPERAND (expr, 1);
2574 tree else_ = TREE_OPERAND (expr, 2);
2575 tree true_label, false_label, end_label, t;
2576 tree *true_label_p;
2577 tree *false_label_p;
2578 bool emit_end, emit_false, jump_over_else;
2579 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2580 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2582 /* First do simple transformations. */
2583 if (!else_se)
2585 /* If there is no 'else', turn
2586 if (a && b) then c
2587 into
2588 if (a) if (b) then c. */
2589 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2591 /* Keep the original source location on the first 'if'. */
2592 location_t locus = EXPR_HAS_LOCATION (expr)
2593 ? EXPR_LOCATION (expr) : input_location;
2594 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2595 /* Set the source location of the && on the second 'if'. */
2596 if (EXPR_HAS_LOCATION (pred))
2597 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2598 then_ = shortcut_cond_expr (expr);
2599 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2600 pred = TREE_OPERAND (pred, 0);
2601 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2602 SET_EXPR_LOCATION (expr, locus);
2606 if (!then_se)
2608 /* If there is no 'then', turn
2609 if (a || b); else d
2610 into
2611 if (a); else if (b); else d. */
2612 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2614 /* Keep the original source location on the first 'if'. */
2615 location_t locus = EXPR_HAS_LOCATION (expr)
2616 ? EXPR_LOCATION (expr) : input_location;
2617 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2618 /* Set the source location of the || on the second 'if'. */
2619 if (EXPR_HAS_LOCATION (pred))
2620 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2621 else_ = shortcut_cond_expr (expr);
2622 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2623 pred = TREE_OPERAND (pred, 0);
2624 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2625 SET_EXPR_LOCATION (expr, locus);
2629 /* If we're done, great. */
2630 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2631 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2632 return expr;
2634 /* Otherwise we need to mess with gotos. Change
2635 if (a) c; else d;
2637 if (a); else goto no;
2638 c; goto end;
2639 no: d; end:
2640 and recursively gimplify the condition. */
2642 true_label = false_label = end_label = NULL_TREE;
2644 /* If our arms just jump somewhere, hijack those labels so we don't
2645 generate jumps to jumps. */
2647 if (then_
2648 && TREE_CODE (then_) == GOTO_EXPR
2649 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2651 true_label = GOTO_DESTINATION (then_);
2652 then_ = NULL;
2653 then_se = false;
2656 if (else_
2657 && TREE_CODE (else_) == GOTO_EXPR
2658 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2660 false_label = GOTO_DESTINATION (else_);
2661 else_ = NULL;
2662 else_se = false;
2665 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2666 if (true_label)
2667 true_label_p = &true_label;
2668 else
2669 true_label_p = NULL;
2671 /* The 'else' branch also needs a label if it contains interesting code. */
2672 if (false_label || else_se)
2673 false_label_p = &false_label;
2674 else
2675 false_label_p = NULL;
2677 /* If there was nothing else in our arms, just forward the label(s). */
2678 if (!then_se && !else_se)
2679 return shortcut_cond_r (pred, true_label_p, false_label_p,
2680 EXPR_HAS_LOCATION (expr)
2681 ? EXPR_LOCATION (expr) : input_location);
2683 /* If our last subexpression already has a terminal label, reuse it. */
2684 if (else_se)
2685 t = expr_last (else_);
2686 else if (then_se)
2687 t = expr_last (then_);
2688 else
2689 t = NULL;
2690 if (t && TREE_CODE (t) == LABEL_EXPR)
2691 end_label = LABEL_EXPR_LABEL (t);
2693 /* If we don't care about jumping to the 'else' branch, jump to the end
2694 if the condition is false. */
2695 if (!false_label_p)
2696 false_label_p = &end_label;
2698 /* We only want to emit these labels if we aren't hijacking them. */
2699 emit_end = (end_label == NULL_TREE);
2700 emit_false = (false_label == NULL_TREE);
2702 /* We only emit the jump over the else clause if we have to--if the
2703 then clause may fall through. Otherwise we can wind up with a
2704 useless jump and a useless label at the end of gimplified code,
2705 which will cause us to think that this conditional as a whole
2706 falls through even if it doesn't. If we then inline a function
2707 which ends with such a condition, that can cause us to issue an
2708 inappropriate warning about control reaching the end of a
2709 non-void function. */
2710 jump_over_else = block_may_fallthru (then_);
2712 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2713 EXPR_HAS_LOCATION (expr)
2714 ? EXPR_LOCATION (expr) : input_location);
2716 expr = NULL;
2717 append_to_statement_list (pred, &expr);
2719 append_to_statement_list (then_, &expr);
2720 if (else_se)
2722 if (jump_over_else)
2724 tree last = expr_last (expr);
2725 t = build_and_jump (&end_label);
2726 if (EXPR_HAS_LOCATION (last))
2727 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2728 append_to_statement_list (t, &expr);
2730 if (emit_false)
2732 t = build1 (LABEL_EXPR, void_type_node, false_label);
2733 append_to_statement_list (t, &expr);
2735 append_to_statement_list (else_, &expr);
2737 if (emit_end && end_label)
2739 t = build1 (LABEL_EXPR, void_type_node, end_label);
2740 append_to_statement_list (t, &expr);
2743 return expr;
2746 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
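/* For example, an integer-typed condition such as (a & 4) falls into
   the default case below and is wrapped in a conversion to
   boolean_type_node, while comparisons and TRUTH_* operations simply
   have their type (and, for TRUTH_*, their operands) made boolean.  */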
2748 tree
2749 gimple_boolify (tree expr)
2751 tree type = TREE_TYPE (expr);
2753 if (TREE_CODE (type) == BOOLEAN_TYPE)
2754 return expr;
2756 switch (TREE_CODE (expr))
2758 case TRUTH_AND_EXPR:
2759 case TRUTH_OR_EXPR:
2760 case TRUTH_XOR_EXPR:
2761 case TRUTH_ANDIF_EXPR:
2762 case TRUTH_ORIF_EXPR:
2763 /* Also boolify the arguments of truth exprs. */
2764 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2765 /* FALLTHRU */
2767 case TRUTH_NOT_EXPR:
2768 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2769 /* FALLTHRU */
2771 case EQ_EXPR: case NE_EXPR:
2772 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2773 /* These expressions always produce boolean results. */
2774 TREE_TYPE (expr) = boolean_type_node;
2775 return expr;
2777 default:
2778 /* Other expressions that get here must have boolean values, but
2779 might need to be converted to the appropriate mode. */
2780 return fold_convert (boolean_type_node, expr);
2784 /* Given a conditional expression *EXPR_P without side effects, gimplify
2785 its operands. New statements are inserted to PRE_P. */
2787 static enum gimplify_status
2788 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2790 tree expr = *expr_p, cond;
2791 enum gimplify_status ret, tret;
2792 enum tree_code code;
2794 cond = gimple_boolify (COND_EXPR_COND (expr));
2796 /* We need to handle && and || specially, as their gimplification
2797 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
2798 code = TREE_CODE (cond);
2799 if (code == TRUTH_ANDIF_EXPR)
2800 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2801 else if (code == TRUTH_ORIF_EXPR)
2802 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2803 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2804 COND_EXPR_COND (*expr_p) = cond;
2806 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2807 is_gimple_val, fb_rvalue);
2808 ret = MIN (ret, tret);
2809 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2810 is_gimple_val, fb_rvalue);
2812 return MIN (ret, tret);
2815 /* Returns true if evaluating EXPR could trap.
2816 EXPR is GENERIC, while tree_could_trap_p can be called
2817 only on GIMPLE. */
2819 static bool
2820 generic_expr_could_trap_p (tree expr)
2822 unsigned i, n;
2824 if (!expr || is_gimple_val (expr))
2825 return false;
2827 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2828 return true;
2830 n = TREE_OPERAND_LENGTH (expr);
2831 for (i = 0; i < n; i++)
2832 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2833 return true;
2835 return false;
2838 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2839 into
2841 if (p) if (p)
2842 t1 = a; a;
2843 else or else
2844 t1 = b; b;
2847 The second form is used when *EXPR_P is of type void.
2849 PRE_P points to the list where side effects that must happen before
2850 *EXPR_P should be stored. */
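/* Roughly: when only an lvalue will do, or the type is TREE_ADDRESSABLE,
   the code below instead takes the address of each arm into a pointer
   temporary and rewrites *EXPR_P as a dereference of that temporary.  */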
2852 static enum gimplify_status
2853 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2855 tree expr = *expr_p;
2856 tree tmp, type, arm1, arm2;
2857 enum gimplify_status ret;
2858 tree label_true, label_false, label_cont;
2859 bool have_then_clause_p, have_else_clause_p;
2860 gimple gimple_cond;
2861 enum tree_code pred_code;
2862 gimple_seq seq = NULL;
2864 type = TREE_TYPE (expr);
2866 /* If this COND_EXPR has a value, copy the values into a temporary within
2867 the arms. */
2868 if (! VOID_TYPE_P (type))
2870 tree result;
2872 /* If an rvalue is ok or we do not require an lvalue, avoid creating
2873 an addressable temporary. */
2874 if (((fallback & fb_rvalue)
2875 || !(fallback & fb_lvalue))
2876 && !TREE_ADDRESSABLE (type))
2878 if (gimplify_ctxp->allow_rhs_cond_expr
2879 /* If either branch has side effects or could trap, it can't be
2880 evaluated unconditionally. */
2881 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
2882 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
2883 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
2884 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
2885 return gimplify_pure_cond_expr (expr_p, pre_p);
2887 result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2888 ret = GS_ALL_DONE;
2890 else
2892 tree type = build_pointer_type (TREE_TYPE (expr));
2894 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2895 TREE_OPERAND (expr, 1) =
2896 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2898 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2899 TREE_OPERAND (expr, 2) =
2900 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2902 tmp = create_tmp_var (type, "iftmp");
2904 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2905 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2907 result = build_fold_indirect_ref (tmp);
2910 /* Build the then clause, 't1 = a;'. But don't build an assignment
2911 if this branch is void; in C++ it can be, if it's a throw. */
2912 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2913 TREE_OPERAND (expr, 1)
2914 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));
2916 /* Build the else clause, 't1 = b;'. */
2917 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2918 TREE_OPERAND (expr, 2)
2919 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));
2921 TREE_TYPE (expr) = void_type_node;
2922 recalculate_side_effects (expr);
2924 /* Move the COND_EXPR to the prequeue. */
2925 gimplify_stmt (&expr, pre_p);
2927 *expr_p = result;
2928 return GS_ALL_DONE;
2931 /* Make sure the condition has BOOLEAN_TYPE. */
2932 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2934 /* Break apart && and || conditions. */
2935 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2936 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2938 expr = shortcut_cond_expr (expr);
2940 if (expr != *expr_p)
2942 *expr_p = expr;
2944 /* We can't rely on gimplify_expr to re-gimplify the expanded
2945 form properly, as cleanups might cause the target labels to be
2946 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2947 set up a conditional context. */
2948 gimple_push_condition ();
2949 gimplify_stmt (expr_p, &seq);
2950 gimple_pop_condition (pre_p);
2951 gimple_seq_add_seq (pre_p, seq);
2953 return GS_ALL_DONE;
2957 /* Now do the normal gimplification. */
2959 /* Gimplify condition. */
2960 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2961 fb_rvalue);
2962 if (ret == GS_ERROR)
2963 return GS_ERROR;
2964 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2966 gimple_push_condition ();
2968 have_then_clause_p = have_else_clause_p = false;
2969 if (TREE_OPERAND (expr, 1) != NULL
2970 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2971 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2972 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2973 == current_function_decl)
2974 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2975 have different locations, otherwise we end up with incorrect
2976 location information on the branches. */
2977 && (optimize
2978 || !EXPR_HAS_LOCATION (expr)
2979 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2980 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2982 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2983 have_then_clause_p = true;
2985 else
2986 label_true = create_artificial_label (UNKNOWN_LOCATION);
2987 if (TREE_OPERAND (expr, 2) != NULL
2988 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2989 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
2990 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
2991 == current_function_decl)
2992 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2993 have different locations, otherwise we end up with incorrect
2994 location information on the branches. */
2995 && (optimize
2996 || !EXPR_HAS_LOCATION (expr)
2997 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
2998 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3000 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3001 have_else_clause_p = true;
3003 else
3004 label_false = create_artificial_label (UNKNOWN_LOCATION);
3006 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3007 &arm2);
3009 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3010 label_false);
3012 gimplify_seq_add_stmt (&seq, gimple_cond);
3013 label_cont = NULL_TREE;
3014 if (!have_then_clause_p)
3016 /* For if (...) {} else { code; } put label_true after
3017 the else block. */
3018 if (TREE_OPERAND (expr, 1) == NULL_TREE
3019 && !have_else_clause_p
3020 && TREE_OPERAND (expr, 2) != NULL_TREE)
3021 label_cont = label_true;
3022 else
3024 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3025 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3026 /* For if (...) { code; } else {} or
3027 if (...) { code; } else goto label; or
3028 if (...) { code; return; } else { ... }
3029 label_cont isn't needed. */
3030 if (!have_else_clause_p
3031 && TREE_OPERAND (expr, 2) != NULL_TREE
3032 && gimple_seq_may_fallthru (seq))
3034 gimple g;
3035 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3037 g = gimple_build_goto (label_cont);
3039 /* GIMPLE_COND's are very low level; they have embedded
3040 gotos. This particular embedded goto should not be marked
3041 with the location of the original COND_EXPR, as it would
3042 correspond to the COND_EXPR's condition, not the ELSE or the
3043 THEN arms. To avoid marking it with the wrong location, flag
3044 it as "no location". */
3045 gimple_set_do_not_emit_location (g);
3047 gimplify_seq_add_stmt (&seq, g);
3051 if (!have_else_clause_p)
3053 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3054 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3056 if (label_cont)
3057 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3059 gimple_pop_condition (pre_p);
3060 gimple_seq_add_seq (pre_p, seq);
3062 if (ret == GS_ERROR)
3063 ; /* Do nothing. */
3064 else if (have_then_clause_p || have_else_clause_p)
3065 ret = GS_ALL_DONE;
3066 else
3068 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3069 expr = TREE_OPERAND (expr, 0);
3070 gimplify_stmt (&expr, pre_p);
3073 *expr_p = NULL;
3074 return ret;
3077 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3078 a call to __builtin_memcpy. */
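/* Roughly, an aggregate copy "a = b" of a SIZE-byte struct becomes
       __builtin_memcpy (&a, &b, SIZE);
   and when WANT_VALUE is set the call's returned destination pointer is
   dereferenced to stand for the stored object.  */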
3080 static enum gimplify_status
3081 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3082 gimple_seq *seq_p)
3084 tree t, to, to_ptr, from, from_ptr;
3085 gimple gs;
3087 to = TREE_OPERAND (*expr_p, 0);
3088 from = TREE_OPERAND (*expr_p, 1);
3090 mark_addressable (from);
3091 from_ptr = build_fold_addr_expr (from);
3092 gimplify_arg (&from_ptr, seq_p, EXPR_LOCATION (*expr_p));
3094 mark_addressable (to);
3095 to_ptr = build_fold_addr_expr (to);
3096 gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p));
3098 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3100 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3102 if (want_value)
3104 /* tmp = memcpy() */
3105 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3106 gimple_call_set_lhs (gs, t);
3107 gimplify_seq_add_stmt (seq_p, gs);
3109 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3110 return GS_ALL_DONE;
3113 gimplify_seq_add_stmt (seq_p, gs);
3114 *expr_p = NULL;
3115 return GS_ALL_DONE;
3118 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3119 a call to __builtin_memset. In this case we know that the RHS is
3120 a CONSTRUCTOR with an empty element list. */
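/* Roughly, "s = (struct S){ }" for a SIZE-byte struct becomes
       __builtin_memset (&s, 0, SIZE);
   with the same WANT_VALUE treatment as the memcpy case above.  */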
3122 static enum gimplify_status
3123 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3124 gimple_seq *seq_p)
3126 tree t, from, to, to_ptr;
3127 gimple gs;
3129 /* Assert our assumptions, to abort instead of producing wrong code
3130 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3131 not be immediately exposed. */
3132 from = TREE_OPERAND (*expr_p, 1);
3133 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3134 from = TREE_OPERAND (from, 0);
3136 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3137 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3139 /* Now proceed. */
3140 to = TREE_OPERAND (*expr_p, 0);
3142 to_ptr = build_fold_addr_expr (to);
3143 gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p));
3144 t = implicit_built_in_decls[BUILT_IN_MEMSET];
3146 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3148 if (want_value)
3150 /* tmp = memset() */
3151 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3152 gimple_call_set_lhs (gs, t);
3153 gimplify_seq_add_stmt (seq_p, gs);
3155 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3156 return GS_ALL_DONE;
3159 gimplify_seq_add_stmt (seq_p, gs);
3160 *expr_p = NULL;
3161 return GS_ALL_DONE;
3164 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3165 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3166 assignment. Returns non-null if we detect a potential overlap. */
3168 struct gimplify_init_ctor_preeval_data
3170 /* The base decl of the lhs object. May be NULL, in which case we
3171 have to assume the lhs is indirect. */
3172 tree lhs_base_decl;
3174 /* The alias set of the lhs object. */
3175 alias_set_type lhs_alias_set;
3178 static tree
3179 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3181 struct gimplify_init_ctor_preeval_data *data
3182 = (struct gimplify_init_ctor_preeval_data *) xdata;
3183 tree t = *tp;
3185 /* If we find the base object, obviously we have overlap. */
3186 if (data->lhs_base_decl == t)
3187 return t;
3189 /* If the constructor component is indirect, determine if we have a
3190 potential overlap with the lhs. The only bits of information we
3191 have to go on at this point are addressability and alias sets. */
3192 if (TREE_CODE (t) == INDIRECT_REF
3193 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3194 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3195 return t;
3197 /* If the constructor component is a call, determine if it can hide a
3198 potential overlap with the lhs through an INDIRECT_REF like above. */
3199 if (TREE_CODE (t) == CALL_EXPR)
3201 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3203 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3204 if (POINTER_TYPE_P (TREE_VALUE (type))
3205 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3206 && alias_sets_conflict_p (data->lhs_alias_set,
3207 get_alias_set
3208 (TREE_TYPE (TREE_VALUE (type)))))
3209 return t;
3212 if (IS_TYPE_OR_DECL_P (t))
3213 *walk_subtrees = 0;
3214 return NULL;
3217 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3218 force values that overlap with the lhs (as described by *DATA)
3219 into temporaries. */
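/* For instance, if an aggregate element of the constructor reads from
   the object being initialized, as in "a = (struct S){ .sub = a.sub }",
   the element value is evaluated into a temporary here so that it is
   not clobbered by the clearing and stores emitted later.  */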
3221 static void
3222 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3223 struct gimplify_init_ctor_preeval_data *data)
3225 enum gimplify_status one;
3227 /* If the value is constant, then there's nothing to pre-evaluate. */
3228 if (TREE_CONSTANT (*expr_p))
3230 /* Ensure it does not have side effects, it might contain a reference to
3231 the object we're initializing. */
3232 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3233 return;
3236 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3237 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3238 return;
3240 /* Recurse for nested constructors. */
3241 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3243 unsigned HOST_WIDE_INT ix;
3244 constructor_elt *ce;
3245 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3247 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
3248 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3250 return;
3253 /* If this is a variable sized type, we must remember the size. */
3254 maybe_with_size_expr (expr_p);
3256 /* Gimplify the constructor element to something appropriate for the rhs
3257 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3258 the gimplifier will consider this a store to memory. Doing this
3259 gimplification now means that we won't have to deal with complicated
3260 language-specific trees, nor trees like SAVE_EXPR that can induce
3261 exponential search behavior. */
3262 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3263 if (one == GS_ERROR)
3265 *expr_p = NULL;
3266 return;
3269 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3270 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3271 always be true for all scalars, since is_gimple_mem_rhs insists on a
3272 temporary variable for them. */
3273 if (DECL_P (*expr_p))
3274 return;
3276 /* If this is of variable size, we have no choice but to assume it doesn't
3277 overlap since we can't make a temporary for it. */
3278 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3279 return;
3281 /* Otherwise, we must search for overlap ... */
3282 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3283 return;
3285 /* ... and if found, force the value into a temporary. */
3286 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3289 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3290 a RANGE_EXPR in a CONSTRUCTOR for an array.
3292 var = lower;
3293 loop_entry:
3294 object[var] = value;
3295 if (var == upper)
3296 goto loop_exit;
3297 var = var + 1;
3298 goto loop_entry;
3299 loop_exit:
3301 We increment var _after_ the loop exit check because we might otherwise
3302 fail if upper == TYPE_MAX_VALUE (type for upper).
3304 Note that we never have to deal with SAVE_EXPRs here, because this has
3305 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3307 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3308 gimple_seq *, bool);
3310 static void
3311 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3312 tree value, tree array_elt_type,
3313 gimple_seq *pre_p, bool cleared)
3315 tree loop_entry_label, loop_exit_label, fall_thru_label;
3316 tree var, var_type, cref, tmp;
3318 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3319 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3320 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3322 /* Create and initialize the index variable. */
3323 var_type = TREE_TYPE (upper);
3324 var = create_tmp_var (var_type, NULL);
3325 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3327 /* Add the loop entry label. */
3328 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3330 /* Build the reference. */
3331 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3332 var, NULL_TREE, NULL_TREE);
3334 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3335 the store. Otherwise just assign value to the reference. */
3337 if (TREE_CODE (value) == CONSTRUCTOR)
3338 /* NB we might have to call ourselves recursively through
3339 gimplify_init_ctor_eval if the value is a constructor. */
3340 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3341 pre_p, cleared);
3342 else
3343 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3345 /* We exit the loop when the index var is equal to the upper bound. */
3346 gimplify_seq_add_stmt (pre_p,
3347 gimple_build_cond (EQ_EXPR, var, upper,
3348 loop_exit_label, fall_thru_label));
3350 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3352 /* Otherwise, increment the index var... */
3353 tmp = build2 (PLUS_EXPR, var_type, var,
3354 fold_convert (var_type, integer_one_node));
3355 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3357 /* ...and jump back to the loop entry. */
3358 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3360 /* Add the loop exit label. */
3361 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3364 /* Return true if FDECL is accessing a field that is zero sized. */
3366 static bool
3367 zero_sized_field_decl (const_tree fdecl)
3369 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3370 && integer_zerop (DECL_SIZE (fdecl)))
3371 return true;
3372 return false;
3375 /* Return true if TYPE is zero sized. */
3377 static bool
3378 zero_sized_type (const_tree type)
3380 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3381 && integer_zerop (TYPE_SIZE (type)))
3382 return true;
3383 return false;
3386 /* A subroutine of gimplify_init_constructor. Generate individual
3387 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3388 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3389 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3390 zeroed first. */
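/* E.g. for "int a[3] = { 1, 2, 3 }" this emits the assignments
   a[0] = 1, a[1] = 2 and a[2] = 3, skipping zero elements when the
   object has already been block-cleared.  */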
3392 static void
3393 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3394 gimple_seq *pre_p, bool cleared)
3396 tree array_elt_type = NULL;
3397 unsigned HOST_WIDE_INT ix;
3398 tree purpose, value;
3400 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3401 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3403 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3405 tree cref;
3407 /* NULL values are created above for gimplification errors. */
3408 if (value == NULL)
3409 continue;
3411 if (cleared && initializer_zerop (value))
3412 continue;
3414 /* ??? Here's to hoping the front end fills in all of the indices,
3415 so we don't have to figure out what's missing ourselves. */
3416 gcc_assert (purpose);
3418 /* Skip zero-sized fields, unless value has side-effects. This can
3419 happen with calls to functions returning a zero-sized type, which
3420 we shouldn't discard. As a number of downstream passes don't
3421 expect sets of zero-sized fields, we rely on the gimplification of
3422 the MODIFY_EXPR we make below to drop the assignment statement. */
3423 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3424 continue;
3426 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3427 whole range. */
3428 if (TREE_CODE (purpose) == RANGE_EXPR)
3430 tree lower = TREE_OPERAND (purpose, 0);
3431 tree upper = TREE_OPERAND (purpose, 1);
3433 /* If the lower bound is equal to upper, just treat it as if
3434 upper was the index. */
3435 if (simple_cst_equal (lower, upper))
3436 purpose = upper;
3437 else
3439 gimplify_init_ctor_eval_range (object, lower, upper, value,
3440 array_elt_type, pre_p, cleared);
3441 continue;
3445 if (array_elt_type)
3447 /* Do not use bitsizetype for ARRAY_REF indices. */
3448 if (TYPE_DOMAIN (TREE_TYPE (object)))
3449 purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3450 purpose);
3451 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3452 purpose, NULL_TREE, NULL_TREE);
3454 else
3456 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3457 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3458 unshare_expr (object), purpose, NULL_TREE);
3461 if (TREE_CODE (value) == CONSTRUCTOR
3462 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3463 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3464 pre_p, cleared);
3465 else
3467 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3468 gimplify_and_add (init, pre_p);
3469 ggc_free (init);
3475 /* Returns the appropriate RHS predicate for this LHS. */
3477 gimple_predicate
3478 rhs_predicate_for (tree lhs)
3480 if (is_gimple_reg (lhs))
3481 return is_gimple_reg_rhs_or_call;
3482 else
3483 return is_gimple_mem_rhs_or_call;
3486 /* Gimplify a C99 compound literal expression. This just means adding
3487 the DECL_EXPR before the current statement and using its anonymous
3488 decl instead. */
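/* E.g. for the literal in "p = &(int){ 5 }", the DECL_EXPR for the
   front end's anonymous decl D is gimplified into PRE_P (emitting
   "D = 5") and *EXPR_P becomes D, so the assignment ends up as
   "p = &D".  D is only an illustrative name.  */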
3490 static enum gimplify_status
3491 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3493 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3494 tree decl = DECL_EXPR_DECL (decl_s);
3495 /* Mark the decl as addressable if the compound literal
3496 expression is addressable now, otherwise it is marked too late
3497 after we gimplify the initialization expression. */
3498 if (TREE_ADDRESSABLE (*expr_p))
3499 TREE_ADDRESSABLE (decl) = 1;
3501 /* Preliminarily mark non-addressed complex variables as eligible
3502 for promotion to gimple registers. We'll transform their uses
3503 as we find them. */
3504 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3505 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3506 && !TREE_THIS_VOLATILE (decl)
3507 && !needs_to_live_in_memory (decl))
3508 DECL_GIMPLE_REG_P (decl) = 1;
3510 /* This decl isn't mentioned in the enclosing block, so add it to the
3511 list of temps. FIXME it seems a bit of a kludge to say that
3512 anonymous artificial vars aren't pushed, but everything else is. */
3513 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3514 gimple_add_tmp_var (decl);
3516 gimplify_and_add (decl_s, pre_p);
3517 *expr_p = decl;
3518 return GS_OK;
3521 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3522 return a new CONSTRUCTOR if something changed. */
3524 static tree
3525 optimize_compound_literals_in_ctor (tree orig_ctor)
3527 tree ctor = orig_ctor;
3528 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3529 unsigned int idx, num = VEC_length (constructor_elt, elts);
3531 for (idx = 0; idx < num; idx++)
3533 tree value = VEC_index (constructor_elt, elts, idx)->value;
3534 tree newval = value;
3535 if (TREE_CODE (value) == CONSTRUCTOR)
3536 newval = optimize_compound_literals_in_ctor (value);
3537 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3539 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3540 tree decl = DECL_EXPR_DECL (decl_s);
3541 tree init = DECL_INITIAL (decl);
3543 if (!TREE_ADDRESSABLE (value)
3544 && !TREE_ADDRESSABLE (decl)
3545 && init)
3546 newval = optimize_compound_literals_in_ctor (init);
3548 if (newval == value)
3549 continue;
3551 if (ctor == orig_ctor)
3553 ctor = copy_node (orig_ctor);
3554 CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3555 elts = CONSTRUCTOR_ELTS (ctor);
3557 VEC_index (constructor_elt, elts, idx)->value = newval;
3559 return ctor;
3564 /* A subroutine of gimplify_modify_expr. Break out elements of a
3565 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3567 Note that we still need to clear any elements that don't have explicit
3568 initializers, so if not all elements are initialized we keep the
3569 original MODIFY_EXPR; we just remove all of the constructor elements.
3571 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3572 GS_ERROR if we would have to create a temporary when gimplifying
3573 this constructor. Otherwise, return GS_OK.
3575 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3577 static enum gimplify_status
3578 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3579 bool want_value, bool notify_temp_creation)
3581 tree object, ctor, type;
3582 enum gimplify_status ret;
3583 VEC(constructor_elt,gc) *elts;
3585 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3587 if (!notify_temp_creation)
3589 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3590 is_gimple_lvalue, fb_lvalue);
3591 if (ret == GS_ERROR)
3592 return ret;
3595 object = TREE_OPERAND (*expr_p, 0);
3596 ctor = TREE_OPERAND (*expr_p, 1) =
3597 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3598 type = TREE_TYPE (ctor);
3599 elts = CONSTRUCTOR_ELTS (ctor);
3600 ret = GS_ALL_DONE;
3602 switch (TREE_CODE (type))
3604 case RECORD_TYPE:
3605 case UNION_TYPE:
3606 case QUAL_UNION_TYPE:
3607 case ARRAY_TYPE:
3609 struct gimplify_init_ctor_preeval_data preeval_data;
3610 HOST_WIDE_INT num_type_elements, num_ctor_elements;
3611 HOST_WIDE_INT num_nonzero_elements;
3612 bool cleared, valid_const_initializer;
3614 /* Aggregate types must lower constructors to initialization of
3615 individual elements. The exception is that a CONSTRUCTOR node
3616 with no elements indicates zero-initialization of the whole. */
3617 if (VEC_empty (constructor_elt, elts))
3619 if (notify_temp_creation)
3620 return GS_OK;
3621 break;
3624 /* Fetch information about the constructor to direct later processing.
3625 We might want to make static versions of it in various cases, and
3626 can only do so if it is known to be a valid constant initializer. */
3627 valid_const_initializer
3628 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3629 &num_ctor_elements, &cleared);
3631 /* If a const aggregate variable is being initialized, then it
3632 should never be a loss to promote the variable to be static. */
3633 if (valid_const_initializer
3634 && num_nonzero_elements > 1
3635 && TREE_READONLY (object)
3636 && TREE_CODE (object) == VAR_DECL
3637 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3639 if (notify_temp_creation)
3640 return GS_ERROR;
3641 DECL_INITIAL (object) = ctor;
3642 TREE_STATIC (object) = 1;
3643 if (!DECL_NAME (object))
3644 DECL_NAME (object) = create_tmp_var_name ("C");
3645 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3647 /* ??? C++ doesn't automatically append a .<number> to the
3648 assembler name, and even when it does, it looks at FE-private
3649 data structures to figure out what that number should be,
3650 which are not set for this variable. I suppose this is
3651 important for local statics for inline functions, which aren't
3652 "local" in the object file sense. So in order to get a unique
3653 TU-local symbol, we must invoke the lhd version now. */
3654 lhd_set_decl_assembler_name (object);
3656 *expr_p = NULL_TREE;
3657 break;
3660 /* If there are "lots" of initialized elements, even discounting
3661 those that are not address constants (and thus *must* be
3662 computed at runtime), then partition the constructor into
3663 constant and non-constant parts. Block copy the constant
3664 parts in, then generate code for the non-constant parts. */
3665 /* TODO. There's code in cp/typeck.c to do this. */
3667 num_type_elements = count_type_elements (type, true);
3669 /* If count_type_elements could not determine the number of type elements
3670 for a constant-sized object, assume clearing is needed.
3671 Don't do this for variable-sized objects, as store_constructor
3672 will ignore the clearing of variable-sized objects. */
3673 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3674 cleared = true;
3675 /* If there are "lots" of zeros, then block clear the object first. */
3676 else if (num_type_elements - num_nonzero_elements
3677 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3678 && num_nonzero_elements < num_type_elements/4)
3679 cleared = true;
3680 /* ??? This bit ought not be needed. For any element not present
3681 in the initializer, we should simply set them to zero. Except
3682 we'd need to *find* the elements that are not present, and that
3683 requires trickery to avoid quadratic compile-time behavior in
3684 large cases or excessive memory use in small cases. */
3685 else if (num_ctor_elements < num_type_elements)
3686 cleared = true;
3688 /* If there are "lots" of initialized elements, and all of them
3689 are valid address constants, then the entire initializer can
3690 be dropped to memory, and then memcpy'd out. Don't do this
3691 for sparse arrays, though, as it's more efficient to follow
3692 the standard CONSTRUCTOR behavior of memset followed by
3693 individual element initialization. Also don't do this for small
3694 all-zero initializers (which aren't big enough to merit
3695 clearing), and don't try to make bitwise copies of
3696 TREE_ADDRESSABLE types. */
3697 if (valid_const_initializer
3698 && !(cleared || num_nonzero_elements == 0)
3699 && !TREE_ADDRESSABLE (type))
3701 HOST_WIDE_INT size = int_size_in_bytes (type);
3702 unsigned int align;
3704 /* ??? We can still get unbounded array types, at least
3705 from the C++ front end. This seems wrong, but attempt
3706 to work around it for now. */
3707 if (size < 0)
3709 size = int_size_in_bytes (TREE_TYPE (object));
3710 if (size >= 0)
3711 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3714 /* Find the maximum alignment we can assume for the object. */
3715 /* ??? Make use of DECL_OFFSET_ALIGN. */
3716 if (DECL_P (object))
3717 align = DECL_ALIGN (object);
3718 else
3719 align = TYPE_ALIGN (type);
3721 if (size > 0
3722 && num_nonzero_elements > 1
3723 && !can_move_by_pieces (size, align))
3725 tree new_tree;
3727 if (notify_temp_creation)
3728 return GS_ERROR;
3730 new_tree = create_tmp_var_raw (type, "C");
3732 gimple_add_tmp_var (new_tree);
3733 TREE_STATIC (new_tree) = 1;
3734 TREE_READONLY (new_tree) = 1;
3735 DECL_INITIAL (new_tree) = ctor;
3736 if (align > DECL_ALIGN (new_tree))
3738 DECL_ALIGN (new_tree) = align;
3739 DECL_USER_ALIGN (new_tree) = 1;
3741 walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);
3743 TREE_OPERAND (*expr_p, 1) = new_tree;
3745 /* This is no longer an assignment of a CONSTRUCTOR, but
3746 we still may have processing to do on the LHS. So
3747 pretend we didn't do anything here to let that happen. */
3748 return GS_UNHANDLED;
3752 if (notify_temp_creation)
3753 return GS_OK;
3755 /* If there are nonzero elements, pre-evaluate to capture elements
3756 overlapping with the lhs into temporaries. We must do this before
3757 clearing to fetch the values before they are zeroed-out. */
3758 if (num_nonzero_elements > 0)
3760 preeval_data.lhs_base_decl = get_base_address (object);
3761 if (!DECL_P (preeval_data.lhs_base_decl))
3762 preeval_data.lhs_base_decl = NULL;
3763 preeval_data.lhs_alias_set = get_alias_set (object);
3765 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3766 pre_p, post_p, &preeval_data);
3769 if (cleared)
3771 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3772 Note that we still have to gimplify, in order to handle the
3773 case of variable sized types. Avoid shared tree structures. */
3774 CONSTRUCTOR_ELTS (ctor) = NULL;
3775 TREE_SIDE_EFFECTS (ctor) = 0;
3776 object = unshare_expr (object);
3777 gimplify_stmt (expr_p, pre_p);
3780 /* If we have not block cleared the object, or if there are nonzero
3781 elements in the constructor, add assignments to the individual
3782 scalar fields of the object. */
3783 if (!cleared || num_nonzero_elements > 0)
3784 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3786 *expr_p = NULL_TREE;
3788 break;
3790 case COMPLEX_TYPE:
3792 tree r, i;
3794 if (notify_temp_creation)
3795 return GS_OK;
3797 /* Extract the real and imaginary parts out of the ctor. */
3798 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3799 r = VEC_index (constructor_elt, elts, 0)->value;
3800 i = VEC_index (constructor_elt, elts, 1)->value;
3801 if (r == NULL || i == NULL)
3803 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3804 if (r == NULL)
3805 r = zero;
3806 if (i == NULL)
3807 i = zero;
3810 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3811 represent creation of a complex value. */
3812 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3814 ctor = build_complex (type, r, i);
3815 TREE_OPERAND (*expr_p, 1) = ctor;
3817 else
3819 ctor = build2 (COMPLEX_EXPR, type, r, i);
3820 TREE_OPERAND (*expr_p, 1) = ctor;
3821 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3822 pre_p,
3823 post_p,
3824 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3825 fb_rvalue);
3828 break;
3830 case VECTOR_TYPE:
3832 unsigned HOST_WIDE_INT ix;
3833 constructor_elt *ce;
3835 if (notify_temp_creation)
3836 return GS_OK;
3838 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3839 if (TREE_CONSTANT (ctor))
3841 bool constant_p = true;
3842 tree value;
3844 /* Even when ctor is constant, it might contain non-*_CST
3845 elements, such as addresses or trapping values like
3846 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3847 in VECTOR_CST nodes. */
3848 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3849 if (!CONSTANT_CLASS_P (value))
3851 constant_p = false;
3852 break;
3855 if (constant_p)
3857 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3858 break;
3861 /* Don't reduce an initializer constant even if we can't
3862 make a VECTOR_CST. It won't do anything for us, and it'll
3863 prevent us from representing it as a single constant. */
3864 if (initializer_constant_valid_p (ctor, type))
3865 break;
3867 TREE_CONSTANT (ctor) = 0;
3870 /* Vector types use CONSTRUCTOR all the way through gimple
3871 compilation as a general initializer. */
3872 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3874 enum gimplify_status tret;
3875 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3876 fb_rvalue);
3877 if (tret == GS_ERROR)
3878 ret = GS_ERROR;
3880 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3881 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3883 break;
3885 default:
3886 /* So how did we get a CONSTRUCTOR for a scalar type? */
3887 gcc_unreachable ();
3890 if (ret == GS_ERROR)
3891 return GS_ERROR;
3892 else if (want_value)
3894 *expr_p = object;
3895 return GS_OK;
3897 else
3899 /* If we have gimplified both sides of the initializer but have
3900 not emitted an assignment, do so now. */
3901 if (*expr_p)
3903 tree lhs = TREE_OPERAND (*expr_p, 0);
3904 tree rhs = TREE_OPERAND (*expr_p, 1);
3905 gimple init = gimple_build_assign (lhs, rhs);
3906 gimplify_seq_add_stmt (pre_p, init);
3907 *expr_p = NULL;
3910 return GS_ALL_DONE;
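/* Illustrative sketch, not part of gimplify.c; the function and array
   names below are hypothetical.  A read-only local aggregate with a
   valid constant initializer and more than one nonzero element may be
   promoted by the code above to a static: DECL_INITIAL keeps the
   CONSTRUCTOR and TREE_STATIC is set, so no element-wise stores are
   emitted.  */
int
sum_corners (void)
{
  const int table[4] = { 1, 2, 3, 4 };
  return table[0] + table[3];
}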
3914 /* Given a pointer value OP0, return a simplified version of an
3915 indirection through OP0, or NULL_TREE if no simplification is
3916 possible. Note that the resulting type may be different from
3917 the type pointed to in the sense that it is still compatible
3918 from the langhooks point of view. */
3920 tree
3921 gimple_fold_indirect_ref (tree t)
3923 tree type = TREE_TYPE (TREE_TYPE (t));
3924 tree sub = t;
3925 tree subtype;
3927 STRIP_USELESS_TYPE_CONVERSION (sub);
3928 subtype = TREE_TYPE (sub);
3929 if (!POINTER_TYPE_P (subtype))
3930 return NULL_TREE;
3932 if (TREE_CODE (sub) == ADDR_EXPR)
3934 tree op = TREE_OPERAND (sub, 0);
3935 tree optype = TREE_TYPE (op);
3936 /* *&p => p */
3937 if (useless_type_conversion_p (type, optype))
3938 return op;
3940 /* *(foo *)&fooarray => fooarray[0] */
3941 if (TREE_CODE (optype) == ARRAY_TYPE
3942 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3944 tree type_domain = TYPE_DOMAIN (optype);
3945 tree min_val = size_zero_node;
3946 if (type_domain && TYPE_MIN_VALUE (type_domain))
3947 min_val = TYPE_MIN_VALUE (type_domain);
3948 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3952 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3953 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3954 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3956 tree type_domain;
3957 tree min_val = size_zero_node;
3958 tree osub = sub;
3959 sub = gimple_fold_indirect_ref (sub);
3960 if (! sub)
3961 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3962 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3963 if (type_domain && TYPE_MIN_VALUE (type_domain))
3964 min_val = TYPE_MIN_VALUE (type_domain);
3965 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3968 return NULL_TREE;
3971 /* Given a pointer value OP0, return a simplified version of an
3972 indirection through OP0, or NULL_TREE if no simplification is
3973 possible. This may only be applied to a rhs of an expression.
3974 Note that the resulting type may be different from the type pointed
3975 to in the sense that it is still compatible from the langhooks
3976 point of view. */
3978 static tree
3979 gimple_fold_indirect_ref_rhs (tree t)
3981 return gimple_fold_indirect_ref (t);
3984 /* Subroutine of gimplify_modify_expr to do simplifications of
3985 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
3986 something changes. */
3988 static enum gimplify_status
3989 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
3990 gimple_seq *pre_p, gimple_seq *post_p,
3991 bool want_value)
3993 enum gimplify_status ret = GS_OK;
3995 while (ret != GS_UNHANDLED)
3996 switch (TREE_CODE (*from_p))
3998 case VAR_DECL:
3999 /* If we're assigning from a read-only variable initialized with
4000 a constructor, do the direct assignment from the constructor,
4001 but only if neither source nor target is volatile, since this
4002 latter assignment might end up being done on a per-field basis. */
4003 if (DECL_INITIAL (*from_p)
4004 && TREE_READONLY (*from_p)
4005 && !TREE_THIS_VOLATILE (*from_p)
4006 && !TREE_THIS_VOLATILE (*to_p)
4007 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4009 tree old_from = *from_p;
4011 /* Move the constructor into the RHS. */
4012 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4014 /* Let's see if gimplify_init_constructor will need to put
4015 it in memory. If so, revert the change. */
4016 ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
4017 if (ret == GS_ERROR)
4019 *from_p = old_from;
4020 /* Fall through. */
4022 else
4024 ret = GS_OK;
4025 break;
4028 ret = GS_UNHANDLED;
4029 break;
4030 case INDIRECT_REF:
4032 /* If we have code like
4034 *(const A*)(A*)&x
4036 where the type of "x" is a (possibly cv-qualified variant
4037 of "A"), treat the entire expression as identical to "x".
4038 This kind of code arises in C++ when an object is bound
4039 to a const reference, and if "x" is a TARGET_EXPR we want
4040 to take advantage of the optimization below. */
4041 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4042 if (t)
4044 *from_p = t;
4045 ret = GS_OK;
4047 else
4048 ret = GS_UNHANDLED;
4049 break;
4052 case TARGET_EXPR:
4054 /* If we are initializing something from a TARGET_EXPR, strip the
4055 TARGET_EXPR and initialize it directly, if possible. This can't
4056 be done if the initializer is void, since that implies that the
4057 temporary is set in some non-trivial way.
4059 ??? What about code that pulls out the temp and uses it
4060 elsewhere? I think that such code never uses the TARGET_EXPR as
4061 an initializer. If I'm wrong, we'll die because the temp won't
4062 have any RTL. In that case, I guess we'll need to replace
4063 references somehow. */
4064 tree init = TARGET_EXPR_INITIAL (*from_p);
4066 if (init
4067 && !VOID_TYPE_P (TREE_TYPE (init)))
4069 *from_p = init;
4070 ret = GS_OK;
4072 else
4073 ret = GS_UNHANDLED;
4075 break;
4077 case COMPOUND_EXPR:
4078 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4079 caught. */
4080 gimplify_compound_expr (from_p, pre_p, true);
4081 ret = GS_OK;
4082 break;
4084 case CONSTRUCTOR:
4085 /* If we're initializing from a CONSTRUCTOR, break this into
4086 individual MODIFY_EXPRs. */
4087 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4088 false);
4090 case COND_EXPR:
4091 /* If we're assigning to a non-register type, push the assignment
4092 down into the branches. This is mandatory for ADDRESSABLE types,
4093 since we cannot generate temporaries for such, but it saves a
4094 copy in other cases as well. */
4095 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4097 /* This code should mirror the code in gimplify_cond_expr. */
4098 enum tree_code code = TREE_CODE (*expr_p);
4099 tree cond = *from_p;
4100 tree result = *to_p;
4102 ret = gimplify_expr (&result, pre_p, post_p,
4103 is_gimple_lvalue, fb_lvalue);
4104 if (ret != GS_ERROR)
4105 ret = GS_OK;
4107 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4108 TREE_OPERAND (cond, 1)
4109 = build2 (code, void_type_node, result,
4110 TREE_OPERAND (cond, 1));
4111 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4112 TREE_OPERAND (cond, 2)
4113 = build2 (code, void_type_node, unshare_expr (result),
4114 TREE_OPERAND (cond, 2));
4116 TREE_TYPE (cond) = void_type_node;
4117 recalculate_side_effects (cond);
4119 if (want_value)
4121 gimplify_and_add (cond, pre_p);
4122 *expr_p = unshare_expr (result);
4124 else
4125 *expr_p = cond;
4126 return ret;
4128 else
4129 ret = GS_UNHANDLED;
4130 break;
4132 case CALL_EXPR:
4133 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4134 return slot so that we don't generate a temporary. */
4135 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4136 && aggregate_value_p (*from_p, *from_p))
4138 bool use_target;
4140 if (!(rhs_predicate_for (*to_p))(*from_p))
4141 /* If we need a temporary, *to_p isn't accurate. */
4142 use_target = false;
4143 else if (TREE_CODE (*to_p) == RESULT_DECL
4144 && DECL_NAME (*to_p) == NULL_TREE
4145 && needs_to_live_in_memory (*to_p))
4146 /* It's OK to use the return slot directly unless it's an NRV. */
4147 use_target = true;
4148 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4149 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4150 /* Don't force regs into memory. */
4151 use_target = false;
4152 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4153 /* It's OK to use the target directly if it's being
4154 initialized. */
4155 use_target = true;
4156 else if (!is_gimple_non_addressable (*to_p))
4157 /* Don't use the original target if it's already addressable;
4158 if its address escapes, and the called function uses the
4159 NRV optimization, a conforming program could see *to_p
4160 change before the called function returns; see c++/19317.
4161 When optimizing, the return_slot pass marks more functions
4162 as safe after we have escape info. */
4163 use_target = false;
4164 else
4165 use_target = true;
4167 if (use_target)
4169 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4170 mark_addressable (*to_p);
4174 ret = GS_UNHANDLED;
4175 break;
4177 /* If we're initializing from a container, push the initialization
4178 inside it. */
4179 case CLEANUP_POINT_EXPR:
4180 case BIND_EXPR:
4181 case STATEMENT_LIST:
4183 tree wrap = *from_p;
4184 tree t;
4186 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4187 fb_lvalue);
4188 if (ret != GS_ERROR)
4189 ret = GS_OK;
4191 t = voidify_wrapper_expr (wrap, *expr_p);
4192 gcc_assert (t == *expr_p);
4194 if (want_value)
4196 gimplify_and_add (wrap, pre_p);
4197 *expr_p = unshare_expr (*to_p);
4199 else
4200 *expr_p = wrap;
4201 return GS_OK;
4204 case COMPOUND_LITERAL_EXPR:
4206 tree complit = TREE_OPERAND (*expr_p, 1);
4207 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4208 tree decl = DECL_EXPR_DECL (decl_s);
4209 tree init = DECL_INITIAL (decl);
4211 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4212 into struct T x = { 0, 1, 2 } if the address of the
4213 compound literal has never been taken. */
4214 if (!TREE_ADDRESSABLE (complit)
4215 && !TREE_ADDRESSABLE (decl)
4216 && init)
4218 *expr_p = copy_node (*expr_p);
4219 TREE_OPERAND (*expr_p, 1) = init;
4220 return GS_OK;
4224 default:
4225 ret = GS_UNHANDLED;
4226 break;
4229 return ret;
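/* Illustrative sketch, not part of gimplify.c; the type and variable
   names are hypothetical.  Assigning a COND_EXPR of non-register
   (aggregate) type is pushed into both arms by the COND_EXPR case
   above, so the assignment below becomes roughly
   "if (c) r = p; else r = q;" instead of going through a temporary.  */
struct big { int v[16]; };
extern struct big p, q, r;

void
pick (int c)
{
  r = c ? p : q;
}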
4233 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4234 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4235 DECL_GIMPLE_REG_P set.
4237 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4238 other, unmodified part of the complex object just before the total store.
4239 As a consequence, if the object is still uninitialized, an undefined value
4240 will be loaded into a register, which may result in a spurious exception
4241 if the register is floating-point and the value happens to be a signaling
4242 NaN for example. Then the fully-fledged complex operations lowering pass
4243 followed by a DCE pass are necessary in order to fix things up. */
4245 static enum gimplify_status
4246 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4247 bool want_value)
4249 enum tree_code code, ocode;
4250 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4252 lhs = TREE_OPERAND (*expr_p, 0);
4253 rhs = TREE_OPERAND (*expr_p, 1);
4254 code = TREE_CODE (lhs);
4255 lhs = TREE_OPERAND (lhs, 0);
4257 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4258 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4259 other = get_formal_tmp_var (other, pre_p);
4261 realpart = code == REALPART_EXPR ? rhs : other;
4262 imagpart = code == REALPART_EXPR ? other : rhs;
4264 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4265 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4266 else
4267 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4269 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4270 *expr_p = (want_value) ? rhs : NULL_TREE;
4272 return GS_ALL_DONE;
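/* Illustrative sketch, not part of gimplify.c; the names are
   hypothetical.  If Z is a gimple register, the partial store below is
   promoted to a total store along the lines of
   "tmp = __imag__ z; z = COMPLEX_EXPR <r, tmp>;" as described in the
   note above.  */
double
set_real (_Complex double z, double r)
{
  __real__ z = r;
  return __imag__ z;
}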
4276 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4278 modify_expr
4279 : varname '=' rhs
4280 | '*' ID '=' rhs
4282 PRE_P points to the list where side effects that must happen before
4283 *EXPR_P should be stored.
4285 POST_P points to the list where side effects that must happen after
4286 *EXPR_P should be stored.
4288 WANT_VALUE is nonzero iff we want to use the value of this expression
4289 in another expression. */
4291 static enum gimplify_status
4292 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4293 bool want_value)
4295 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4296 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4297 enum gimplify_status ret = GS_UNHANDLED;
4298 gimple assign;
4300 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4301 || TREE_CODE (*expr_p) == INIT_EXPR);
4303 /* Insert pointer conversions required by the middle-end that are not
4304 required by the frontend. This fixes middle-end type checking,
4305 for example for gcc.dg/redecl-6.c. */
4306 if (POINTER_TYPE_P (TREE_TYPE (*to_p))
4307 && lang_hooks.types_compatible_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4309 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4310 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4311 *from_p = fold_convert (TREE_TYPE (*to_p), *from_p);
4314 /* See if any simplifications can be done based on what the RHS is. */
4315 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4316 want_value);
4317 if (ret != GS_UNHANDLED)
4318 return ret;
4320 /* For zero-sized types, only gimplify the left hand side and right hand
4321 side as statements and throw away the assignment. Do this after
4322 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4323 types properly. */
4324 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4326 gimplify_stmt (from_p, pre_p);
4327 gimplify_stmt (to_p, pre_p);
4328 *expr_p = NULL_TREE;
4329 return GS_ALL_DONE;
4332 /* If the value being copied is of variable width, compute the length
4333 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4334 before gimplifying any of the operands so that we can resolve any
4335 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4336 the size of the expression to be copied, not of the destination, so
4337 that is what we must do here. */
4338 maybe_with_size_expr (from_p);
4340 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4341 if (ret == GS_ERROR)
4342 return ret;
4344 /* As a special case, we have to temporarily allow for assignments
4345 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4346 a toplevel statement, when gimplifying the GENERIC expression
4347 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4348 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4350 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4351 prevent gimplify_expr from trying to create a new temporary for
4352 foo's LHS, we tell it that it should only gimplify until it
4353 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4354 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4355 and all we need to do here is set 'a' to be its LHS. */
4356 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4357 fb_rvalue);
4358 if (ret == GS_ERROR)
4359 return ret;
4361 /* Now see if the above changed *from_p to something we handle specially. */
4362 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4363 want_value);
4364 if (ret != GS_UNHANDLED)
4365 return ret;
4367 /* If we've got a variable sized assignment between two lvalues (i.e. does
4368 not involve a call), then we can make things a bit more straightforward
4369 by converting the assignment to memcpy or memset. */
4370 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4372 tree from = TREE_OPERAND (*from_p, 0);
4373 tree size = TREE_OPERAND (*from_p, 1);
4375 if (TREE_CODE (from) == CONSTRUCTOR)
4376 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4378 if (is_gimple_addressable (from))
4380 *from_p = from;
4381 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4382 pre_p);
4386 /* Transform partial stores to non-addressable complex variables into
4387 total stores. This allows us to use real instead of virtual operands
4388 for these variables, which improves optimization. */
4389 if ((TREE_CODE (*to_p) == REALPART_EXPR
4390 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4391 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4392 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4394 /* Try to alleviate the effects of the gimplification creating artificial
4395 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4396 if (!gimplify_ctxp->into_ssa
4397 && DECL_P (*from_p)
4398 && DECL_IGNORED_P (*from_p)
4399 && DECL_P (*to_p)
4400 && !DECL_IGNORED_P (*to_p))
4402 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4403 DECL_NAME (*from_p)
4404 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4405 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4406 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4409 if (TREE_CODE (*from_p) == CALL_EXPR)
4411 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4412 instead of a GIMPLE_ASSIGN. */
4413 assign = gimple_build_call_from_tree (*from_p);
4414 gimple_call_set_lhs (assign, *to_p);
4416 else
4418 assign = gimple_build_assign (*to_p, *from_p);
4419 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4422 gimplify_seq_add_stmt (pre_p, assign);
4424 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4426 /* If we've somehow already got an SSA_NAME on the LHS, then
4427 we've probably modified it twice. Not good. */
4428 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4429 *to_p = make_ssa_name (*to_p, assign);
4430 gimple_set_lhs (assign, *to_p);
4433 if (want_value)
4435 *expr_p = unshare_expr (*to_p);
4436 return GS_OK;
4438 else
4439 *expr_p = NULL;
4441 return GS_ALL_DONE;
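/* Illustrative sketch, not part of gimplify.c; the call and variable
   names are hypothetical.  When the gimplified RHS is still a
   CALL_EXPR, the code above emits a single GIMPLE_CALL with the LHS
   attached via gimple_call_set_lhs, rather than a GIMPLE_ASSIGN, so
   the assignment below becomes one call statement in the sequence.  */
extern int get_count (void);
int count;

void
refresh (void)
{
  count = get_count ();
}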
4444 /* Gimplify a comparison between two variable-sized objects. Do this
4445 with a call to BUILT_IN_MEMCMP. */
4447 static enum gimplify_status
4448 gimplify_variable_sized_compare (tree *expr_p)
4450 tree op0 = TREE_OPERAND (*expr_p, 0);
4451 tree op1 = TREE_OPERAND (*expr_p, 1);
4452 tree t, arg, dest, src;
4454 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4455 arg = unshare_expr (arg);
4456 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4457 src = build_fold_addr_expr (op1);
4458 dest = build_fold_addr_expr (op0);
4459 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4460 t = build_call_expr (t, 3, dest, src, arg);
4461 *expr_p
4462 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4464 return GS_OK;
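/* Illustrative sketch, not part of gimplify.c; the VLA parameters are a
   hypothetical stand-in for variable-sized objects.  The rewriting
   above produces the form written out explicitly below: a call to the
   BUILT_IN_MEMCMP decl whose result is compared against zero with the
   original comparison code.  */
#include <string.h>

int
same_contents (int n, char a[n], char b[n])
{
  return memcmp (a, b, n) == 0;
}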
4467 /* Gimplify a comparison between two aggregate objects of integral scalar
4468 mode as a comparison between the bitwise equivalent scalar values. */
4470 static enum gimplify_status
4471 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4473 tree op0 = TREE_OPERAND (*expr_p, 0);
4474 tree op1 = TREE_OPERAND (*expr_p, 1);
4476 tree type = TREE_TYPE (op0);
4477 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4479 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
4480 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
4482 *expr_p
4483 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4485 return GS_OK;
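/* Illustrative sketch, not part of gimplify.c; the struct is a
   hypothetical example of an aggregate whose TYPE_MODE is an integral
   scalar mode on common targets.  A comparison of two such objects is
   rewritten as a comparison of the VIEW_CONVERTed scalar values, i.e.
   a single integer compare rather than a field-by-field compare.  */
struct rgba { unsigned char r, g, b, a; };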
4488 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4489 points to the expression to gimplify.
4491 Expressions of the form 'a && b' are gimplified to:
4493 a && b ? true : false
4495 LOCUS is the source location to be put on the generated COND_EXPR.
4496 gimplify_cond_expr will do the rest. */
4498 static enum gimplify_status
4499 gimplify_boolean_expr (tree *expr_p, location_t locus)
4501 /* Preserve the original type of the expression. */
4502 tree type = TREE_TYPE (*expr_p);
4504 *expr_p = build3 (COND_EXPR, type, *expr_p,
4505 fold_convert (type, boolean_true_node),
4506 fold_convert (type, boolean_false_node));
4508 SET_EXPR_LOCATION (*expr_p, locus);
4510 return GS_OK;
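/* Illustrative sketch, not part of gimplify.c; the function is
   hypothetical.  The COND_EXPR built above preserves the original type
   of the expression, so the int-valued use below becomes roughly
   "a && b ? (int) 1 : (int) 0" before gimplify_cond_expr lowers it to
   branches.  */
int
both (int a, int b)
{
  return a && b;
}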
4513 /* Gimplifies an expression sequence. This function gimplifies each
4514 expression and re-writes the original expression with the last
4515 expression of the sequence in GIMPLE form.
4517 PRE_P points to the list where the side effects for all the
4518 expressions in the sequence will be emitted.
4520 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4522 static enum gimplify_status
4523 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4525 tree t = *expr_p;
4529 tree *sub_p = &TREE_OPERAND (t, 0);
4531 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4532 gimplify_compound_expr (sub_p, pre_p, false);
4533 else
4534 gimplify_stmt (sub_p, pre_p);
4536 t = TREE_OPERAND (t, 1);
4538 while (TREE_CODE (t) == COMPOUND_EXPR);
4540 *expr_p = t;
4541 if (want_value)
4542 return GS_OK;
4543 else
4545 gimplify_stmt (expr_p, pre_p);
4546 return GS_ALL_DONE;
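/* Illustrative sketch, not part of gimplify.c; the variables are
   hypothetical.  Each operand of the COMPOUND_EXPR chain is gimplified
   as a statement and the whole expression is replaced by its last
   operand, so the expression below becomes roughly
   "i = i + 1; j = j + 1; k = i + j;".  */
int i, j, k;

void
sequence (void)
{
  k = (i++, j++, i + j);
}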
4551 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4552 gimplify. After gimplification, EXPR_P will point to a new temporary
4553 that holds the original value of the SAVE_EXPR node.
4555 PRE_P points to the list where side effects that must happen before
4556 *EXPR_P should be stored. */
4558 static enum gimplify_status
4559 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4561 enum gimplify_status ret = GS_ALL_DONE;
4562 tree val;
4564 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4565 val = TREE_OPERAND (*expr_p, 0);
4567 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4568 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4570 /* The operand may be a void-valued expression such as SAVE_EXPRs
4571 generated by the Java frontend for class initialization. It is
4572 being executed only for its side-effects. */
4573 if (TREE_TYPE (val) == void_type_node)
4575 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4576 is_gimple_stmt, fb_none);
4577 val = NULL;
4579 else
4580 val = get_initialized_tmp_var (val, pre_p, post_p);
4582 TREE_OPERAND (*expr_p, 0) = val;
4583 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4586 *expr_p = val;
4588 return ret;
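/* Illustrative sketch, not part of gimplify.c, and the trigger is an
   assumption: size expressions of variably modified types are one
   common source of SAVE_EXPRs.  The code above evaluates the operand
   once into a temporary and sets SAVE_EXPR_RESOLVED_P so that later
   occurrences reuse the same temporary.  */
void
fill (int n, int (*out)[n + 1])
{
  (*out)[0] = n;
}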
4591 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
4593 unary_expr
4594 : ...
4595 | '&' varname
4598 PRE_P points to the list where side effects that must happen before
4599 *EXPR_P should be stored.
4601 POST_P points to the list where side effects that must happen after
4602 *EXPR_P should be stored. */
4604 static enum gimplify_status
4605 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4607 tree expr = *expr_p;
4608 tree op0 = TREE_OPERAND (expr, 0);
4609 enum gimplify_status ret;
4611 switch (TREE_CODE (op0))
4613 case INDIRECT_REF:
4614 case MISALIGNED_INDIRECT_REF:
4615 do_indirect_ref:
4616 /* Check if we are dealing with an expression of the form '&*ptr'.
4617 While the front end folds away '&*ptr' into 'ptr', these
4618 expressions may be generated internally by the compiler (e.g.,
4619 builtins like __builtin_va_end). */
4620 /* Caution: the silent array decomposition semantics we allow for
4621 ADDR_EXPR means we can't always discard the pair. */
4622 /* Gimplification of the ADDR_EXPR operand may drop
4623 cv-qualification conversions, so make sure we add them if
4624 needed. */
4626 tree op00 = TREE_OPERAND (op0, 0);
4627 tree t_expr = TREE_TYPE (expr);
4628 tree t_op00 = TREE_TYPE (op00);
4630 if (!useless_type_conversion_p (t_expr, t_op00))
4631 op00 = fold_convert (TREE_TYPE (expr), op00);
4632 *expr_p = op00;
4633 ret = GS_OK;
4635 break;
4637 case VIEW_CONVERT_EXPR:
4638 /* Take the address of our operand and then convert it to the type of
4639 this ADDR_EXPR.
4641 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4642 all clear. The impact of this transformation is even less clear. */
4644 /* If the operand is a useless conversion, look through it. Doing so
4645 guarantees that the ADDR_EXPR and its operand will remain of the
4646 same type. */
4647 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4648 op0 = TREE_OPERAND (op0, 0);
4650 *expr_p = fold_convert (TREE_TYPE (expr),
4651 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
4652 ret = GS_OK;
4653 break;
4655 default:
4656 /* We use fb_either here because the C frontend sometimes takes
4657 the address of a call that returns a struct; see
4658 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4659 the implied temporary explicit. */
4661 /* Mark the RHS addressable. */
4662 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4663 is_gimple_addressable, fb_either);
4664 if (ret == GS_ERROR)
4665 break;
4667 /* We cannot rely on making the RHS addressable if it is
4668 a temporary created by gimplification. In this case create a
4669 new temporary that is initialized by a copy (which will
4670 become a store after we mark it addressable).
4671 This mostly happens if the frontend passed us something that
4672 it could not mark addressable yet, like a fortran
4673 pass-by-reference parameter (int) floatvar. */
4674 if (is_gimple_reg (TREE_OPERAND (expr, 0)))
4675 TREE_OPERAND (expr, 0)
4676 = get_initialized_tmp_var (TREE_OPERAND (expr, 0), pre_p, post_p);
4678 op0 = TREE_OPERAND (expr, 0);
4680 /* For various reasons, the gimplification of the expression
4681 may have made a new INDIRECT_REF. */
4682 if (TREE_CODE (op0) == INDIRECT_REF)
4683 goto do_indirect_ref;
4685 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4686 recompute_tree_invariant_for_addr_expr (expr);
4688 mark_addressable (TREE_OPERAND (expr, 0));
4689 break;
4692 return ret;
4695 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
4696 values; output operands should be gimple lvalues. */
4698 static enum gimplify_status
4699 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4701 tree expr;
4702 int noutputs;
4703 const char **oconstraints;
4704 int i;
4705 tree link;
4706 const char *constraint;
4707 bool allows_mem, allows_reg, is_inout;
4708 enum gimplify_status ret, tret;
4709 gimple stmt;
4710 VEC(tree, gc) *inputs;
4711 VEC(tree, gc) *outputs;
4712 VEC(tree, gc) *clobbers;
4713 tree link_next;
4715 expr = *expr_p;
4716 noutputs = list_length (ASM_OUTPUTS (expr));
4717 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4719 inputs = outputs = clobbers = NULL;
4721 ret = GS_ALL_DONE;
4722 link_next = NULL_TREE;
4723 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4725 bool ok;
4726 size_t constraint_len;
4728 link_next = TREE_CHAIN (link);
4730 oconstraints[i]
4731 = constraint
4732 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4733 constraint_len = strlen (constraint);
4734 if (constraint_len == 0)
4735 continue;
4737 ok = parse_output_constraint (&constraint, i, 0, 0,
4738 &allows_mem, &allows_reg, &is_inout);
4739 if (!ok)
4741 ret = GS_ERROR;
4742 is_inout = false;
4745 if (!allows_reg && allows_mem)
4746 mark_addressable (TREE_VALUE (link));
4748 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4749 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4750 fb_lvalue | fb_mayfail);
4751 if (tret == GS_ERROR)
4753 error ("invalid lvalue in asm output %d", i);
4754 ret = tret;
4757 VEC_safe_push (tree, gc, outputs, link);
4758 TREE_CHAIN (link) = NULL_TREE;
4760 if (is_inout)
4762 /* An input/output operand. To give the optimizers more
4763 flexibility, split it into separate input and output
4764 operands. */
4765 tree input;
4766 char buf[10];
4768 /* Turn the in/out constraint into an output constraint. */
4769 char *p = xstrdup (constraint);
4770 p[0] = '=';
4771 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4773 /* And add a matching input constraint. */
4774 if (allows_reg)
4776 sprintf (buf, "%d", i);
4778 /* If there are multiple alternatives in the constraint,
4779 handle each of them individually. Those that allow a register
4780 will be replaced with the operand number; the others will stay
4781 unchanged. */
4782 if (strchr (p, ',') != NULL)
4784 size_t len = 0, buflen = strlen (buf);
4785 char *beg, *end, *str, *dst;
4787 for (beg = p + 1;;)
4789 end = strchr (beg, ',');
4790 if (end == NULL)
4791 end = strchr (beg, '\0');
4792 if ((size_t) (end - beg) < buflen)
4793 len += buflen + 1;
4794 else
4795 len += end - beg + 1;
4796 if (*end)
4797 beg = end + 1;
4798 else
4799 break;
4802 str = (char *) alloca (len);
4803 for (beg = p + 1, dst = str;;)
4805 const char *tem;
4806 bool mem_p, reg_p, inout_p;
4808 end = strchr (beg, ',');
4809 if (end)
4810 *end = '\0';
4811 beg[-1] = '=';
4812 tem = beg - 1;
4813 parse_output_constraint (&tem, i, 0, 0,
4814 &mem_p, &reg_p, &inout_p);
4815 if (dst != str)
4816 *dst++ = ',';
4817 if (reg_p)
4819 memcpy (dst, buf, buflen);
4820 dst += buflen;
4822 else
4824 if (end)
4825 len = end - beg;
4826 else
4827 len = strlen (beg);
4828 memcpy (dst, beg, len);
4829 dst += len;
4831 if (end)
4832 beg = end + 1;
4833 else
4834 break;
4836 *dst = '\0';
4837 input = build_string (dst - str, str);
4839 else
4840 input = build_string (strlen (buf), buf);
4842 else
4843 input = build_string (constraint_len - 1, constraint + 1);
4845 free (p);
4847 input = build_tree_list (build_tree_list (NULL_TREE, input),
4848 unshare_expr (TREE_VALUE (link)));
4849 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4853 link_next = NULL_TREE;
4854 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4856 link_next = TREE_CHAIN (link);
4857 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4858 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4859 oconstraints, &allows_mem, &allows_reg);
4861 /* If we can't make copies, we can only accept memory. */
4862 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4864 if (allows_mem)
4865 allows_reg = 0;
4866 else
4868 error ("impossible constraint in %<asm%>");
4869 error ("non-memory input %d must stay in memory", i);
4870 return GS_ERROR;
4874 /* If the operand is a memory input, it should be an lvalue. */
4875 if (!allows_reg && allows_mem)
4877 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4878 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4879 mark_addressable (TREE_VALUE (link));
4880 if (tret == GS_ERROR)
4882 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
4883 input_location = EXPR_LOCATION (TREE_VALUE (link));
4884 error ("memory input %d is not directly addressable", i);
4885 ret = tret;
4888 else
4890 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4891 is_gimple_asm_val, fb_rvalue);
4892 if (tret == GS_ERROR)
4893 ret = tret;
4896 TREE_CHAIN (link) = NULL_TREE;
4897 VEC_safe_push (tree, gc, inputs, link);
4900 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
4901 VEC_safe_push (tree, gc, clobbers, link);
4903 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
4904 inputs, outputs, clobbers);
4906 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
4907 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
4909 gimplify_seq_add_stmt (pre_p, stmt);
4911 return ret;
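/* Illustrative sketch, not part of gimplify.c; the x86 mnemonic is only
   an example.  An in/out "+r" operand is split by the code above into
   an "=r" output plus a matching numeric input, so the statement below
   is handled as if it were written
   __asm__ ("incl %0" : "=r" (x) : "0" (x));  */
void
bump (int x)
{
  __asm__ ("incl %0" : "+r" (x));
}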
4914 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4915 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4916 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4917 return to this function.
4919 FIXME should we complexify the prequeue handling instead? Or use flags
4920 for all the cleanups and let the optimizer tighten them up? The current
4921 code seems pretty fragile; it will break on a cleanup within any
4922 non-conditional nesting. But any such nesting would be broken, anyway;
4923 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4924 and continues out of it. We can do that at the RTL level, though, so
4925 having an optimizer to tighten up try/finally regions would be a Good
4926 Thing. */
4928 static enum gimplify_status
4929 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
4931 gimple_stmt_iterator iter;
4932 gimple_seq body_sequence = NULL;
4934 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4936 /* We only care about the number of conditions between the innermost
4937 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4938 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4939 int old_conds = gimplify_ctxp->conditions;
4940 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
4941 gimplify_ctxp->conditions = 0;
4942 gimplify_ctxp->conditional_cleanups = NULL;
4944 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
4946 gimplify_ctxp->conditions = old_conds;
4947 gimplify_ctxp->conditional_cleanups = old_cleanups;
4949 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
4951 gimple wce = gsi_stmt (iter);
4953 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
4955 if (gsi_one_before_end_p (iter))
4957 /* Note that gsi_insert_seq_before and gsi_remove do not
4958 scan operands, unlike some other sequence mutators. */
4959 gsi_insert_seq_before_without_update (&iter,
4960 gimple_wce_cleanup (wce),
4961 GSI_SAME_STMT);
4962 gsi_remove (&iter, true);
4963 break;
4965 else
4967 gimple gtry;
4968 gimple_seq seq;
4969 enum gimple_try_flags kind;
4971 if (gimple_wce_cleanup_eh_only (wce))
4972 kind = GIMPLE_TRY_CATCH;
4973 else
4974 kind = GIMPLE_TRY_FINALLY;
4975 seq = gsi_split_seq_after (iter);
4977 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
4978 /* Do not use gsi_replace here, as it may scan operands.
4979 We want to do a simple structural modification only. */
4980 *gsi_stmt_ptr (&iter) = gtry;
4981 iter = gsi_start (seq);
4984 else
4985 gsi_next (&iter);
4988 gimplify_seq_add_seq (pre_p, body_sequence);
4989 if (temp)
4991 *expr_p = temp;
4992 return GS_OK;
4994 else
4996 *expr_p = NULL;
4997 return GS_ALL_DONE;
5001 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5002 is the cleanup action required. EH_ONLY is true if the cleanup should
5003 only be executed if an exception is thrown, not on normal exit. */
5005 static void
5006 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5008 gimple wce;
5009 gimple_seq cleanup_stmts = NULL;
5011 /* Errors can result in improperly nested cleanups, which results in
5012 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5013 if (errorcount || sorrycount)
5014 return;
5016 if (gimple_conditional_context ())
5018 /* If we're in a conditional context, this is more complex. We only
5019 want to run the cleanup if we actually ran the initialization that
5020 necessitates it, but we want to run it after the end of the
5021 conditional context. So we wrap the try/finally around the
5022 condition and use a flag to determine whether or not to actually
5023 run the destructor. Thus
5025 test ? f(A()) : 0
5027 becomes (approximately)
5029 flag = 0;
5030 try {
5031 if (test) { A::A(temp); flag = 1; val = f(temp); }
5032 else { val = 0; }
5033 } finally {
5034 if (flag) A::~A(temp);
5038 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5039 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5040 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5042 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5043 gimplify_stmt (&cleanup, &cleanup_stmts);
5044 wce = gimple_build_wce (cleanup_stmts);
5046 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5047 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5048 gimplify_seq_add_stmt (pre_p, ftrue);
5050 /* Because of this manipulation, and the EH edges that jump
5051 threading cannot redirect, the temporary (VAR) will appear
5052 to be used uninitialized. Don't warn. */
5053 TREE_NO_WARNING (var) = 1;
5055 else
5057 gimplify_stmt (&cleanup, &cleanup_stmts);
5058 wce = gimple_build_wce (cleanup_stmts);
5059 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5060 gimplify_seq_add_stmt (pre_p, wce);
5064 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5066 static enum gimplify_status
5067 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5069 tree targ = *expr_p;
5070 tree temp = TARGET_EXPR_SLOT (targ);
5071 tree init = TARGET_EXPR_INITIAL (targ);
5072 enum gimplify_status ret;
5074 if (init)
5076 /* A TARGET_EXPR temp isn't part of the enclosing block, so add it
5077 to the temps list. Also handle variable-length TARGET_EXPRs. */
5078 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5080 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5081 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5082 gimplify_vla_decl (temp, pre_p);
5084 else
5085 gimple_add_tmp_var (temp);
5087 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5088 expression is supposed to initialize the slot. */
5089 if (VOID_TYPE_P (TREE_TYPE (init)))
5090 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5091 else
5093 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5094 init = init_expr;
5095 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5096 init = NULL;
5097 ggc_free (init_expr);
5099 if (ret == GS_ERROR)
5101 /* PR c++/28266 Make sure this is expanded only once. */
5102 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5103 return GS_ERROR;
5105 if (init)
5106 gimplify_and_add (init, pre_p);
5108 /* If needed, push the cleanup for the temp. */
5109 if (TARGET_EXPR_CLEANUP (targ))
5110 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5111 CLEANUP_EH_ONLY (targ), pre_p);
5113 /* Only expand this once. */
5114 TREE_OPERAND (targ, 3) = init;
5115 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5117 else
5118 /* We should have expanded this before. */
5119 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5121 *expr_p = temp;
5122 return GS_OK;
5125 /* Gimplification of expression trees. */
5127 /* Gimplify an expression which appears at statement context. The
5128 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5129 NULL, a new sequence is allocated.
5131 Return true if we actually added a statement to the queue. */
5133 bool
5134 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5136 gimple_seq_node last;
5138 if (!*seq_p)
5139 *seq_p = gimple_seq_alloc ();
5141 last = gimple_seq_last (*seq_p);
5142 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5143 return last != gimple_seq_last (*seq_p);
5147 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
5148 to CTX. If entries already exist, force them to be some flavor of private.
5149 If there is no enclosing parallel, do nothing. */
5151 void
5152 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5154 splay_tree_node n;
5156 if (decl == NULL || !DECL_P (decl))
5157 return;
5161 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5162 if (n != NULL)
5164 if (n->value & GOVD_SHARED)
5165 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5166 else
5167 return;
5169 else if (ctx->region_type != ORT_WORKSHARE)
5170 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5172 ctx = ctx->outer_context;
5174 while (ctx);
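/* Illustrative sketch, not part of gimplify.c, and the trigger is an
   assumption: when a variable-length array declared outside a parallel
   is used inside it, the temporaries holding its type sizes are entered
   here as firstprivate in the enclosing parallels so that the size is
   available within the region.  */
void
scale (int n)
{
  double buf[n];
#pragma omp parallel
  buf[0] = n;
}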
5177 /* Similarly for each of the type sizes of TYPE. */
5179 static void
5180 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5182 if (type == NULL || type == error_mark_node)
5183 return;
5184 type = TYPE_MAIN_VARIANT (type);
5186 if (pointer_set_insert (ctx->privatized_types, type))
5187 return;
5189 switch (TREE_CODE (type))
5191 case INTEGER_TYPE:
5192 case ENUMERAL_TYPE:
5193 case BOOLEAN_TYPE:
5194 case REAL_TYPE:
5195 case FIXED_POINT_TYPE:
5196 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5197 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5198 break;
5200 case ARRAY_TYPE:
5201 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5202 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5203 break;
5205 case RECORD_TYPE:
5206 case UNION_TYPE:
5207 case QUAL_UNION_TYPE:
5209 tree field;
5210 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5211 if (TREE_CODE (field) == FIELD_DECL)
5213 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5214 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5217 break;
5219 case POINTER_TYPE:
5220 case REFERENCE_TYPE:
5221 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5222 break;
5224 default:
5225 break;
5228 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5229 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5230 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5233 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5235 static void
5236 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5238 splay_tree_node n;
5239 unsigned int nflags;
5240 tree t;
5242 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5243 return;
5245 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5246 there are constructors involved somewhere. */
5247 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5248 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5249 flags |= GOVD_SEEN;
5251 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5252 if (n != NULL)
5254 /* We shouldn't be re-adding the decl with the same data
5255 sharing class. */
5256 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5257 /* The only combination of data sharing classes we should see is
5258 FIRSTPRIVATE and LASTPRIVATE. */
5259 nflags = n->value | flags;
5260 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5261 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5262 n->value = nflags;
5263 return;
5266 /* When adding a variable-sized variable, we have to handle all sorts
5267 of additional bits of data: the pointer replacement variable, and
5268 the parameters of the type. */
5269 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5271 /* Add the pointer replacement variable as PRIVATE if the variable
5272 replacement is private, else FIRSTPRIVATE since we'll need the
5273 address of the original variable either for SHARED, or for the
5274 copy into or out of the context. */
5275 if (!(flags & GOVD_LOCAL))
5277 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5278 nflags |= flags & GOVD_SEEN;
5279 t = DECL_VALUE_EXPR (decl);
5280 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5281 t = TREE_OPERAND (t, 0);
5282 gcc_assert (DECL_P (t));
5283 omp_add_variable (ctx, t, nflags);
5286 /* Add all of the variable and type parameters (which should have
5287 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5288 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5289 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5290 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5292 /* The variable-sized variable itself is never SHARED, only some form
5293 of PRIVATE. The sharing would take place via the pointer variable
5294 which we remapped above. */
5295 if (flags & GOVD_SHARED)
5296 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5297 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5299 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5300 alloca statement we generate for the variable, so make sure it
5301 is available. This isn't automatically needed for the SHARED
5302 case, since we won't be allocating local storage then.
5303 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5304 in this case omp_notice_variable will be called later
5305 on when it is gimplified. */
5306 else if (! (flags & GOVD_LOCAL))
5307 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5309 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5311 gcc_assert ((flags & GOVD_LOCAL) == 0);
5312 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5314 /* Similar to the direct variable sized case above, we'll need the
5315 size of references being privatized. */
5316 if ((flags & GOVD_SHARED) == 0)
5318 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5319 if (TREE_CODE (t) != INTEGER_CST)
5320 omp_notice_variable (ctx, t, true);
5324 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5327 /* Record the fact that DECL was used within the OpenMP context CTX.
5328 IN_CODE is true when real code uses DECL, and false when we should
5329 merely emit default(none) errors. Return true if DECL is going to
5330 be remapped and thus DECL shouldn't be gimplified into its
5331 DECL_VALUE_EXPR (if any). */
5333 static bool
5334 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5336 splay_tree_node n;
5337 unsigned flags = in_code ? GOVD_SEEN : 0;
5338 bool ret = false, shared;
5340 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5341 return false;
5343 /* Threadprivate variables are predetermined. */
5344 if (is_global_var (decl))
5346 if (DECL_THREAD_LOCAL_P (decl))
5347 return false;
5349 if (DECL_HAS_VALUE_EXPR_P (decl))
5351 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5353 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5354 return false;
5358 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5359 if (n == NULL)
5361 enum omp_clause_default_kind default_kind, kind;
5362 struct gimplify_omp_ctx *octx;
5364 if (ctx->region_type == ORT_WORKSHARE)
5365 goto do_outer;
5367 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5368 remapped firstprivate instead of shared. To some extent this is
5369 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5370 default_kind = ctx->default_kind;
5371 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5372 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5373 default_kind = kind;
5375 switch (default_kind)
5377 case OMP_CLAUSE_DEFAULT_NONE:
5378 error ("%qE not specified in enclosing parallel",
5379 DECL_NAME (decl));
5380 error_at (ctx->location, "enclosing parallel");
5381 /* FALLTHRU */
5382 case OMP_CLAUSE_DEFAULT_SHARED:
5383 flags |= GOVD_SHARED;
5384 break;
5385 case OMP_CLAUSE_DEFAULT_PRIVATE:
5386 flags |= GOVD_PRIVATE;
5387 break;
5388 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5389 flags |= GOVD_FIRSTPRIVATE;
5390 break;
5391 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5392 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5393 gcc_assert (ctx->region_type == ORT_TASK);
5394 if (ctx->outer_context)
5395 omp_notice_variable (ctx->outer_context, decl, in_code);
5396 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5398 splay_tree_node n2;
5400 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5401 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5403 flags |= GOVD_FIRSTPRIVATE;
5404 break;
5406 if ((octx->region_type & ORT_PARALLEL) != 0)
5407 break;
5409 if (flags & GOVD_FIRSTPRIVATE)
5410 break;
5411 if (octx == NULL
5412 && (TREE_CODE (decl) == PARM_DECL
5413 || (!is_global_var (decl)
5414 && DECL_CONTEXT (decl) == current_function_decl)))
5416 flags |= GOVD_FIRSTPRIVATE;
5417 break;
5419 flags |= GOVD_SHARED;
5420 break;
5421 default:
5422 gcc_unreachable ();
5425 if ((flags & GOVD_PRIVATE)
5426 && lang_hooks.decls.omp_private_outer_ref (decl))
5427 flags |= GOVD_PRIVATE_OUTER_REF;
5429 omp_add_variable (ctx, decl, flags);
5431 shared = (flags & GOVD_SHARED) != 0;
5432 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5433 goto do_outer;
5436 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5437 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5438 && DECL_SIZE (decl)
5439 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5441 splay_tree_node n2;
5442 tree t = DECL_VALUE_EXPR (decl);
5443 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5444 t = TREE_OPERAND (t, 0);
5445 gcc_assert (DECL_P (t));
5446 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5447 n2->value |= GOVD_SEEN;
5450 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5451 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5453 /* If nothing changed, there's nothing left to do. */
5454 if ((n->value & flags) == flags)
5455 return ret;
5456 flags |= n->value;
5457 n->value = flags;
5459 do_outer:
5460 /* If the variable is private in the current context, then we don't
5461 need to propagate anything to an outer context. */
5462 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5463 return ret;
5464 if (ctx->outer_context
5465 && omp_notice_variable (ctx->outer_context, decl, in_code))
5466 return true;
5467 return ret;
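/* Illustrative sketch, not part of gimplify.c; the variable and function
   are hypothetical.  With default(none), a variable used in the region
   without predetermined or explicit sharing reaches the
   OMP_CLAUSE_DEFAULT_NONE case above and is diagnosed as
   "'x' not specified in enclosing parallel".  */
int x;

void
touch (void)
{
#pragma omp parallel default(none)
  x = 1;
}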
5470 /* Verify that DECL is private within CTX. If there's specific information
5471 to the contrary in the innermost scope, generate an error. */
5473 static bool
5474 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5476 splay_tree_node n;
5478 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5479 if (n != NULL)
5481 if (n->value & GOVD_SHARED)
5483 if (ctx == gimplify_omp_ctxp)
5485 error ("iteration variable %qE should be private",
5486 DECL_NAME (decl));
5487 n->value = GOVD_PRIVATE;
5488 return true;
5490 else
5491 return false;
5493 else if ((n->value & GOVD_EXPLICIT) != 0
5494 && (ctx == gimplify_omp_ctxp
5495 || (ctx->region_type == ORT_COMBINED_PARALLEL
5496 && gimplify_omp_ctxp->outer_context == ctx)))
5498 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5499 error ("iteration variable %qE should not be firstprivate",
5500 DECL_NAME (decl));
5501 else if ((n->value & GOVD_REDUCTION) != 0)
5502 error ("iteration variable %qE should not be reduction",
5503 DECL_NAME (decl));
5505 return (ctx == gimplify_omp_ctxp
5506 || (ctx->region_type == ORT_COMBINED_PARALLEL
5507 && gimplify_omp_ctxp->outer_context == ctx));
5510 if (ctx->region_type != ORT_WORKSHARE)
5511 return false;
5512 else if (ctx->outer_context)
5513 return omp_is_private (ctx->outer_context, decl);
5514 return false;
5517 /* Return true if DECL is private within a parallel region
5518 that binds to the current construct's context or in the parallel
5519 region's REDUCTION clause. */
5521 static bool
5522 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5524 splay_tree_node n;
5528 ctx = ctx->outer_context;
5529 if (ctx == NULL)
5530 return !(is_global_var (decl)
5531 /* References might be private, but might be shared too. */
5532 || lang_hooks.decls.omp_privatize_by_reference (decl));
5534 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5535 if (n != NULL)
5536 return (n->value & GOVD_SHARED) == 0;
5538 while (ctx->region_type == ORT_WORKSHARE);
5539 return false;
5542 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5543 omp context and the previous omp contexts. */
5545 static void
5546 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5547 enum omp_region_type region_type)
5549 struct gimplify_omp_ctx *ctx, *outer_ctx;
5550 struct gimplify_ctx gctx;
5551 tree c;
5553 ctx = new_omp_context (region_type);
5554 outer_ctx = ctx->outer_context;
5556 while ((c = *list_p) != NULL)
5558 bool remove = false;
5559 bool notice_outer = true;
5560 const char *check_non_private = NULL;
5561 unsigned int flags;
5562 tree decl;
5564 switch (OMP_CLAUSE_CODE (c))
5566 case OMP_CLAUSE_PRIVATE:
5567 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5568 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5570 flags |= GOVD_PRIVATE_OUTER_REF;
5571 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5573 else
5574 notice_outer = false;
5575 goto do_add;
5576 case OMP_CLAUSE_SHARED:
5577 flags = GOVD_SHARED | GOVD_EXPLICIT;
5578 goto do_add;
5579 case OMP_CLAUSE_FIRSTPRIVATE:
5580 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5581 check_non_private = "firstprivate";
5582 goto do_add;
5583 case OMP_CLAUSE_LASTPRIVATE:
5584 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5585 check_non_private = "lastprivate";
5586 goto do_add;
5587 case OMP_CLAUSE_REDUCTION:
5588 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5589 check_non_private = "reduction";
5590 goto do_add;
5592 do_add:
5593 decl = OMP_CLAUSE_DECL (c);
5594 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5596 remove = true;
5597 break;
5599 omp_add_variable (ctx, decl, flags);
5600 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5601 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5603 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5604 GOVD_LOCAL | GOVD_SEEN);
5605 gimplify_omp_ctxp = ctx;
5606 push_gimplify_context (&gctx);
5608 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5609 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5611 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5612 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5613 pop_gimplify_context
5614 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5615 push_gimplify_context (&gctx);
5616 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5617 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5618 pop_gimplify_context
5619 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5620 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5621 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5623 gimplify_omp_ctxp = outer_ctx;
5625 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5626 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5628 gimplify_omp_ctxp = ctx;
5629 push_gimplify_context (&gctx);
5630 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5632 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5633 NULL, NULL);
5634 TREE_SIDE_EFFECTS (bind) = 1;
5635 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5636 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5638 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5639 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5640 pop_gimplify_context
5641 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5642 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5644 gimplify_omp_ctxp = outer_ctx;
5646 if (notice_outer)
5647 goto do_notice;
5648 break;
5650 case OMP_CLAUSE_COPYIN:
5651 case OMP_CLAUSE_COPYPRIVATE:
5652 decl = OMP_CLAUSE_DECL (c);
5653 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5655 remove = true;
5656 break;
5658 do_notice:
5659 if (outer_ctx)
5660 omp_notice_variable (outer_ctx, decl, true);
5661 if (check_non_private
5662 && region_type == ORT_WORKSHARE
5663 && omp_check_private (ctx, decl))
5665 error ("%s variable %qE is private in outer context",
5666 check_non_private, DECL_NAME (decl));
5667 remove = true;
5669 break;
5671 case OMP_CLAUSE_IF:
5672 OMP_CLAUSE_OPERAND (c, 0)
5673 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5674 /* Fall through. */
5676 case OMP_CLAUSE_SCHEDULE:
5677 case OMP_CLAUSE_NUM_THREADS:
5678 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5679 is_gimple_val, fb_rvalue) == GS_ERROR)
5680 remove = true;
5681 break;
5683 case OMP_CLAUSE_NOWAIT:
5684 case OMP_CLAUSE_ORDERED:
5685 case OMP_CLAUSE_UNTIED:
5686 case OMP_CLAUSE_COLLAPSE:
5687 break;
5689 case OMP_CLAUSE_DEFAULT:
5690 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5691 break;
5693 default:
5694 gcc_unreachable ();
5697 if (remove)
5698 *list_p = OMP_CLAUSE_CHAIN (c);
5699 else
5700 list_p = &OMP_CLAUSE_CHAIN (c);
5703 gimplify_omp_ctxp = ctx;
5706 /* For all variables that were not actually used within the context,
5707 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. The callback below adds the implicit data-sharing clauses for variables that were referenced but carry no explicit clause. */
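/* Example of the implicit data sharing added by this callback
   (illustrative code):

     int x = 0;
     #pragma omp parallel
     x++;

   X is recorded as GOVD_SHARED | GOVD_SEEN by omp_notice_variable and has
   no explicit clause, so a shared(x) clause is synthesized below and
   prepended to the clause list.  */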
5709 static int
5710 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5712 tree *list_p = (tree *) data;
5713 tree decl = (tree) n->key;
5714 unsigned flags = n->value;
5715 enum omp_clause_code code;
5716 tree clause;
5717 bool private_debug;
5719 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5720 return 0;
5721 if ((flags & GOVD_SEEN) == 0)
5722 return 0;
5723 if (flags & GOVD_DEBUG_PRIVATE)
5725 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5726 private_debug = true;
5728 else
5729 private_debug
5730 = lang_hooks.decls.omp_private_debug_clause (decl,
5731 !!(flags & GOVD_SHARED));
5732 if (private_debug)
5733 code = OMP_CLAUSE_PRIVATE;
5734 else if (flags & GOVD_SHARED)
5736 if (is_global_var (decl))
5738 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5739 while (ctx != NULL)
5741 splay_tree_node on
5742 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5743 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5744 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5745 break;
5746 ctx = ctx->outer_context;
5748 if (ctx == NULL)
5749 return 0;
5751 code = OMP_CLAUSE_SHARED;
5753 else if (flags & GOVD_PRIVATE)
5754 code = OMP_CLAUSE_PRIVATE;
5755 else if (flags & GOVD_FIRSTPRIVATE)
5756 code = OMP_CLAUSE_FIRSTPRIVATE;
5757 else
5758 gcc_unreachable ();
5760 clause = build_omp_clause (input_location, code);
5761 OMP_CLAUSE_DECL (clause) = decl;
5762 OMP_CLAUSE_CHAIN (clause) = *list_p;
5763 if (private_debug)
5764 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5765 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5766 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
5767 *list_p = clause;
5768 lang_hooks.decls.omp_finish_clause (clause);
5770 return 0;
5773 static void
5774 gimplify_adjust_omp_clauses (tree *list_p)
5776 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5777 tree c, decl;
5779 while ((c = *list_p) != NULL)
5781 splay_tree_node n;
5782 bool remove = false;
5784 switch (OMP_CLAUSE_CODE (c))
5786 case OMP_CLAUSE_PRIVATE:
5787 case OMP_CLAUSE_SHARED:
5788 case OMP_CLAUSE_FIRSTPRIVATE:
5789 decl = OMP_CLAUSE_DECL (c);
5790 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5791 remove = !(n->value & GOVD_SEEN);
5792 if (! remove)
5794 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5795 if ((n->value & GOVD_DEBUG_PRIVATE)
5796 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5798 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5799 || ((n->value & GOVD_DATA_SHARE_CLASS)
5800 == GOVD_PRIVATE));
5801 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5802 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5805 break;
5807 case OMP_CLAUSE_LASTPRIVATE:
5808 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5809 accurately reflect the presence of a FIRSTPRIVATE clause. */
5810 decl = OMP_CLAUSE_DECL (c);
5811 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5812 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5813 = (n->value & GOVD_FIRSTPRIVATE) != 0;
5814 break;
5816 case OMP_CLAUSE_REDUCTION:
5817 case OMP_CLAUSE_COPYIN:
5818 case OMP_CLAUSE_COPYPRIVATE:
5819 case OMP_CLAUSE_IF:
5820 case OMP_CLAUSE_NUM_THREADS:
5821 case OMP_CLAUSE_SCHEDULE:
5822 case OMP_CLAUSE_NOWAIT:
5823 case OMP_CLAUSE_ORDERED:
5824 case OMP_CLAUSE_DEFAULT:
5825 case OMP_CLAUSE_UNTIED:
5826 case OMP_CLAUSE_COLLAPSE:
5827 break;
5829 default:
5830 gcc_unreachable ();
5833 if (remove)
5834 *list_p = OMP_CLAUSE_CHAIN (c);
5835 else
5836 list_p = &OMP_CLAUSE_CHAIN (c);
5839 /* Add in any implicit data sharing. */
5840 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
5842 gimplify_omp_ctxp = ctx->outer_context;
5843 delete_omp_context (ctx);
5846 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5847 gimplification of the body, as well as scanning the body for used
5848 variables. We need to do this scan now, because variable-sized
5849 decls will be decomposed during gimplification. */
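/* Sketch of the result (illustrative): a GENERIC

     #pragma omp parallel num_threads (4)
       { body }

   becomes a GIMPLE_OMP_PARALLEL statement carrying the scanned and
   adjusted clause list and the gimplified body; the statement is appended
   to PRE_P and *EXPR_P is cleared.  */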
5851 static void
5852 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
5854 tree expr = *expr_p;
5855 gimple g;
5856 gimple_seq body = NULL;
5857 struct gimplify_ctx gctx;
5859 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
5860 OMP_PARALLEL_COMBINED (expr)
5861 ? ORT_COMBINED_PARALLEL
5862 : ORT_PARALLEL);
5864 push_gimplify_context (&gctx);
5866 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
5867 if (gimple_code (g) == GIMPLE_BIND)
5868 pop_gimplify_context (g);
5869 else
5870 pop_gimplify_context (NULL);
5872 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
5874 g = gimple_build_omp_parallel (body,
5875 OMP_PARALLEL_CLAUSES (expr),
5876 NULL_TREE, NULL_TREE);
5877 if (OMP_PARALLEL_COMBINED (expr))
5878 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
5879 gimplify_seq_add_stmt (pre_p, g);
5880 *expr_p = NULL_TREE;
5883 /* Gimplify the contents of an OMP_TASK statement. This involves
5884 gimplification of the body, as well as scanning the body for used
5885 variables. We need to do this scan now, because variable-sized
5886 decls will be decomposed during gimplification. */
5888 static void
5889 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
5891 tree expr = *expr_p;
5892 gimple g;
5893 gimple_seq body = NULL;
5894 struct gimplify_ctx gctx;
5896 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
5898 push_gimplify_context (&gctx);
5900 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
5901 if (gimple_code (g) == GIMPLE_BIND)
5902 pop_gimplify_context (g);
5903 else
5904 pop_gimplify_context (NULL);
5906 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
5908 g = gimple_build_omp_task (body,
5909 OMP_TASK_CLAUSES (expr),
5910 NULL_TREE, NULL_TREE,
5911 NULL_TREE, NULL_TREE, NULL_TREE);
5912 gimplify_seq_add_stmt (pre_p, g);
5913 *expr_p = NULL_TREE;
5916 /* Gimplify the gross structure of an OMP_FOR statement. */
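/* Illustrative input (hypothetical): for

     #pragma omp for
     for (i = 0; i < n; i++)
       body;

   OMP_FOR_INIT, OMP_FOR_COND and OMP_FOR_INCR hold "i = 0", "i < n" and
   "i++" for each collapsed loop; below, the increment is canonicalized
   into "i = i + 1" and the loop bounds are gimplified into the pre-body
   sequence of the resulting GIMPLE_OMP_FOR.  */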
5918 static enum gimplify_status
5919 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
5921 tree for_stmt, decl, var, t;
5922 enum gimplify_status ret = GS_ALL_DONE;
5923 enum gimplify_status tret;
5924 gimple gfor;
5925 gimple_seq for_body, for_pre_body;
5926 int i;
5928 for_stmt = *expr_p;
5930 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
5931 ORT_WORKSHARE);
5933 /* Handle OMP_FOR_INIT. */
5934 for_pre_body = NULL;
5935 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
5936 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
5938 for_body = gimple_seq_alloc ();
5939 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
5940 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
5941 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
5942 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
5943 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
5945 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
5946 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
5947 decl = TREE_OPERAND (t, 0);
5948 gcc_assert (DECL_P (decl));
5949 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
5950 || POINTER_TYPE_P (TREE_TYPE (decl)));
5952 /* Make sure the iteration variable is private. */
5953 if (omp_is_private (gimplify_omp_ctxp, decl))
5954 omp_notice_variable (gimplify_omp_ctxp, decl, true);
5955 else
5956 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
5958 /* If DECL is not a gimple register, create a temporary variable to act
5959 as an iteration counter. This is valid, since DECL cannot be
5960 modified in the body of the loop. */
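/* E.g. (hypothetical names): if the user's index I is addressable, a
   fresh register temporary replaces it in the loop control below and an
   assignment from that temporary back to I is emitted at the start of
   the loop body, so the original variable still holds the expected value
   there.  */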
5961 if (!is_gimple_reg (decl))
5963 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
5964 TREE_OPERAND (t, 0) = var;
5966 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
5968 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
5970 else
5971 var = decl;
5973 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
5974 is_gimple_val, fb_rvalue);
5975 ret = MIN (ret, tret);
5976 if (ret == GS_ERROR)
5977 return ret;
5979 /* Handle OMP_FOR_COND. */
5980 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
5981 gcc_assert (COMPARISON_CLASS_P (t));
5982 gcc_assert (TREE_OPERAND (t, 0) == decl);
5984 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
5985 is_gimple_val, fb_rvalue);
5986 ret = MIN (ret, tret);
5988 /* Handle OMP_FOR_INCR. */
5989 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
5990 switch (TREE_CODE (t))
5992 case PREINCREMENT_EXPR:
5993 case POSTINCREMENT_EXPR:
5994 t = build_int_cst (TREE_TYPE (decl), 1);
5995 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
5996 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
5997 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
5998 break;
6000 case PREDECREMENT_EXPR:
6001 case POSTDECREMENT_EXPR:
6002 t = build_int_cst (TREE_TYPE (decl), -1);
6003 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6004 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6005 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6006 break;
6008 case MODIFY_EXPR:
6009 gcc_assert (TREE_OPERAND (t, 0) == decl);
6010 TREE_OPERAND (t, 0) = var;
6012 t = TREE_OPERAND (t, 1);
6013 switch (TREE_CODE (t))
6015 case PLUS_EXPR:
6016 case PLUSNV_EXPR:
6017 if (TREE_OPERAND (t, 1) == decl)
6019 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6020 TREE_OPERAND (t, 0) = var;
6021 break;
6024 /* Fallthru. */
6025 case MINUS_EXPR:
6026 case MINUSNV_EXPR:
6027 case POINTER_PLUS_EXPR:
6028 case POINTER_PLUSNV_EXPR:
6029 gcc_assert (TREE_OPERAND (t, 0) == decl);
6030 TREE_OPERAND (t, 0) = var;
6031 break;
6032 default:
6033 gcc_unreachable ();
6036 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6037 is_gimple_val, fb_rvalue);
6038 ret = MIN (ret, tret);
6039 break;
6041 default:
6042 gcc_unreachable ();
6045 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6047 tree c;
6048 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6049 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6050 && OMP_CLAUSE_DECL (c) == decl
6051 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6053 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6054 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6055 gcc_assert (TREE_OPERAND (t, 0) == var);
6056 t = TREE_OPERAND (t, 1);
6057 gcc_assert (PLUS_EXPR_P (t)
6058 || MINUS_EXPR_P (t)
6059 || POINTER_PLUS_EXPR_P (t));
6060 gcc_assert (TREE_OPERAND (t, 0) == var);
6061 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6062 TREE_OPERAND (t, 1));
6063 gimplify_assign (decl, t,
6064 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6069 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6071 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6073 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6074 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6075 for_pre_body);
6077 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6079 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6080 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6081 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6082 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6083 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6084 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6085 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6086 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6089 gimplify_seq_add_stmt (pre_p, gfor);
6090 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6093 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6094 In particular, OMP_SECTIONS and OMP_SINGLE. */
6096 static void
6097 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6099 tree expr = *expr_p;
6100 gimple stmt;
6101 gimple_seq body = NULL;
6103 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6104 gimplify_and_add (OMP_BODY (expr), &body);
6105 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6107 if (TREE_CODE (expr) == OMP_SECTIONS)
6108 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6109 else if (TREE_CODE (expr) == OMP_SINGLE)
6110 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6111 else
6112 gcc_unreachable ();
6114 gimplify_seq_add_stmt (pre_p, stmt);
6117 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6118 stabilized the lhs of the atomic operation as *ADDR. Return true if
6119 EXPR is this stabilized form. */
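/* Illustrative case: for "#pragma omp atomic" on a volatile int V, the
   update expression may refer to the lhs as a NOP_EXPR cast of
   INDIRECT_REF (&V) from "volatile int" to "int"; the loop below strips
   such same-main-variant casts before comparing against ADDR.  */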
6121 static bool
6122 goa_lhs_expr_p (tree expr, tree addr)
6124 /* Also include casts to other type variants. The C front end is fond
6125 of adding these for e.g. volatile variables. This is like
6126 STRIP_TYPE_NOPS but includes the main variant lookup. */
6127 while ((CONVERT_EXPR_P (expr)
6128 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6129 && TREE_OPERAND (expr, 0) != error_mark_node
6130 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
6131 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
6132 expr = TREE_OPERAND (expr, 0);
6134 if (TREE_CODE (expr) == INDIRECT_REF)
6136 expr = TREE_OPERAND (expr, 0);
6137 while (expr != addr
6138 && (CONVERT_EXPR_P (expr)
6139 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6140 && TREE_CODE (expr) == TREE_CODE (addr)
6141 && TYPE_MAIN_VARIANT (TREE_TYPE (expr))
6142 == TYPE_MAIN_VARIANT (TREE_TYPE (addr)))
6144 expr = TREE_OPERAND (expr, 0);
6145 addr = TREE_OPERAND (addr, 0);
6147 if (expr == addr)
6148 return true;
6149 return (TREE_CODE (addr) == ADDR_EXPR
6150 && TREE_CODE (expr) == ADDR_EXPR
6151 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6153 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6154 return true;
6155 return false;
6158 /* Walk *EXPR_P and replace
6159 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
6160 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
6161 a subexpression, 0 if it did not, or -1 if an error was encountered. */
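/* Illustrative walk (hypothetical source): for "x = x + foo ()" under
   "#pragma omp atomic", the occurrence of X is replaced by LHS_VAR, while
   "foo ()" does not involve the lhs and is therefore evaluated into a
   temporary in PRE_P; the function then returns 1.  */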
6163 static int
6164 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6165 tree lhs_var)
6167 tree expr = *expr_p;
6168 int saw_lhs;
6170 if (goa_lhs_expr_p (expr, lhs_addr))
6172 *expr_p = lhs_var;
6173 return 1;
6175 if (is_gimple_val (expr))
6176 return 0;
6178 saw_lhs = 0;
6179 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6181 case tcc_binary:
6182 case tcc_comparison:
6183 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6184 lhs_var);
6185 case tcc_unary:
6186 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6187 lhs_var);
6188 break;
6189 case tcc_expression:
6190 switch (TREE_CODE (expr))
6192 case TRUTH_ANDIF_EXPR:
6193 case TRUTH_ORIF_EXPR:
6194 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6195 lhs_addr, lhs_var);
6196 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6197 lhs_addr, lhs_var);
6198 break;
6199 default:
6200 break;
6202 break;
6203 default:
6204 break;
6207 if (saw_lhs == 0)
6209 enum gimplify_status gs;
6210 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6211 if (gs != GS_ALL_DONE)
6212 saw_lhs = -1;
6215 return saw_lhs;
6219 /* Gimplify an OMP_ATOMIC statement. */
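/* Sketch of the lowering (illustrative, pseudo-GIMPLE) for
   "#pragma omp atomic  x += y":

     tmp.load = OMP_ATOMIC_LOAD <&x>
     tmp.new  = tmp.load + y
     OMP_ATOMIC_STORE <tmp.new>

   where tmp.load and tmp.new stand for the temporaries created while
   stabilizing and gimplifying the rhs.  */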
6221 static enum gimplify_status
6222 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6224 tree addr = TREE_OPERAND (*expr_p, 0);
6225 tree rhs = TREE_OPERAND (*expr_p, 1);
6226 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6227 tree tmp_load;
6229 tmp_load = create_tmp_var (type, NULL);
6230 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6231 return GS_ERROR;
6233 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6234 != GS_ALL_DONE)
6235 return GS_ERROR;
6237 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6238 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6239 != GS_ALL_DONE)
6240 return GS_ERROR;
6241 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6242 *expr_p = NULL;
6244 return GS_ALL_DONE;
6248 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6249 expression produces a value to be used as an operand inside a GIMPLE
6250 statement, the value will be stored back in *EXPR_P. This value will
6251 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6252 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6253 emitted in PRE_P and POST_P.
6255 Additionally, this process may overwrite parts of the input
6256 expression during gimplification. Ideally, it should be
6257 possible to do non-destructive gimplification.
6259 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6260 the expression needs to evaluate to a value to be used as
6261 an operand in a GIMPLE statement, this value will be stored in
6262 *EXPR_P on exit. This happens when the caller specifies one
6263 of fb_lvalue or fb_rvalue fallback flags.
6265 PRE_P will contain the sequence of GIMPLE statements corresponding
6266 to the evaluation of EXPR and all the side-effects that must
6267 be executed before the main expression. On exit, the last
6268 statement of PRE_P is the core statement being gimplified. For
6269 instance, when gimplifying 'if (++a)' the last statement in
6270 PRE_P will be 'if (t.1)' where t.1 is the result of
6271 pre-incrementing 'a'.
6273 POST_P will contain the sequence of GIMPLE statements corresponding
6274 to the evaluation of all the side-effects that must be executed
6275 after the main expression. If this is NULL, the post
6276 side-effects are stored at the end of PRE_P.
6278 The reason why the output is split in two is to handle post
6279 side-effects explicitly. In some cases, an expression may have
6280 inner and outer post side-effects which need to be emitted in
6281 an order different from the one given by the recursive
6282 traversal. For instance, for the expression (*p--)++ the post
6283 side-effects of '--' must actually occur *after* the post
6284 side-effects of '++'. However, gimplification will first visit
6285 the inner expression, so if a separate POST sequence was not
6286 used, the resulting sequence would be:
6288 1 t.1 = *p
6289 2 p = p - 1
6290 3 t.2 = t.1 + 1
6291 4 *p = t.2
6293 However, the post-decrement operation in line #2 must not be
6294 evaluated until after the store to *p at line #4, so the
6295 correct sequence should be:
6297 1 t.1 = *p
6298 2 t.2 = t.1 + 1
6299 3 *p = t.2
6300 4 p = p - 1
6302 So, by specifying a separate post queue, it is possible
6303 to emit the post side-effects in the correct order.
6304 If POST_P is NULL, an internal queue will be used. Before
6305 returning to the caller, the sequence POST_P is appended to
6306 the main output sequence PRE_P.
6308 GIMPLE_TEST_F points to a function that takes a tree T and
6309 returns nonzero if T is in the GIMPLE form requested by the
6310 caller. The GIMPLE predicates are in gimple.c.
6312 FALLBACK tells the function what sort of a temporary we want if
6313 gimplification cannot produce an expression that complies with
6314 GIMPLE_TEST_F.
6316 fb_none means that no temporary should be generated
6317 fb_rvalue means that an rvalue is OK to generate
6318 fb_lvalue means that an lvalue is OK to generate
6319 fb_either means that either is OK, but an lvalue is preferable.
6320 fb_mayfail means that gimplification may fail (in which case
6321 GS_ERROR will be returned)
6323 The return value is either GS_ERROR or GS_ALL_DONE, since this
6324 function iterates until EXPR is completely gimplified or an error
6325 occurs. */
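/* Typical use within this file (illustrative): forcing an operand into a
   GIMPLE rvalue,

     ret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
                          is_gimple_val, fb_rvalue);

   after which TREE_OPERAND (t, 1) is a gimple value (constant, decl or
   SSA name) and any helper statements have been appended to *PRE_P.  */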
6327 enum gimplify_status
6328 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6329 bool (*gimple_test_f) (tree), fallback_t fallback)
6331 tree tmp;
6332 gimple_seq internal_pre = NULL;
6333 gimple_seq internal_post = NULL;
6334 tree save_expr;
6335 bool is_statement;
6336 location_t saved_location;
6337 enum gimplify_status ret;
6338 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6340 save_expr = *expr_p;
6341 if (save_expr == NULL_TREE)
6342 return GS_ALL_DONE;
6344 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6345 is_statement = gimple_test_f == is_gimple_stmt;
6346 if (is_statement)
6347 gcc_assert (pre_p);
6349 /* Consistency checks. */
6350 if (gimple_test_f == is_gimple_reg)
6351 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6352 else if (gimple_test_f == is_gimple_val
6353 || gimple_test_f == is_gimple_call_addr
6354 || gimple_test_f == is_gimple_condexpr
6355 || gimple_test_f == is_gimple_mem_rhs
6356 || gimple_test_f == is_gimple_mem_rhs_or_call
6357 || gimple_test_f == is_gimple_reg_rhs
6358 || gimple_test_f == is_gimple_reg_rhs_or_call
6359 || gimple_test_f == is_gimple_asm_val)
6360 gcc_assert (fallback & fb_rvalue);
6361 else if (gimple_test_f == is_gimple_min_lval
6362 || gimple_test_f == is_gimple_lvalue)
6363 gcc_assert (fallback & fb_lvalue);
6364 else if (gimple_test_f == is_gimple_addressable)
6365 gcc_assert (fallback & fb_either);
6366 else if (gimple_test_f == is_gimple_stmt)
6367 gcc_assert (fallback == fb_none);
6368 else
6370 /* We should have recognized the GIMPLE_TEST_F predicate to
6371 know what kind of fallback to use in case a temporary is
6372 needed to hold the value or address of *EXPR_P. */
6373 gcc_unreachable ();
6376 /* We used to check the predicate here and return immediately if it
6377 succeeds. This is wrong; the design is for gimplification to be
6378 idempotent, and for the predicates to only test for valid forms, not
6379 whether they are fully simplified. */
6380 if (pre_p == NULL)
6381 pre_p = &internal_pre;
6383 if (post_p == NULL)
6384 post_p = &internal_post;
6386 /* Remember the last statements added to PRE_P and POST_P. Every
6387 new statement added by the gimplification helpers needs to be
6388 annotated with location information. To centralize the
6389 responsibility, we remember the last statement that had been
6390 added to both queues before gimplifying *EXPR_P. If
6391 gimplification produces new statements in PRE_P and POST_P, those
6392 statements will be annotated with the same location information
6393 as *EXPR_P. */
6394 pre_last_gsi = gsi_last (*pre_p);
6395 post_last_gsi = gsi_last (*post_p);
6397 saved_location = input_location;
6398 if (save_expr != error_mark_node
6399 && EXPR_HAS_LOCATION (*expr_p))
6400 input_location = EXPR_LOCATION (*expr_p);
6402 /* Loop over the specific gimplifiers until the toplevel node
6403 remains the same. */
6406 /* Strip away as many useless type conversions as possible
6407 at the toplevel. */
6408 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6410 /* Remember the expr. */
6411 save_expr = *expr_p;
6413 /* Die, die, die, my darling. */
6414 if (save_expr == error_mark_node
6415 || (TREE_TYPE (save_expr)
6416 && TREE_TYPE (save_expr) == error_mark_node))
6418 ret = GS_ERROR;
6419 break;
6422 /* Do any language-specific gimplification. */
6423 ret = ((enum gimplify_status)
6424 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6425 if (ret == GS_OK)
6427 if (*expr_p == NULL_TREE)
6428 break;
6429 if (*expr_p != save_expr)
6430 continue;
6432 else if (ret != GS_UNHANDLED)
6433 break;
6435 ret = GS_OK;
6436 switch (TREE_CODE (*expr_p))
6438 /* First deal with the special cases. */
6440 case POSTINCREMENT_EXPR:
6441 case POSTDECREMENT_EXPR:
6442 case PREINCREMENT_EXPR:
6443 case PREDECREMENT_EXPR:
6444 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6445 fallback != fb_none);
6446 break;
6448 case ARRAY_REF:
6449 case ARRAY_RANGE_REF:
6450 case REALPART_EXPR:
6451 case IMAGPART_EXPR:
6452 case COMPONENT_REF:
6453 case VIEW_CONVERT_EXPR:
6454 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6455 fallback ? fallback : fb_rvalue);
6456 break;
6458 case COND_EXPR:
6459 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6461 /* C99 code may assign to an array in a structure value of a
6462 conditional expression, and this has undefined behavior
6463 only on execution, so create a temporary if an lvalue is
6464 required. */
6465 if (fallback == fb_lvalue)
6467 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6468 mark_addressable (*expr_p);
6470 break;
6472 case CALL_EXPR:
6473 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6475 /* C99 code may assign to an array in a structure returned
6476 from a function, and this has undefined behavior only on
6477 execution, so create a temporary if an lvalue is
6478 required. */
6479 if (fallback == fb_lvalue)
6481 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6482 mark_addressable (*expr_p);
6484 break;
6486 case TREE_LIST:
6487 gcc_unreachable ();
6489 case COMPOUND_EXPR:
6490 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6491 break;
6493 case COMPOUND_LITERAL_EXPR:
6494 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6495 break;
6497 case MODIFY_EXPR:
6498 case INIT_EXPR:
6499 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6500 fallback != fb_none);
6501 break;
6503 case TRUTH_ANDIF_EXPR:
6504 case TRUTH_ORIF_EXPR:
6505 /* Pass the source location of the outer expression. */
6506 ret = gimplify_boolean_expr (expr_p, saved_location);
6507 break;
6509 case TRUTH_NOT_EXPR:
6510 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6512 tree type = TREE_TYPE (*expr_p);
6513 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6514 ret = GS_OK;
6515 break;
6518 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6519 is_gimple_val, fb_rvalue);
6520 recalculate_side_effects (*expr_p);
6521 break;
6523 case ADDR_EXPR:
6524 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6525 break;
6527 case VA_ARG_EXPR:
6528 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6529 break;
6531 CASE_CONVERT:
6532 if (IS_EMPTY_STMT (*expr_p))
6534 ret = GS_ALL_DONE;
6535 break;
6538 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6539 || fallback == fb_none)
6541 /* Just strip a conversion to void (or in void context) and
6542 try again. */
6543 *expr_p = TREE_OPERAND (*expr_p, 0);
6544 break;
6547 ret = gimplify_conversion (expr_p);
6548 if (ret == GS_ERROR)
6549 break;
6550 if (*expr_p != save_expr)
6551 break;
6552 /* FALLTHRU */
6554 case FIX_TRUNC_EXPR:
6555 /* unary_expr: ... | '(' cast ')' val | ... */
6556 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6557 is_gimple_val, fb_rvalue);
6558 recalculate_side_effects (*expr_p);
6559 break;
6561 case INDIRECT_REF:
6562 *expr_p = fold_indirect_ref (*expr_p);
6563 if (*expr_p != save_expr)
6564 break;
6565 /* else fall through. */
6566 case ALIGN_INDIRECT_REF:
6567 case MISALIGNED_INDIRECT_REF:
6568 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6569 is_gimple_reg, fb_rvalue);
6570 recalculate_side_effects (*expr_p);
6571 break;
6573 /* Constants need not be gimplified. */
6574 case INTEGER_CST:
6575 case REAL_CST:
6576 case FIXED_CST:
6577 case STRING_CST:
6578 case COMPLEX_CST:
6579 case VECTOR_CST:
6580 ret = GS_ALL_DONE;
6581 break;
6583 case CONST_DECL:
6584 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6585 CONST_DECL node. Otherwise the decl is replaceable by its
6586 value. */
6587 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6588 if (fallback & fb_lvalue)
6589 ret = GS_ALL_DONE;
6590 else
6591 *expr_p = DECL_INITIAL (*expr_p);
6592 break;
6594 case DECL_EXPR:
6595 ret = gimplify_decl_expr (expr_p, pre_p);
6596 break;
6598 case EXC_PTR_EXPR:
6599 /* FIXME make this a decl. */
6600 ret = GS_ALL_DONE;
6601 break;
6603 case BIND_EXPR:
6604 ret = gimplify_bind_expr (expr_p, pre_p);
6605 break;
6607 case LOOP_EXPR:
6608 ret = gimplify_loop_expr (expr_p, pre_p);
6609 break;
6611 case SWITCH_EXPR:
6612 ret = gimplify_switch_expr (expr_p, pre_p);
6613 break;
6615 case EXIT_EXPR:
6616 ret = gimplify_exit_expr (expr_p);
6617 break;
6619 case GOTO_EXPR:
6620 /* If the target is not a LABEL_DECL, then it is a computed jump
6621 and the target needs to be gimplified. */
6622 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6624 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6625 NULL, is_gimple_val, fb_rvalue);
6626 if (ret == GS_ERROR)
6627 break;
6629 gimplify_seq_add_stmt (pre_p,
6630 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6631 break;
6633 case PREDICT_EXPR:
6634 gimplify_seq_add_stmt (pre_p,
6635 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6636 PREDICT_EXPR_OUTCOME (*expr_p)));
6637 ret = GS_ALL_DONE;
6638 break;
6640 case LABEL_EXPR:
6641 ret = GS_ALL_DONE;
6642 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6643 == current_function_decl);
6644 gimplify_seq_add_stmt (pre_p,
6645 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6646 break;
6648 case CASE_LABEL_EXPR:
6649 ret = gimplify_case_label_expr (expr_p, pre_p);
6650 break;
6652 case RETURN_EXPR:
6653 ret = gimplify_return_expr (*expr_p, pre_p);
6654 break;
6656 case CONSTRUCTOR:
6657 /* Don't reduce this in place; let gimplify_init_constructor work its
6658 magic. But if we're just elaborating this for side effects,
6659 gimplify any element that has side-effects. */
6660 if (fallback == fb_none)
6662 unsigned HOST_WIDE_INT ix;
6663 constructor_elt *ce;
6664 tree temp = NULL_TREE;
6665 for (ix = 0;
6666 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6667 ix, ce);
6668 ix++)
6669 if (TREE_SIDE_EFFECTS (ce->value))
6670 append_to_statement_list (ce->value, &temp);
6672 *expr_p = temp;
6673 ret = GS_OK;
6675 /* C99 code may assign to an array in a constructed
6676 structure or union, and this has undefined behavior only
6677 on execution, so create a temporary if an lvalue is
6678 required. */
6679 else if (fallback == fb_lvalue)
6681 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6682 mark_addressable (*expr_p);
6684 else
6685 ret = GS_ALL_DONE;
6686 break;
6688 /* The following are special cases that are not handled by the
6689 original GIMPLE grammar. */
6691 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6692 eliminated. */
6693 case SAVE_EXPR:
6694 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6695 break;
6697 case BIT_FIELD_REF:
6699 enum gimplify_status r0, r1, r2;
6701 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6702 post_p, is_gimple_lvalue, fb_either);
6703 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6704 post_p, is_gimple_val, fb_rvalue);
6705 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6706 post_p, is_gimple_val, fb_rvalue);
6707 recalculate_side_effects (*expr_p);
6709 ret = MIN (r0, MIN (r1, r2));
6711 break;
6713 case TARGET_MEM_REF:
6715 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6717 if (TMR_SYMBOL (*expr_p))
6718 r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
6719 post_p, is_gimple_lvalue, fb_either);
6720 else if (TMR_BASE (*expr_p))
6721 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6722 post_p, is_gimple_val, fb_either);
6723 if (TMR_INDEX (*expr_p))
6724 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6725 post_p, is_gimple_val, fb_rvalue);
6726 /* TMR_STEP and TMR_OFFSET are always integer constants. */
6727 ret = MIN (r0, r1);
6729 break;
6731 case NON_LVALUE_EXPR:
6732 /* This should have been stripped above. */
6733 gcc_unreachable ();
6735 case ASM_EXPR:
6736 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6737 break;
6739 case TRY_FINALLY_EXPR:
6740 case TRY_CATCH_EXPR:
6742 gimple_seq eval, cleanup;
6743 gimple try_;
6745 eval = cleanup = NULL;
6746 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6747 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6748 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
6749 if (gimple_seq_empty_p (cleanup))
6751 gimple_seq_add_seq (pre_p, eval);
6752 ret = GS_ALL_DONE;
6753 break;
6755 try_ = gimple_build_try (eval, cleanup,
6756 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6757 ? GIMPLE_TRY_FINALLY
6758 : GIMPLE_TRY_CATCH);
6759 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6760 gimple_try_set_catch_is_cleanup (try_,
6761 TRY_CATCH_IS_CLEANUP (*expr_p));
6762 gimplify_seq_add_stmt (pre_p, try_);
6763 ret = GS_ALL_DONE;
6764 break;
6767 case CLEANUP_POINT_EXPR:
6768 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6769 break;
6771 case TARGET_EXPR:
6772 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6773 break;
6775 case CATCH_EXPR:
6777 gimple c;
6778 gimple_seq handler = NULL;
6779 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6780 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6781 gimplify_seq_add_stmt (pre_p, c);
6782 ret = GS_ALL_DONE;
6783 break;
6786 case EH_FILTER_EXPR:
6788 gimple ehf;
6789 gimple_seq failure = NULL;
6791 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6792 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6793 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6794 gimple_eh_filter_set_must_not_throw
6795 (ehf, EH_FILTER_MUST_NOT_THROW (*expr_p));
6796 gimplify_seq_add_stmt (pre_p, ehf);
6797 ret = GS_ALL_DONE;
6798 break;
6801 case OBJ_TYPE_REF:
6803 enum gimplify_status r0, r1;
6804 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6805 post_p, is_gimple_val, fb_rvalue);
6806 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6807 post_p, is_gimple_val, fb_rvalue);
6808 TREE_SIDE_EFFECTS (*expr_p) = 0;
6809 ret = MIN (r0, r1);
6811 break;
6813 case LABEL_DECL:
6814 /* We get here when taking the address of a label. We mark
6815 the label as "forced", meaning it can never be removed and
6816 is a potential target for any computed goto. */
6817 FORCED_LABEL (*expr_p) = 1;
6818 ret = GS_ALL_DONE;
6819 break;
6821 case STATEMENT_LIST:
6822 ret = gimplify_statement_list (expr_p, pre_p);
6823 break;
6825 case WITH_SIZE_EXPR:
6827 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6828 post_p == &internal_post ? NULL : post_p,
6829 gimple_test_f, fallback);
6830 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6831 is_gimple_val, fb_rvalue);
6833 break;
6835 case VAR_DECL:
6836 case PARM_DECL:
6837 ret = gimplify_var_or_parm_decl (expr_p);
6838 break;
6840 case RESULT_DECL:
6841 /* When within an OpenMP context, notice uses of variables. */
6842 if (gimplify_omp_ctxp)
6843 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6844 ret = GS_ALL_DONE;
6845 break;
6847 case SSA_NAME:
6848 /* Allow callbacks into the gimplifier during optimization. */
6849 ret = GS_ALL_DONE;
6850 break;
6852 case OMP_PARALLEL:
6853 gimplify_omp_parallel (expr_p, pre_p);
6854 ret = GS_ALL_DONE;
6855 break;
6857 case OMP_TASK:
6858 gimplify_omp_task (expr_p, pre_p);
6859 ret = GS_ALL_DONE;
6860 break;
6862 case OMP_FOR:
6863 ret = gimplify_omp_for (expr_p, pre_p);
6864 break;
6866 case OMP_SECTIONS:
6867 case OMP_SINGLE:
6868 gimplify_omp_workshare (expr_p, pre_p);
6869 ret = GS_ALL_DONE;
6870 break;
6872 case OMP_SECTION:
6873 case OMP_MASTER:
6874 case OMP_ORDERED:
6875 case OMP_CRITICAL:
6877 gimple_seq body = NULL;
6878 gimple g;
6880 gimplify_and_add (OMP_BODY (*expr_p), &body);
6881 switch (TREE_CODE (*expr_p))
6883 case OMP_SECTION:
6884 g = gimple_build_omp_section (body);
6885 break;
6886 case OMP_MASTER:
6887 g = gimple_build_omp_master (body);
6888 break;
6889 case OMP_ORDERED:
6890 g = gimple_build_omp_ordered (body);
6891 break;
6892 case OMP_CRITICAL:
6893 g = gimple_build_omp_critical (body,
6894 OMP_CRITICAL_NAME (*expr_p));
6895 break;
6896 default:
6897 gcc_unreachable ();
6899 gimplify_seq_add_stmt (pre_p, g);
6900 ret = GS_ALL_DONE;
6901 break;
6904 case OMP_ATOMIC:
6905 ret = gimplify_omp_atomic (expr_p, pre_p);
6906 break;
6908 case POINTER_PLUS_EXPR:
6909 case POINTER_PLUSNV_EXPR:
6910 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
6911 The latter is a gimple immediate operand, saving the need for an extra statement.
6913 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6914 && (tmp = maybe_fold_offset_to_address
6915 (EXPR_LOCATION (*expr_p),
6916 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
6917 TREE_TYPE (*expr_p))))
6919 *expr_p = tmp;
6920 break;
6922 /* Convert (void *)&a + 4 into (void *)&a[1]. */
6923 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
6924 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6925 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
6926 0),0)))
6927 && (tmp = maybe_fold_offset_to_address
6928 (EXPR_LOCATION (*expr_p),
6929 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
6930 TREE_OPERAND (*expr_p, 1),
6931 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
6932 0)))))
6934 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
6935 break;
6937 /* FALLTHRU */
6939 default:
6940 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6942 case tcc_comparison:
6943 /* Handle comparison of objects of non scalar mode aggregates
6944 with a call to memcmp. It would be nice to only have to do
6945 this for variable-sized objects, but then we'd have to allow
6946 the same nest of reference nodes we allow for MODIFY_EXPR and
6947 that's too complex.
6949 Compare scalar mode aggregates as scalar mode values. Using
6950 memcmp for them would be very inefficient at best, and is
6951 plain wrong if bitfields are involved. */
6953 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
6955 if (!AGGREGATE_TYPE_P (type))
6956 goto expr_2;
6957 else if (TYPE_MODE (type) != BLKmode)
6958 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
6959 else
6960 ret = gimplify_variable_sized_compare (expr_p);
6962 break;
6965 /* If *EXPR_P does not need to be special-cased, handle it
6966 according to its class. */
6967 case tcc_unary:
6968 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6969 post_p, is_gimple_val, fb_rvalue);
6970 break;
6972 case tcc_binary:
6973 expr_2:
6975 enum gimplify_status r0, r1;
6977 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6978 post_p, is_gimple_val, fb_rvalue);
6979 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6980 post_p, is_gimple_val, fb_rvalue);
6982 ret = MIN (r0, r1);
6983 break;
6986 case tcc_declaration:
6987 case tcc_constant:
6988 ret = GS_ALL_DONE;
6989 goto dont_recalculate;
6991 default:
6992 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
6993 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
6994 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
6995 goto expr_2;
6998 recalculate_side_effects (*expr_p);
7000 dont_recalculate:
7001 break;
7004 /* If we replaced *expr_p, gimplify again. */
7005 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
7006 ret = GS_ALL_DONE;
7008 while (ret == GS_OK);
7010 /* If we encountered an error_mark somewhere nested inside, either
7011 stub out the statement or propagate the error back out. */
7012 if (ret == GS_ERROR)
7014 if (is_statement)
7015 *expr_p = NULL;
7016 goto out;
7019 /* This was only valid as a return value from the langhook, which
7020 we handled. Make sure it doesn't escape from any other context. */
7021 gcc_assert (ret != GS_UNHANDLED);
7023 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7025 /* We aren't looking for a value, and we don't have a valid
7026 statement. If it doesn't have side-effects, throw it away. */
7027 if (!TREE_SIDE_EFFECTS (*expr_p))
7028 *expr_p = NULL;
7029 else if (!TREE_THIS_VOLATILE (*expr_p))
7031 /* This is probably a _REF that contains something nested that
7032 has side effects. Recurse through the operands to find it. */
7033 enum tree_code code = TREE_CODE (*expr_p);
7035 switch (code)
7037 case COMPONENT_REF:
7038 case REALPART_EXPR:
7039 case IMAGPART_EXPR:
7040 case VIEW_CONVERT_EXPR:
7041 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7042 gimple_test_f, fallback);
7043 break;
7045 case ARRAY_REF:
7046 case ARRAY_RANGE_REF:
7047 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7048 gimple_test_f, fallback);
7049 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7050 gimple_test_f, fallback);
7051 break;
7053 default:
7054 /* Anything else with side-effects must be converted to
7055 a valid statement before we get here. */
7056 gcc_unreachable ();
7059 *expr_p = NULL;
7061 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7062 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7064 /* Historically, the compiler has treated a bare reference
7065 to a non-BLKmode volatile lvalue as forcing a load. */
7066 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7068 /* Normally, we do not want to create a temporary for a
7069 TREE_ADDRESSABLE type because such a type should not be
7070 copied by bitwise-assignment. However, we make an
7071 exception here, as all we are doing is ensuring that
7072 we read the bytes that make up the type. We use
7073 create_tmp_var_raw because create_tmp_var will abort when
7074 given a TREE_ADDRESSABLE type. */
7075 tree tmp = create_tmp_var_raw (type, "vol");
7076 gimple_add_tmp_var (tmp);
7077 gimplify_assign (tmp, *expr_p, pre_p);
7078 *expr_p = NULL;
7080 else
7081 /* We can't do anything useful with a volatile reference to
7082 an incomplete type, so just throw it away. Likewise for
7083 a BLKmode type, since any implicit inner load should
7084 already have been turned into an explicit one by the
7085 gimplification process. */
7086 *expr_p = NULL;
7089 /* If we are gimplifying at the statement level, we're done. Tack
7090 everything together and return. */
7091 if (fallback == fb_none || is_statement)
7093 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7094 it out for GC to reclaim it. */
7095 *expr_p = NULL_TREE;
7097 if (!gimple_seq_empty_p (internal_pre)
7098 || !gimple_seq_empty_p (internal_post))
7100 gimplify_seq_add_seq (&internal_pre, internal_post);
7101 gimplify_seq_add_seq (pre_p, internal_pre);
7104 /* The result of gimplifying *EXPR_P is going to be the last few
7105 statements in *PRE_P and *POST_P. Add location information
7106 to all the statements that were added by the gimplification
7107 helpers. */
7108 if (!gimple_seq_empty_p (*pre_p))
7109 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7111 if (!gimple_seq_empty_p (*post_p))
7112 annotate_all_with_location_after (*post_p, post_last_gsi,
7113 input_location);
7115 goto out;
7118 #ifdef ENABLE_GIMPLE_CHECKING
7119 if (*expr_p)
7121 enum tree_code code = TREE_CODE (*expr_p);
7122 /* These expressions should already be in gimple IR form. */
7123 gcc_assert (code != MODIFY_EXPR
7124 && code != ASM_EXPR
7125 && code != BIND_EXPR
7126 && code != CATCH_EXPR
7127 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7128 && code != EH_FILTER_EXPR
7129 && code != GOTO_EXPR
7130 && code != LABEL_EXPR
7131 && code != LOOP_EXPR
7132 && code != RESX_EXPR
7133 && code != SWITCH_EXPR
7134 && code != TRY_FINALLY_EXPR
7135 && code != OMP_CRITICAL
7136 && code != OMP_FOR
7137 && code != OMP_MASTER
7138 && code != OMP_ORDERED
7139 && code != OMP_PARALLEL
7140 && code != OMP_SECTIONS
7141 && code != OMP_SECTION
7142 && code != OMP_SINGLE);
7144 #endif
7146 /* Otherwise we're gimplifying a subexpression, so the resulting
7147 value is interesting. If it's a valid operand that matches
7148 GIMPLE_TEST_F, we're done. Unless we are handling some
7149 post-effects internally; if that's the case, we need to copy into
7150 a temporary before adding the post-effects to POST_P. */
7151 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7152 goto out;
7154 /* Otherwise, we need to create a new temporary for the gimplified
7155 expression. */
7157 /* We can't return an lvalue if we have an internal postqueue. The
7158 object the lvalue refers to would (probably) be modified by the
7159 postqueue; we need to copy the value out first, which means an
7160 rvalue. */
7161 if ((fallback & fb_lvalue)
7162 && gimple_seq_empty_p (internal_post)
7163 && is_gimple_addressable (*expr_p))
7165 /* An lvalue will do. Take the address of the expression, store it
7166 in a temporary, and replace the expression with an INDIRECT_REF of
7167 that temporary. */
7168 tmp = build_fold_addr_expr (*expr_p);
7169 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7170 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
7172 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7174 /* An rvalue will do. Assign the gimplified expression into a
7175 new temporary TMP and replace the original expression with
7176 TMP. First, make sure that the expression has a type so that
7177 it can be assigned into a temporary. */
7178 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7180 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7181 /* The postqueue might change the value of the expression between
7182 the initialization and use of the temporary, so we can't use a
7183 formal temp. FIXME do we care? */
7185 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7186 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7187 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7188 DECL_GIMPLE_REG_P (*expr_p) = 1;
7190 else
7191 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7193 else
7195 #ifdef ENABLE_GIMPLE_CHECKING
7196 if (!(fallback & fb_mayfail))
7198 fprintf (stderr, "gimplification failed:\n");
7199 print_generic_expr (stderr, *expr_p, 0);
7200 debug_tree (*expr_p);
7201 internal_error ("gimplification failed");
7203 #endif
7204 gcc_assert (fallback & fb_mayfail);
7206 /* If this is an asm statement, and the user asked for the
7207 impossible, don't die. Fail and let gimplify_asm_expr
7208 issue an error. */
7209 ret = GS_ERROR;
7210 goto out;
7213 /* Make sure the temporary matches our predicate. */
7214 gcc_assert ((*gimple_test_f) (*expr_p));
7216 if (!gimple_seq_empty_p (internal_post))
7218 annotate_all_with_location (internal_post, input_location);
7219 gimplify_seq_add_seq (pre_p, internal_post);
7222 out:
7223 input_location = saved_location;
7224 return ret;
7227 /* Look through TYPE for variable-sized objects and gimplify each such
7228 size that we find. Add to LIST_P any statements generated. */
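/* Example (illustrative): for a C99 VLA type such as the one in

     void f (int n) { double a[n][n]; ... }

   the array type's TYPE_SIZE and its domain's TYPE_MAX_VALUE contain
   expressions in N; the walk below gimplifies each such size or bound
   once per main variant and appends the computations to LIST_P.  */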
7230 void
7231 gimplify_type_sizes (tree type, gimple_seq *list_p)
7233 tree field, t;
7235 if (type == NULL || type == error_mark_node)
7236 return;
7238 /* We first do the main variant, then copy into any other variants. */
7239 type = TYPE_MAIN_VARIANT (type);
7241 /* Avoid infinite recursion. */
7242 if (TYPE_SIZES_GIMPLIFIED (type))
7243 return;
7245 TYPE_SIZES_GIMPLIFIED (type) = 1;
7247 switch (TREE_CODE (type))
7249 case INTEGER_TYPE:
7250 case ENUMERAL_TYPE:
7251 case BOOLEAN_TYPE:
7252 case REAL_TYPE:
7253 case FIXED_POINT_TYPE:
7254 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7255 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7257 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7259 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7260 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7262 break;
7264 case ARRAY_TYPE:
7265 /* These types may not have declarations, so handle them here. */
7266 gimplify_type_sizes (TREE_TYPE (type), list_p);
7267 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7268 /* When not optimizing, ensure VLA bounds aren't removed. */
7269 if (!optimize
7270 && TYPE_DOMAIN (type)
7271 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7273 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7274 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7275 DECL_IGNORED_P (t) = 0;
7276 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7277 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7278 DECL_IGNORED_P (t) = 0;
7280 break;
7282 case RECORD_TYPE:
7283 case UNION_TYPE:
7284 case QUAL_UNION_TYPE:
7285 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7286 if (TREE_CODE (field) == FIELD_DECL)
7288 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7289 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7290 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7291 gimplify_type_sizes (TREE_TYPE (field), list_p);
7293 break;
7295 case POINTER_TYPE:
7296 case REFERENCE_TYPE:
7297 /* We used to recurse on the pointed-to type here, which turned out to
7298 be incorrect because its definition might refer to variables not
7299 yet initialized at this point if a forward declaration is involved.
7301 It was actually useful for anonymous pointed-to types to ensure
7302 that the sizes evaluation dominates every possible later use of the
7303 values. Restricting to such types here would be safe since there
7304 is no possible forward declaration around, but would introduce an
7305 undesirable middle-end semantic to anonymity. We then defer to
7306 front-ends the responsibility of ensuring that the sizes are
7307 evaluated both early and late enough, e.g. by attaching artificial
7308 type declarations to the tree. */
7309 break;
7311 default:
7312 break;
7315 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7316 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7318 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7320 TYPE_SIZE (t) = TYPE_SIZE (type);
7321 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7322 TYPE_SIZES_GIMPLIFIED (t) = 1;
7326 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7327 a size or position, has had all of its SAVE_EXPRs evaluated.
7328 We add any required statements to *STMT_P. */
7330 void
7331 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7333 tree type, expr = *expr_p;
7335 /* We don't do anything if the value isn't there, is constant, or contains
7336 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
7337 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
7338 will want to replace it with a new variable, but that will cause problems
7339 if this type is from outside the function. It's OK to have that here. */
7340 if (expr == NULL_TREE || TREE_CONSTANT (expr)
7341 || TREE_CODE (expr) == VAR_DECL
7342 || CONTAINS_PLACEHOLDER_P (expr))
7343 return;
7345 type = TREE_TYPE (expr);
7346 *expr_p = unshare_expr (expr);
7348 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7349 expr = *expr_p;
7351 /* Verify that we have an exact type match with the original expression.
7352 In particular, we do not wish to drop a "sizetype" in favour of a
7353 type of similar dimensions. We don't want to pollute the generic
7354 type-stripping code with this knowledge because it doesn't matter
7355 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
7356 and friends retain their "sizetype-ness". */
7357 if (TREE_TYPE (expr) != type
7358 && TREE_CODE (type) == INTEGER_TYPE
7359 && TYPE_IS_SIZETYPE (type))
7361 tree tmp;
7362 gimple stmt;
7364 *expr_p = create_tmp_var (type, NULL);
7365 tmp = build1 (NOP_EXPR, type, expr);
7366 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7367 if (EXPR_HAS_LOCATION (expr))
7368 gimple_set_location (stmt, EXPR_LOCATION (expr));
7369 else
7370 gimple_set_location (stmt, input_location);
7375 /* Gimplify the body of statements pointed to by BODY_P and return a
7376 GIMPLE_BIND containing the sequence of GIMPLE statements
7377 corresponding to BODY_P. FNDECL is the function decl containing
7378 *BODY_P. */
7380 gimple
7381 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
7383 location_t saved_location = input_location;
7384 gimple_seq parm_stmts, seq;
7385 gimple outer_bind;
7386 struct gimplify_ctx gctx;
7388 timevar_push (TV_TREE_GIMPLIFY);
7390 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7391 gimplification. */
7392 default_rtl_profile ();
7394 gcc_assert (gimplify_ctxp == NULL);
7395 push_gimplify_context (&gctx);
7397 /* Unshare most shared trees in the body and in that of any nested functions.
7398 It would seem we don't have to do this for nested functions because
7399 they are supposed to be output and then the outer function gimplified
7400 first, but the g++ front end doesn't always do it that way. */
7401 unshare_body (body_p, fndecl);
7402 unvisit_body (body_p, fndecl);
7404 if (cgraph_node (fndecl)->origin)
7405 nonlocal_vlas = pointer_set_create ();
7407 /* Make sure input_location isn't set to something weird. */
7408 input_location = DECL_SOURCE_LOCATION (fndecl);
7410 /* Resolve callee-copies. This has to be done before processing
7411 the body so that DECL_VALUE_EXPR gets processed correctly. */
7412 parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
7414 /* Gimplify the function's body. */
7415 seq = NULL;
7416 gimplify_stmt (body_p, &seq);
7417 outer_bind = gimple_seq_first_stmt (seq);
7418 if (!outer_bind)
7420 outer_bind = gimple_build_nop ();
7421 gimplify_seq_add_stmt (&seq, outer_bind);
7424 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
7425 not the case, wrap everything in a GIMPLE_BIND to make it so. */
7426 if (gimple_code (outer_bind) == GIMPLE_BIND
7427 && gimple_seq_first (seq) == gimple_seq_last (seq))
7429 else
7430 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7432 *body_p = NULL_TREE;
7434 /* If we had callee-copies statements, insert them at the beginning
7435 of the function. */
7436 if (!gimple_seq_empty_p (parm_stmts))
7438 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7439 gimple_bind_set_body (outer_bind, parm_stmts);
7442 if (nonlocal_vlas)
7444 pointer_set_destroy (nonlocal_vlas);
7445 nonlocal_vlas = NULL;
7448 pop_gimplify_context (outer_bind);
7449 gcc_assert (gimplify_ctxp == NULL);
7451 #ifdef ENABLE_TYPES_CHECKING
7452 if (!errorcount && !sorrycount)
7453 verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7454 #endif
7456 timevar_pop (TV_TREE_GIMPLIFY);
7457 input_location = saved_location;
7459 return outer_bind;
7462 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
7463 node for the function we want to gimplify.
7465 The resulting sequence of GIMPLE statements corresponding to the body
7466 of FNDECL is stored in FNDECL with gimple_set_body; DECL_SAVED_TREE is cleared. */
7468 void
7469 gimplify_function_tree (tree fndecl)
7471 tree oldfn, parm, ret;
7472 gimple_seq seq;
7473 gimple bind;
7475 oldfn = current_function_decl;
7476 current_function_decl = fndecl;
7477 if (DECL_STRUCT_FUNCTION (fndecl))
7478 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7479 else
7480 push_struct_function (fndecl);
7482 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7484 /* Preliminarily mark non-addressed complex and vector variables as eligible
7485 for promotion to gimple registers. We'll transform their uses
7486 as we find them. */
7487 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7488 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7489 && !TREE_THIS_VOLATILE (parm)
7490 && !needs_to_live_in_memory (parm))
7491 DECL_GIMPLE_REG_P (parm) = 1;
7494 ret = DECL_RESULT (fndecl);
7495 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7496 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7497 && !needs_to_live_in_memory (ret))
7498 DECL_GIMPLE_REG_P (ret) = 1;
7500 bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7502 /* The tree body of the function is no longer needed, replace it
7503 with the new GIMPLE body. */
7504 seq = gimple_seq_alloc ();
7505 gimple_seq_add_stmt (&seq, bind);
7506 gimple_set_body (fndecl, seq);
7508 /* If we're instrumenting function entry/exit, then prepend the call to
7509 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
7510 catch the exit hook. */
7511 /* ??? Add some way to ignore exceptions for this TFE. */
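 /* Conceptually the result is (an illustrative sketch, not literal
 gimplifier output):

 {
   call to the BUILT_IN_PROFILE_FUNC_ENTER decl;
   try
     {
       original outer GIMPLE_BIND
     }
   finally
     {
       call to the BUILT_IN_PROFILE_FUNC_EXIT decl;
     }
 }

 with both callees taken from implicit_built_in_decls[]. */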
7512 if (flag_instrument_function_entry_exit
7513 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7514 && !flag_instrument_functions_exclude_p (fndecl))
7516 tree x;
7517 gimple new_bind;
7518 gimple tf;
7519 gimple_seq cleanup = NULL, body = NULL;
7521 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7522 gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7523 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7525 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7526 gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7527 gimplify_seq_add_stmt (&body, tf);
7528 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7529 /* Clear the block for BIND, since it is no longer directly inside
7530 the function, but within a try block. */
7531 gimple_bind_set_block (bind, NULL);
7533 /* Replace the current function body with the body
7534 wrapped in the try/finally TF. */
7535 seq = gimple_seq_alloc ();
7536 gimple_seq_add_stmt (&seq, new_bind);
7537 gimple_set_body (fndecl, seq);
7540 DECL_SAVED_TREE (fndecl) = NULL_TREE;
7542 current_function_decl = oldfn;
7543 pop_cfun ();
7547 /* Some transformations like inlining may invalidate the GIMPLE form
7548 for operands. This function traverses all the operands in STMT and
7549 gimplifies anything that is not a valid gimple operand. Any new
7550 GIMPLE statements are inserted before *GSI_P. */
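 /* A typical caller (an illustrative sketch; BB and the enclosing pass are
 hypothetical) re-gimplifies every statement of a block it has just
 rewritten:

 gimple_stmt_iterator gsi;
 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
   gimple_regimplify_operands (gsi_stmt (gsi), &gsi);  */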
7552 void
7553 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
7555 size_t i, num_ops;
7556 tree orig_lhs = NULL_TREE, lhs, t;
7557 gimple_seq pre = NULL;
7558 gimple post_stmt = NULL;
7559 struct gimplify_ctx gctx;
7561 push_gimplify_context (&gctx);
7562 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7564 switch (gimple_code (stmt))
7566 case GIMPLE_COND:
7567 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
7568 is_gimple_val, fb_rvalue);
7569 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
7570 is_gimple_val, fb_rvalue);
7571 break;
7572 case GIMPLE_SWITCH:
7573 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
7574 is_gimple_val, fb_rvalue);
7575 break;
7576 case GIMPLE_OMP_ATOMIC_LOAD:
7577 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
7578 is_gimple_val, fb_rvalue);
7579 break;
7580 case GIMPLE_ASM:
7582 size_t i, noutputs = gimple_asm_noutputs (stmt);
7583 const char *constraint, **oconstraints;
7584 bool allows_mem, allows_reg, is_inout;
7586 oconstraints
7587 = (const char **) alloca ((noutputs) * sizeof (const char *));
7588 for (i = 0; i < noutputs; i++)
7590 tree op = gimple_asm_output_op (stmt, i);
7591 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7592 oconstraints[i] = constraint;
7593 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
7594 &allows_reg, &is_inout);
7595 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7596 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7597 fb_lvalue | fb_mayfail);
7599 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
7601 tree op = gimple_asm_input_op (stmt, i);
7602 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7603 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7604 oconstraints, &allows_mem, &allows_reg);
7605 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
7606 allows_reg = 0;
7607 if (!allows_reg && allows_mem)
7608 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7609 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7610 else
7611 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7612 is_gimple_asm_val, fb_rvalue);
7615 break;
7616 default:
7617 /* NOTE: We start gimplifying operands from last to first to
7618 make sure that side-effects on the RHS of calls, assignments
7619 and ASMs are executed before the LHS. The ordering is not
7620 important for other statements. */
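 /* For example (illustrative): for "*p = foo (x)" the statements PRE
 accumulates while gimplifying the call on the RHS must come before
 any statements produced for the LHS, hence the reverse walk. */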
7621 num_ops = gimple_num_ops (stmt);
7622 orig_lhs = gimple_get_lhs (stmt);
7623 for (i = num_ops; i > 0; i--)
7625 tree op = gimple_op (stmt, i - 1);
7626 if (op == NULL_TREE)
7627 continue;
7628 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
7629 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
7630 else if (i == 2
7631 && is_gimple_assign (stmt)
7632 && num_ops == 2
7633 && get_gimple_rhs_class (gimple_expr_code (stmt))
7634 == GIMPLE_SINGLE_RHS)
7635 gimplify_expr (&op, &pre, NULL,
7636 rhs_predicate_for (gimple_assign_lhs (stmt)),
7637 fb_rvalue);
7638 else if (i == 2 && is_gimple_call (stmt))
7640 if (TREE_CODE (op) == FUNCTION_DECL)
7641 continue;
7642 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
7644 else
7645 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
7646 gimple_set_op (stmt, i - 1, op);
7649 lhs = gimple_get_lhs (stmt);
7650 /* If the LHS has changed in a way that requires a simple RHS,
7651 create a temporary. */
7652 if (lhs && !is_gimple_reg (lhs))
7654 bool need_temp = false;
7656 if (is_gimple_assign (stmt)
7657 && num_ops == 2
7658 && get_gimple_rhs_class (gimple_expr_code (stmt))
7659 == GIMPLE_SINGLE_RHS)
7660 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
7661 rhs_predicate_for (gimple_assign_lhs (stmt)),
7662 fb_rvalue);
7663 else if (is_gimple_reg (lhs))
7665 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7667 if (is_gimple_call (stmt))
7669 i = gimple_call_flags (stmt);
7670 if ((i & ECF_LOOPING_CONST_OR_PURE)
7671 || !(i & (ECF_CONST | ECF_PURE)))
7672 need_temp = true;
7674 if (stmt_can_throw_internal (stmt))
7675 need_temp = true;
7678 else
7680 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7681 need_temp = true;
7682 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
7684 if (is_gimple_call (stmt))
7686 tree fndecl = gimple_call_fndecl (stmt);
7688 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
7689 && !(fndecl && DECL_RESULT (fndecl)
7690 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
7691 need_temp = true;
7693 else
7694 need_temp = true;
7697 if (need_temp)
7699 tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);
7701 if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
7702 || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
7703 DECL_GIMPLE_REG_P (temp) = 1;
7704 if (TREE_CODE (orig_lhs) == SSA_NAME)
7705 orig_lhs = SSA_NAME_VAR (orig_lhs);
7707 if (gimple_in_ssa_p (cfun))
7708 temp = make_ssa_name (temp, NULL);
7709 gimple_set_lhs (stmt, temp);
7710 post_stmt = gimple_build_assign (lhs, temp);
7711 if (TREE_CODE (lhs) == SSA_NAME)
7712 SSA_NAME_DEF_STMT (lhs) = post_stmt;
7715 break;
7718 if (gimple_referenced_vars (cfun))
7719 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7720 add_referenced_var (t);
7722 if (!gimple_seq_empty_p (pre))
7724 if (gimple_in_ssa_p (cfun))
7726 gimple_stmt_iterator i;
7728 for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
7729 mark_symbols_for_renaming (gsi_stmt (i));
7731 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
7733 if (post_stmt)
7734 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
7736 pop_gimplify_context (NULL);
7740 /* Expand EXPR into the list of GIMPLE statements STMTS. If SIMPLE is true,
7741 force the result to be either an SSA_NAME or an invariant, otherwise
7742 just force it to be a rhs expression. If VAR is not NULL, make the
7743 base variable of the final destination be VAR if suitable. */
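 /* A typical use (an illustrative sketch; EXPR and GSI are hypothetical):

 gimple_seq stmts = NULL;
 tree val = force_gimple_operand (expr, &stmts, true, NULL_TREE);
 if (!gimple_seq_empty_p (stmts))
   gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

 force_gimple_operand_gsi below wraps this insert-at-iterator pattern. */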
7745 tree
7746 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7748 tree t;
7749 enum gimplify_status ret;
7750 gimple_predicate gimple_test_f;
7751 struct gimplify_ctx gctx;
7753 *stmts = NULL;
7755 if (is_gimple_val (expr))
7756 return expr;
7758 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7760 push_gimplify_context (&gctx);
7761 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7762 gimplify_ctxp->allow_rhs_cond_expr = true;
7764 if (var)
7765 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7767 if (TREE_CODE (expr) != MODIFY_EXPR
7768 && TREE_TYPE (expr) == void_type_node)
7770 gimplify_and_add (expr, stmts);
7771 expr = NULL_TREE;
7773 else
7775 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7776 gcc_assert (ret != GS_ERROR);
7779 if (gimple_referenced_vars (cfun))
7780 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7781 add_referenced_var (t);
7783 pop_gimplify_context (NULL);
7785 return expr;
7788 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
7789 some statements are produced, emits them at GSI. If BEFORE is true,
7790 the statements are inserted before GSI, otherwise they are inserted after
7791 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or
7792 GSI_CONTINUE_LINKING are the usual values). */
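 /* For example (illustrative; VAR and GSI are hypothetical), to materialize
 the address of VAR as a GIMPLE value right before GSI:

 tree addr = force_gimple_operand_gsi (&gsi, build_fold_addr_expr (var),
                                       true, NULL_TREE, true,
                                       GSI_SAME_STMT);  */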
7794 tree
7795 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7796 bool simple_p, tree var, bool before,
7797 enum gsi_iterator_update m)
7799 gimple_seq stmts;
7801 expr = force_gimple_operand (expr, &stmts, simple_p, var);
7803 if (!gimple_seq_empty_p (stmts))
7805 if (gimple_in_ssa_p (cfun))
7807 gimple_stmt_iterator i;
7809 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7810 mark_symbols_for_renaming (gsi_stmt (i));
7813 if (before)
7814 gsi_insert_seq_before (gsi, stmts, m);
7815 else
7816 gsi_insert_seq_after (gsi, stmts, m);
7819 return expr;
7822 #include "gt-gimplify.h"